<|file_name|>universal.py<|end_file_name|>
# $Id$
# -*- coding: utf8 -*-
# Authors: David Goodger <[email protected]>; Ueli Schlaepfer; Günter Milde
# Maintainer: [email protected]
# Copyright: This module has been placed in the public domain.
"""
Transforms needed by most or all documents:
- `Decorations`: Generate a document's header & footer.
- `Messages`: Placement of system messages stored in
`nodes.document.transform_messages`.
- `TestMessages`: Like `Messages`, used on test runs.
- `FinalReferences`: Resolve remaining references.
"""
__docformat__ = 'reStructuredText'
import re
import sys
import time
from docutils import nodes, utils
from docutils.transforms import TransformError, Transform
from docutils.utils import smartquotes
class Decorations(Transform):
"""
Populate a document's decoration element (header, footer).
"""
default_priority = 820
def apply(self):
header_nodes = self.generate_header()
if header_nodes:
decoration = self.document.get_decoration()
header = decoration.get_header()
header.extend(header_nodes)
footer_nodes = self.generate_footer()
if footer_nodes:
decoration = self.document.get_decoration()
footer = decoration.get_footer()
footer.extend(footer_nodes)
def generate_header(self):
return None
def generate_footer(self):
# @@@ Text is hard-coded for now.
# Should be made dynamic (language-dependent).
settings = self.document.settings
if settings.generator or settings.datestamp or settings.source_link \
or settings.source_url:
text = []
if settings.source_link and settings._source \
or settings.source_url:
if settings.source_url:
source = settings.source_url
else:
source = utils.relative_path(settings._destination,
settings._source)
text.extend([
nodes.reference('', 'View document source',
refuri=source),
nodes.Text('.\n')])
if settings.datestamp:
datestamp = time.strftime(settings.datestamp, time.gmtime())
text.append(nodes.Text('Generated on: ' + datestamp + '.\n'))
if settings.generator:
text.extend([
nodes.Text('Generated by '),
nodes.reference('', 'Docutils', refuri=
'http://docutils.sourceforge.net/'),
nodes.Text(' from '),
nodes.reference('', 'reStructuredText', refuri='http://'
'docutils.sourceforge.net/rst.html'),
nodes.Text(' source.\n')])
return [nodes.paragraph('', '', *text)]
else:
return None
class ExposeInternals(Transform):
"""
Expose internal attributes if ``expose_internals`` setting is set.
"""
default_priority = 840
def not_Text(self, node):
return not isinstance(node, nodes.Text)
def apply(self):
if self.document.settings.expose_internals:
for node in self.document.traverse(self.not_Text):
for att in self.document.settings.expose_internals:
value = getattr(node, att, None)
if value is not None:
node['internal:' + att] = value
class Messages(Transform):
"""
Place any system messages generated after parsing into a dedicated section
of the document.
"""
default_priority = 860
def apply(self):
unfiltered = self.document.transform_messages
threshold = self.document.reporter.report_level
messages = []
for msg in unfiltered:
if msg['level'] >= threshold and not msg.parent:
messages.append(msg)
if messages:
section = nodes.section(classes=['system-messages'])
# @@@ get this from the language module?
section += nodes.title('', 'Docutils System Messages')
section += messages
self.document.transform_messages[:] = []
self.document += section
class FilterMessages(Transform):
"""
Remove system messages below verbosity threshold.
"""
default_priority = 870
def apply(self):
for node in self.document.traverse(nodes.system_message):
if node['level'] < self.document.reporter.report_level:
node.parent.remove(node)
class TestMessages(Transform):
"""
Append all post-parse system messages to the end of the document.
Used for testing purposes.
"""
default_priority = 880
def apply(self):
for msg in self.document.transform_messages:
if not msg.parent:
self.document += msg
class StripComments(Transform):
"""
Remove comment elements from the document tree (only if the
``strip_comments`` setting is enabled).
"""
default_priority = 740
def apply(self):
if self.document.settings.strip_comments:
for node in self.document.traverse(nodes.comment):
node.parent.remove(node)
class StripClassesAndElements(Transform):
"""
Remove from the document tree all elements with classes in
`self.document.settings.strip_elements_with_classes` and all "classes"
attribute values in `self.document.settings.strip_classes`.
"""
default_priority = 420
def apply(self):
if not (self.document.settings.strip_elements_with_classes
or self.document.settings.strip_classes):
return
# prepare dicts for lookup (not sets, for Python 2.2 compatibility):
self.strip_elements = dict(
[(key, None)
for key in (self.document.settings.strip_elements_with_classes
or [])])
self.strip_classes = dict(
[(key, None) for key in (self.document.settings.strip_classes
or [])])
for node in self.document.traverse(self.check_classes):
node.parent.remove(node)
def check_classes(self, node):
if isinstance(node, nodes.Element):
for class_value in node['classes'][:]:
if class_value in self.strip_classes:
node['classes'].remove(class_value)
if class_value in self.strip_elements:
return 1
class SmartQuotes(Transform):
"""
Replace ASCII quotation marks with typographic form.
Also replace multiple dashes with em-dash/en-dash characters.
"""
default_priority = 850
def __init__(self, document, startnode):
Transform.__init__(self, document, startnode=startnode)
self.unsupported_languages = set()
def get_tokens(self, txtnodes):
# A generator that yields ``(texttype, nodetext)`` tuples for a list
# of "Text" nodes (interface to ``smartquotes.educate_tokens()``).
texttype = {True: 'literal', # "literal" text is not changed:
False: 'plain'}
for txtnode in txtnodes:
nodetype = texttype[isinstance(txtnode.parent,
(nodes.literal,
nodes.math,
nodes.image,
nodes.raw,
nodes.problematic))]
yield (nodetype, txtnode.astext())
def apply(self):
smart_quotes = self.document.settings.smart_quotes
if not smart_quotes:
return
try:
alternative = smart_quotes.startswith('alt')
except AttributeError:
alternative = False
# print repr(alternative)
document_language = self.document.settings.language_code
# "Educate" quotes in normal text. Handle each block of text
# (TextElement node) as a unit to keep context around inline nodes:
for node in self.document.traverse(nodes.TextElement):
# skip preformatted text blocks and special elements:
if isinstance(node, (nodes.FixedTextElement, nodes.Special)):
continue
# nested TextElements are not "block-level" elements:
if isinstance(node.parent, nodes.TextElement):
continue
# list of text nodes in the "text block":
txtnodes = [txtnode for txtnode in node.traverse(nodes.Text)
if not isinstance(txtnode.parent,
nodes.option_string)]
# language: use typographical quotes for language "lang"
lang = node.get_language_code(document_language)
# use alternative form if `smart-quotes` setting starts with "alt":
if alternative:
if '-x-altquot' in lang:
lang = lang.replace('-x-altquot', '')
else:
lang += '-x-altquot'
# drop subtags missing in quotes:
for tag in utils.normalize_language_tag(lang):
if tag in smartquotes.smartchars.quotes:
lang = tag
break
else: # language not supported: (keep ASCII quotes)
if lang not in self.unsupported_languages:
self.document.reporter.warning('No smart quotes '
'defined for language "%s".'%lang, base_node=node)
self.unsupported_languages.add(lang)
lang = ''
# Iterator educating quotes in plain text:
# '2': set all, using old school en- and em- dash shortcuts
teacher = smartquotes.educate_tokens(self.get_tokens(txtnodes),
attr='2', language=lang)
for txtnode, newtext in zip(txtnodes, teacher):
txtnode.parent.replace(txtnode, nodes.Text(newtext))
self.unsupported_languages = set() # reset
<|file_name|>tests.py<|end_file_name|>
import unittest

from app import read_config


class ConfigFileReaderTest(unittest.TestCase):
    def test_read(self):
        config = read_config('config')
        self.assertEqual(config['cmus_host'], 'raspberry')
        self.assertEqual(config['cmus_passwd'], 'PaSsWd')
        self.assertEqual(config['app_host'], 'localhost')
        self.assertEqual(config['app_port'], '8080')


if __name__ == '__main__':
    unittest.main()
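The `read_config` helper that this test imports from `app` is not part of the dump. A minimal sketch of an implementation that would satisfy the assertions above, assuming a plain `key = value` config-file format (the format, and the implementation itself, are assumptions for illustration only):

```python
# Hypothetical sketch of app.read_config as assumed by tests.py above.
# Only the function name and the config keys come from the test; the
# "key = value" file format is an assumption.
def read_config(path):
    config = {}
    with open(path) as fh:
        for line in fh:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            key, _, value = line.partition('=')
            config[key.strip()] = value.strip()
    return config
```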
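Returning to `universal.py` above: its transforms are not called directly but are selected by docutils runtime settings such as `smart_quotes`, `strip_comments`, and `expose_internals`. A minimal sketch of driving them through the public `publish_string` API; the sample reStructuredText source and the particular settings chosen here are illustrative assumptions, not taken from the original file:

```python
# Minimal sketch: run docutils so that the StripComments and SmartQuotes
# transforms defined in universal.py are applied. The sample source and
# settings are illustrative assumptions, not from the original file.
from docutils.core import publish_string

source = """\
.. this comment should be removed by StripComments

He said "hello" -- and left.
"""

html = publish_string(
    source,
    writer_name='html',
    settings_overrides={
        'smart_quotes': True,     # enable the SmartQuotes transform
        'strip_comments': True,   # enable the StripComments transform
    },
)
print(html.decode('utf-8'))
```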
<|file_name|>xrenderer.d.ts<|end_file_name|>
declare module "core/src/pointer/src/ByteArrayBase" {
export class ByteArrayBase {
static BIG_ENDIAN: string;
static LITTLE_ENDIAN: string;
static SIZE_OF_BOOLEAN: number;
static SIZE_OF_INT8: number;
static SIZE_OF_INT16: number;
static SIZE_OF_INT32: number;
static SIZE_OF_INT64: number;
static SIZE_OF_UINT8: number;
static SIZE_OF_UINT16: number;
static SIZE_OF_UINT32: number;
static SIZE_OF_UINT64: number;
static SIZE_OF_FLOAT32: number;
static SIZE_OF_FLOAT64: number;
private BUFFER_EXT_SIZE;
array: Uint8Array;
data: DataView;
private _position;
write_position: number;
endian: string;
constructor(buffer?: ArrayBuffer, offset?: number, length?: number);
buffer: ArrayBuffer;
dataView: DataView;
phyPosition: number;
bufferOffset: number;
position: number;
length: number;
bytesAvailable: number;
clear(): void;
getArray(): Uint8Array;
setArray(array: Uint8Array): void;
setBuffer(buffer: ArrayBuffer, offset?: number, length?: number): void;
readBoolean(): boolean;
readByte(): number;
readBytes(_bytes?: ByteArrayBase, offset?: number, length?: number, createNewBuffer?: boolean): ByteArrayBase;
readDouble(): number;
readFloat(): number;
readInt(): number;
readMultiByte(length: number, charSet?: string): string;
readShort(): number;
readUnsignedByte(): number;
readUnsignedInt(): number;
readVariableSizedUnsignedInt(): number;
readU16VX(): number;
readUnsignedShort(): number;
readUTF(): string;
readUTFBytes(length: number): string;
readStandardString(length: number): string;
readStringTillNull(keepEvenByte?: boolean): string;
writeBoolean(value: boolean): void;
writeByte(value: number): void;
writeUnsignedByte(value: number): void;
writeBytes(_bytes: ByteArrayBase, offset?: number, length?: number): void;
writeDouble(value: number): void;
writeFloat(value: number): void;
writeInt(value: number): void;
writeMultiByte(value: string, charSet: string): void;
writeShort(value: number): void;
writeUnsignedShort(value: number): void;
writeUnsignedInt(value: number): void;
writeUTF(value: string): void;
writeUTFBytes(value: string): void;
toString(): string;
writeUint8Array(_bytes: Uint8Array): void;
writeUint16Array(_bytes: Uint16Array): void;
writeUint32Array(_bytes: Uint32Array): void;
writeInt8Array(_bytes: Int8Array): void;
writeInt16Array(_bytes: Int16Array): void;
writeInt32Array(_bytes: Int32Array): void;
writeFloat32Array(_bytes: Float32Array): void;
writeFloat64Array(_bytes: Float64Array): void;
readUint8Array(length: number, createNewBuffer?: boolean): Uint8Array;
readUint16Array(length: number, createNewBuffer?: boolean): Uint16Array;
readUint32Array(length: number, createNewBuffer?: boolean): Uint32Array;
readInt8Array(length: number, createNewBuffer?: boolean): Int8Array;
readInt16Array(length: number, createNewBuffer?: boolean): Int16Array;
readInt32Array(length: number, createNewBuffer?: boolean): Int32Array;
readFloat32Array(length: number, createNewBuffer?: boolean): Float32Array;
readFloat64Array(length: number, createNewBuffer?: boolean): Float64Array;
validate(len: number): boolean;
private validateBuffer(len);
private encodeUTF8(str);
private decodeUTF8(data);
private encoderError(code_point);
private decoderError(fatal, opt_code_point?);
private EOF_byte;
private EOF_code_point;
private inRange(a, min, max);
private div(n, d);
private stringToCodePoints(string);
}
}
declare module "core/src/pointer/src/MemoryUtils" {
export class MemoryUtils {
static i8: Int8Array;
static ui16: Uint16Array;
static ui32: Uint32Array;
static i32: Int32Array;
static i16: Int16Array;
static f32: Float32Array;
static f64: Float64Array;
static ui32mem: Uint8Array;
static ui16mem: Uint8Array;
static i32mem: Uint8Array;
static i16mem: Uint8Array;
static f32mem: Uint8Array;
static f64mem: Uint8Array;
static readUint16(memory: Uint8Array, offset: number, littleEndian?: boolean): number;
static writeUint16(memory: Uint8Array, offset: number, value: number, littleEndian?: boolean): number;
static readInt16(memory: Uint8Array, offset: number, littleEndian?: boolean): number;
static writeInt16(memory: Uint8Array, offset: number, value: number, littleEndian?: boolean): number;
static readInt32(memory: Uint8Array, offset: number, littleEndian?: boolean): number;
static writeInt32(memory: Uint8Array, offset: number, value: number, littleEndian?: boolean): number;
static readUint32(memory: Uint8Array, offset: number, littleEndian?: boolean): number;
static writeUint32(memory: Uint8Array, offset: number, value: number, littleEndian?: boolean): number;
static readFloat32(memory: Uint8Array, offset: number, littleEndian?: boolean): number;
static writeFloat32(memory: Uint8Array, offset: number, value: number, littleEndian?: boolean): number;
static readFloat64(memory: Uint8Array, offset: number, littleEndian?: boolean): number;
static writeFloat64(memory: Uint8Array, offset: number, value: number, littleEndian?: boolean): number;
}
}
declare module "core/src/pointer/src/UTF8" {
export class UTF8 {
static instance: UTF8;
static encode(str: string): Uint8Array;
static decode(data: Uint8Array): string;
constructor();
encode(str: string): Uint8Array;
decode(data: Uint8Array): string;
private encoderError(code_point);
private decoderError(fatal, opt_code_point?);
private EOF_byte;
private EOF_code_point;
private inRange(a, min, max);
private div(n, d);
private stringToCodePoints(string);
}
}
declare module "core/src/pointer/src/DirectMemory" {
export class DirectMemory {
buffer: ArrayBuffer;
private offset;
static BIG_ENDIAN: string;
static LITTLE_ENDIAN: string;
static MIN_FLOAT32_VALUE: number;
static SIZE_OF_BOOLEAN: number;
static SIZE_OF_INT8: number;
static SIZE_OF_INT16: number;
static SIZE_OF_INT32: number;
static SIZE_OF_INT64: number;
static SIZE_OF_UINT8: number;
static SIZE_OF_UINT16: number;
static SIZE_OF_UINT32: number;
static SIZE_OF_UINT64: number;
static SIZE_OF_FLOAT32: number;
static SIZE_OF_FLOAT64: number;
private BUFFER_EXT_SIZE;
data: Uint8Array;
private _position;
write_position: number;
endian: string;
constructor(buffer?: ArrayBuffer, offset?: number, length?: number);
phyPosition: number;
bufferOffset: number;
position: number;
length: number;
bytesAvailable: number;
clear(): void;
setBuffer(buffer: ArrayBuffer, offset?: number, length?: number): void;
readBoolean(): boolean;
readByte(): number;
readBytes(_bytes?: DirectMemory, offset?: number, length?: number, createNewBuffer?: boolean): DirectMemory;
readDouble(): number;
readFloat(): number;
readInt(): number;
readMultiByte(length: number, charSet?: string): string;
readShort(): number;
readUnsignedByte(): number;
readUnsignedInt(): number;
readVariableSizedUnsignedInt(): number;
readU16VX(): number;
readUnsignedShort(): number;
readUTF(): string;
readUTFBytes(length: number): string;
readStandardString(length: number): string;
readStringTillNull(keepEvenByte?: boolean): string;
writeBoolean(value: boolean): void;
writeByte(value: number): void;
writeUnsignedByte(value: number): void;
writeBytes(_bytes: DirectMemory, offset?: number, length?: number): void;
writeDouble(value: number): void;
writeFloat(value: number): void;
writeInt(value: number): void;
writeMultiByte(value: string, charSet: string): void;
writeShort(value: number): void;
writeUnsignedShort(value: number): void;
writeUnsignedInt(value: number): void;
writeUTF(value: string): void;
writeUTFBytes(value: string): void;
toString(): string;
writeUint8Array(_bytes: Uint8Array): void;
writeUint16Array(_bytes: Uint16Array): void;
writeUint32Array(_bytes: Uint32Array): void;
writeInt8Array(_bytes: Int8Array): void;
writeInt16Array(_bytes: Int16Array): void;
writeInt32Array(_bytes: Int32Array): void;
writeFloat32Array(_bytes: Float32Array): void;
writeFloat64Array(_bytes: Float64Array): void;
readUint8Array(length: number, createNewBuffer?: boolean): Uint8Array;
readUint16Array(length: number, createNewBuffer?: boolean): Uint16Array;
readUint32Array(length: number, createNewBuffer?: boolean): Uint32Array;
readInt8Array(length: number, createNewBuffer?: boolean): Int8Array;
readInt16Array(length: number, createNewBuffer?: boolean): Int16Array;
readInt32Array(length: number, createNewBuffer?: boolean): Int32Array;
readFloat32Array(length: number, createNewBuffer?: boolean): Float32Array;
readFloat64Array(length: number, createNewBuffer?: boolean): Float64Array;
validate(len: number): boolean;
private validateBuffer(len);
}
}
declare module "core/src/pointer/src/IPointer" {
import { ByteArrayBase } from "core/src/pointer/src/ByteArrayBase";
import { DirectMemory } from "core/src/pointer/src/DirectMemory";
export interface IPointer {
memorySize: number;
write(memory: ByteArrayBase | DirectMemory): number;
directWrite(memory: Uint8Array, offset: number): number;
read(memory: ByteArrayBase | DirectMemory): number;
directRead(memory: Uint8Array, offset: number): number;
}
}
declare module "core/src/pointer/src/Pointer" {
import { IPointer } from "core/src/pointer/src/IPointer";
import { DirectMemory } from "core/src/pointer/src/DirectMemory";
export class Pointer {
private reference;
static offset: number;
static heap: Uint8Array;
static memory: DirectMemory;
static initialized: boolean;
static init(): DirectMemory;
private beginLocation;
private currentLocation;
constructor(reference: IPointer);
read(): IPointer;
}
export function sizeof(ptr: IPointer): number;
}
declare module "core/src/pointer/pointer" {
export * from "core/src/pointer/src/ByteArrayBase";
export * from "core/src/pointer/src/DirectMemory";
export * from "core/src/pointer/src/MemoryUtils";
export * from "core/src/pointer/src/UTF8";
export * from "core/src/pointer/src/IPointer";
export * from "core/src/pointer/src/Pointer";
}
declare module "core/src/engine/data/DataCache" {
export class DataCache {
private static cache;
static getItem(url: string): any;
static add(url: string, item: any): any;
}
}
declare module "core/src/engine/data/ImageLoader" {
export class ImageLoader {
static crossOrigin: string;
constructor();
load(url: string, onLoad: Function, onProgress: Function, onError: Function): HTMLImageElement;
}
}
declare module "core/src/engine/math/Vector3" {
import { ByteArrayBase } from "core/src/pointer/src/ByteArrayBase";
import { DirectMemory } from "core/src/pointer/src/DirectMemory";
export class Vector3 {
x: number;
y: number;
z: number;
static SIZE: number;
static NullVector: Vector3;
memorySize: number;
constructor(x?: number, y?: number, z?: number);
static fromJson(v: Vector3): Vector3;
setFromArray(a: any, offset?: number): void;
setFromJson(a: any): void;
length(): number;
dot(b: Vector3): number;
cross(b: Vector3): Vector3;
normalize(): Vector3;
add(b: Vector3): Vector3;
sub(b: Vector3): Vector3;
mul(b: Vector3): Vector3;
div(b: Vector3): Vector3;
mulScalar(b: any): Vector3;
divScalar(b: any): Vector3;
min(b: Vector3): Vector3;
max(b: Vector3): Vector3;
minAxis(): Vector3;
minComponent(): number;
reflect(i: Vector3): Vector3;
refract(i: Vector3, n1: any, n2: any): Vector3;
reflectance(i: Vector3, n1: any, n2: any): number;
toString(): string;
equals(v: Vector3): Boolean;
isZero(): Boolean;
directWrite(memory: Float32Array, offset: number): number;
directRead(memory: Float32Array, offset: number): number;
read(memory: ByteArrayBase | DirectMemory): number;
write(memory: ByteArrayBase | DirectMemory): number;
isNullVector(): boolean;
}
}
declare module "core/src/engine/math/Color" {
import { ByteArrayBase } from "core/src/pointer/src/ByteArrayBase";
import { DirectMemory } from "core/src/pointer/src/DirectMemory";
export interface RGBA {
r: number;
g: number;
b: number;
a: number;
}
export class Color {
r: number;
g: number;
b: number;
static SIZE: number;
constructor(r?: number, g?: number, b?: number);
directWrite(mem: Float32Array, offset: number): number;
directRead(mem: Float32Array, offset: number): number;
read(memory: ByteArrayBase | DirectMemory): number;
write(memory: ByteArrayBase | DirectMemory): number;
static fromJson(color: Color): Color;
static hexColor(hex: number): Color;
static newColor(c: RGBA): Color;
RGBA(): RGBA;
add(b: Color): Color;
sub(b: Color): Color;
mul(b: Color): Color;
mulScalar(b: number): Color;
divScalar(b: number): Color;
min(b: Color): Color;
max(b: Color): Color;
pow(b: number): Color;
mix(b: Color, pct: number): Color;
set(r: number, g: number, b: number): Color;
clone(): Color;
}
}
declare module "core/src/engine/math/Constants" {
export const INF: number;
export const EPS: number;
export const shift: number;
export const uvnan: number;
export const uvinf: number;
export const uvneginf: number;
export const mask: number;
export const bias: number;
}
declare module "core/src/engine/utils/MathUtils" {
export class MathUtils {
static radians(degrees: number): number;
static degrees(radians: number): number;
static median(items: number[]): number;
static fract(x: number): number;
static Modf(f: any): {
int: number;
frac: number;
};
static clampInt(x: number, lo: number, hi: number): number;
}
}
declare module "core/src/engine/scene/materials/Texture" {
import { Color } from "core/src/engine/math/Color";
import { Vector3 } from "core/src/engine/math/Vector3";
import { ImageLoader } from "core/src/engine/data/ImageLoader";
export class Texture extends ImageLoader {
static map: Map<string, Texture>;
static getTexture(url: any): Texture;
static fromJson(texture: Texture): Texture;
private static ctx;
sourceFile: string;
loaded: boolean;
width: number;
height: number;
image: HTMLImageElement;
data: Color[];
pixels: number[] | Uint8ClampedArray;
constructor(url?: string);
sample(u: number, v: number): Color;
normalSample(u: number, v: number): Vector3;
bumpSample(u: number, v: number): Vector3;
load(url: string, onLoad?: Function, onProgress?: Function, onError?: Function): HTMLImageElement;
}
}
declare module "core/src/engine/scene/materials/Attenuation" {
import { ByteArrayBase } from "core/src/pointer/src/ByteArrayBase";
import { DirectMemory } from "core/src/pointer/src/DirectMemory";
export class Attenuation {
constant: number;
linear: number;
quadratic: number;
static SIZE: number;
constructor(constant?: number, linear?: number, quadratic?: number);
static fromJson(attenuation: Attenuation): Attenuation;
compute(d: number): number;
set(attenation: Attenuation): Attenuation;
clone(): Attenuation;
directWrite(mem: Float32Array, offset: number): number;
directRead(mem: Float32Array, offset: number): number;
read(memory: ByteArrayBase | DirectMemory): number;
write(memory: ByteArrayBase | DirectMemory): number;
}
export const NoAttenuation: Attenuation;
export class LinearAttenuation extends Attenuation {
constructor(value: number);
}
export class QuadraticAttenuation extends Attenuation {
constructor(value: number);
}
}
declare module "core/src/engine/scene/materials/Material" {
import { Color } from "core/src/engine/math/Color";
import { Texture } from "core/src/engine/scene/materials/Texture";
import { Attenuation } from "core/src/engine/scene/materials/Attenuation";
import { ByteArrayBase } from "core/src/pointer/src/ByteArrayBase";
import { DirectMemory } from "core/src/pointer/src/DirectMemory";
export enum MaterialType {
GENERIC = 0,
DIFFUSE = 1,
SPECULAR = 2,
CLEAR = 3,
GLOSSY = 4,
EMISSIVE = 5,
}
export class Material {
color: Color;
texture: Texture;
normalTexture: Texture;
bumpTexture: Texture;
bumpMultiplier: number;
emittance: number;
attenuation: Attenuation;
ior: number;
gloss: number;
tint: number;
transparent: boolean;
static SIZE: number;
static map: Array<Material>;
type: MaterialType;
index: number;
constructor(color?: Color, texture?: Texture, normalTexture?: Texture, bumpTexture?: Texture, bumpMultiplier?: number, emittance?: number, attenuation?: Attenuation, ior?: number, gloss?: number, tint?: number, transparent?: boolean);
clone(): Material;
directRead(memory: Float32Array, offset: number): number;
directWrite(memory: Float32Array, offset: number): number;
read(memory: ByteArrayBase | DirectMemory): number;
write(memory: ByteArrayBase | DirectMemory): number;
static estimatedMemory: number;
static directWrite(memory: Float32Array, offset: number): number;
static directRestore(memory: Float32Array, offset?: number): number;
static write(memory: ByteArrayBase | DirectMemory): number;
static restore(memory: ByteArrayBase | DirectMemory): number;
}
}
declare module "core/src/engine/scene/Axis" {
export enum Axis {
AxisNone = 0,
AxisX = 1,
AxisY = 2,
AxisZ = 3,
}
}
declare module "core/src/engine/math/Ray" {
import { Vector3 } from "core/src/engine/math/Vector3";
import { HitInfo } from "core/src/engine/math/HitInfo";
export class Ray {
origin: Vector3;
direction: Vector3;
data: Float32Array;
constructor(origin?: Vector3, direction?: Vector3);
position(t: number): Vector3;
reflect(i: Ray): Ray;
Refract(i: Ray, n1: number, n2: number): Ray;
reflectance(i: Ray, n1: number, n2: number): number;
weightedBounce(u: any, v: number): Ray;
coneBounce(theta: any, u: any, v: number): Ray;
bounce(info: HitInfo, p: number, u: number, v: number): {
ray: Ray;
reflected: boolean;
};
toString(): string;
}
}
declare module "core/src/engine/math/HitInfo" {
import { Ray } from "core/src/engine/math/Ray";
import { Shape } from "core/src/engine/scene/shapes/Shape";
import { Vector3 } from "core/src/engine/math/Vector3";
import { Color } from "core/src/engine/math/Color";
import { Material } from "core/src/engine/scene/materials/Material";
export class HitInfo {
shape: Shape;
position: Vector3;
normal: Vector3;
ray: Ray;
color: Color;
material: Material;
inside: boolean;
constructor(shape: Shape, position: Vector3, normal: Vector3, ray: Ray, color: Color, material: Material, inside: boolean);
}
}
declare module "core/src/engine/math/Hit" {
import { Shape } from "core/src/engine/scene/shapes/Shape";
import { HitInfo } from "core/src/engine/math/HitInfo";
import { Ray } from "core/src/engine/math/Ray";
export class Hit {
shape: Shape;
T: number;
info: HitInfo;
constructor(shape: Shape, T: number, info?: HitInfo);
ok(): boolean;
getInfo(ray: Ray): HitInfo;
}
export var NoHit: Hit;
}
declare module "core/src/engine/scene/materials/DiffuseMaterial" {
import { Material } from "core/src/engine/scene/materials/Material";
import { Color } from "core/src/engine/math/Color";
import { MaterialType } from "core/src/engine/scene/materials/Material";
export class DiffuseMaterial extends Material {
type: MaterialType;
constructor(color: Color);
}
}
declare module "core/src/engine/scene/materials/SpecularMaterial" {
import { Material } from "core/src/engine/scene/materials/Material";
import { Color } from "core/src/engine/math/Color";
import { MaterialType } from "core/src/engine/scene/materials/Material";
export class SpecularMaterial extends Material {
type: MaterialType;
constructor(color: Color, index: number);
}
}
declare module "core/src/engine/scene/materials/ClearMaterial" {
import { Material } from "core/src/engine/scene/materials/Material";
import { MaterialType } from "core/src/engine/scene/materials/Material";
export class ClearMaterial extends Material {
type: MaterialType;
constructor(index: number, gloss: number);
}
}
declare module "core/src/engine/scene/materials/GlossyMaterial" {
import { Material } from "core/src/engine/scene/materials/Material";
import { Color } from "core/src/engine/math/Color";
import { MaterialType } from "core/src/engine/scene/materials/Material";
export class GlossyMaterial extends Material {
type: MaterialType;
constructor(color: Color, index: number, gloss: number);
}
}
declare module "core/src/engine/scene/materials/LightMaterial" {
import { Material } from "core/src/engine/scene/materials/Material";
import { Color } from "core/src/engine/math/Color";
import { Attenuation } from "core/src/engine/scene/materials/Attenuation";
import { MaterialType } from "core/src/engine/scene/materials/Material";
export class LightMaterial extends Material {
type: MaterialType;
constructor(color: Color, emittance: number, attenuation: Attenuation);
}
}
declare module "core/src/engine/scene/materials/MaterialUtils" {
import { Material } from "core/src/engine/scene/materials/Material";
export class MaterialUtils {
static fromJson(material: Material): Material;
static debug: boolean;
}
}
declare module "core/src/engine/scene/shapes/Cube" {
import { Vector3 } from "core/src/engine/math/Vector3";
import { Material } from "core/src/engine/scene/materials/Material";
import { Box } from "core/src/engine/scene/shapes/Box";
import { Shape } from "core/src/engine/scene/shapes/Shape";
import { Ray } from "core/src/engine/math/Ray";
import { Color } from "core/src/engine/math/Color";
import { Hit } from "core/src/engine/math/Hit";
import { ShapeType } from "core/src/engine/scene/shapes/Shape";
import { ByteArrayBase } from "core/src/pointer/src/ByteArrayBase";
import { DirectMemory } from "core/src/pointer/src/DirectMemory";
export class Cube implements Shape {
min: Vector3;
max: Vector3;
material: Material;
box: Box;
type: ShapeType;
memorySize: number;
index: number;
constructor(min?: Vector3, max?: Vector3, material?: Material, box?: Box);
write(memory: ByteArrayBase | DirectMemory): number;
read(memory: ByteArrayBase | DirectMemory): number;
directWrite(memory: Float32Array, offset: number): number;
directRead(memory: Float32Array, offset: number): number;
static fromJson(shape: Cube): Cube;
static newCube(min: Vector3, max: Vector3, material: Material): Shape;
compile(): void;
intersect(r: Ray): Hit;
getColor(p: Vector3): Color;
getMaterial(p: Vector3): Material;
getNormal(p: Vector3): Vector3;
getRandomPoint(): Vector3;
}
}
declare module "core/src/engine/scene/shapes/Sphere" {
import { Vector3 } from "core/src/engine/math/Vector3";
import { Material } from "core/src/engine/scene/materials/Material";
import { Box } from "core/src/engine/scene/shapes/Box";
import { Shape } from "core/src/engine/scene/shapes/Shape";
import { Ray } from "core/src/engine/math/Ray";
import { Hit } from "core/src/engine/math/Hit";
import { Color } from "core/src/engine/math/Color";
import { ShapeType } from "core/src/engine/scene/shapes/Shape";
import { ByteArrayBase } from "core/src/pointer/src/ByteArrayBase";
import { DirectMemory } from "core/src/pointer/src/DirectMemory";
export class Sphere implements Shape {
center: Vector3;
radius: number;
material: Material;
box: Box;
type: ShapeType;
memorySize: number;
index: number;
constructor(center?: Vector3, radius?: number, material?: Material, box?: Box);
directRead(memory: Float32Array, offset: number): number;
directWrite(memory: Float32Array, offset: number): number;
read(memory: ByteArrayBase | DirectMemory): number;
write(memory: ByteArrayBase | DirectMemory): number;
static fromJson(sphere: Sphere): Sphere;
static newSphere(center: Vector3, radius: number, material: Material): Shape;
compile(): void;
intersect(r: Ray): Hit;
getColor(p: Vector3): Color;
getMaterial(p: Vector3): Material;
getNormal(p: Vector3): Vector3;
getRandomPoint(): Vector3;
}
}
declare module "core/src/engine/math/Matrix4" {
import { Vector3 } from "core/src/engine/math/Vector3";
import { Box } from "core/src/engine/scene/shapes/Box";
import { Ray } from "core/src/engine/math/Ray";
import { ByteArrayBase } from "core/src/pointer/src/ByteArrayBase";
import { DirectMemory } from "core/src/pointer/src/DirectMemory";
export class Matrix4 {
x00: number;
x01: number;
x02: number;
x03: number;
x10: number;
x11: number;
x12: number;
x13: number;
x20: number;
x21: number;
x22: number;
x23: number;
x30: number;
x31: number;
x32: number;
x33: number;
static SIZE: number;
m: Float32Array;
constructor(x00?: number, x01?: number, x02?: number, x03?: number, x10?: number, x11?: number, x12?: number, x13?: number, x20?: number, x21?: number, x22?: number, x23?: number, x30?: number, x31?: number, x32?: number, x33?: number);
directRead(memory: Float32Array, offset: number): number;
directWrite(memory: Float32Array, offset: number): number;
read(memory: ByteArrayBase | DirectMemory): number;
write(memory: ByteArrayBase | DirectMemory): number;
static fromJson(m: Matrix4): Matrix4;
static fromTHREEJS(e: number[]): Matrix4;
static identity(): Matrix4;
static translate(v: Vector3): Matrix4;
static scale(v: Vector3): Matrix4;
static rotate(v: Vector3, a: number): Matrix4;
static frustum(l: number, r: number, b: number, t: number, n: number, f: number): Matrix4;
static orthographic(l: number, r: number, b: number, t: number, n: number, f: number): Matrix4;
static perspective(fov: number, aspect: number, near: number, far: number): Matrix4;
static LookAtMatrix(eye: Vector3, center: Vector3, up: Vector3, fovy: number): Matrix4;
translate(v: Vector3): Matrix4;
scale(v: Vector3): Matrix4;
rotate(v: Vector3, a: number): Matrix4;
frustum(l: number, r: number, b: number, t: number, n: number, f: number): Matrix4;
orthographic(l: number, r: number, b: number, t: number, n: number, f: number): Matrix4;
perspective(fov: any, aspect: any, near: any, far: number): Matrix4;
mul(b: Matrix4): Matrix4;
mulPosition(b: Vector3): Vector3;
mulDirection(b: Vector3): Vector3;
mulRay(b: Ray): Ray;
mulBox(box: Box): Box;
transpose(): Matrix4;
determinant(): number;
inverse(): Matrix4;
}
}
declare module "core/src/engine/scene/shapes/TransformedShape" {
import { Box } from "core/src/engine/scene/shapes/Box";
import { Hit } from "core/src/engine/math/Hit";
import { Ray } from "core/src/engine/math/Ray";
import { Vector3 } from "core/src/engine/math/Vector3";
import { Material } from "core/src/engine/scene/materials/Material";
import { Color } from "core/src/engine/math/Color";
import { Shape, ShapeType } from "core/src/engine/scene/shapes/Shape";
import { ByteArrayBase } from "core/src/pointer/src/ByteArrayBase";
import { DirectMemory } from "core/src/pointer/src/DirectMemory";
import { Matrix4 } from "core/src/engine/math/Matrix4";
export class TransformedShape implements Shape {
shape: Shape;
matrix: Matrix4;
inverse: Matrix4;
normalMatrix: THREE.Matrix3;
type: ShapeType;
index: number;
memorySize: number;
constructor(shape?: Shape, matrix?: Matrix4, inverse?: Matrix4, normalMatrix?: THREE.Matrix3);
directRead(memory: Float32Array, offset: number): number;
directWrite(memory: Float32Array, offset: number): number;
read(memory: ByteArrayBase | DirectMemory): number;
write(memory: ByteArrayBase | DirectMemory): number;
static fromJson(transformedShape: TransformedShape): TransformedShape;
static newTransformedShape(s: Shape, m: Matrix4): Shape;
box: Box;
compile(): void;
intersect(r: Ray): Hit;
getColor(p: Vector3): Color;
getMaterial(p: Vector3): Material;
getNormal(p: Vector3): Vector3;
getRandomPoint(): Vector3;
}
}
declare module "core/src/engine/scene/shapes/Shape" {
import { Box } from "core/src/engine/scene/shapes/Box";
import { Hit } from "core/src/engine/math/Hit";
import { Color } from "core/src/engine/math/Color";
import { Material } from "core/src/engine/scene/materials/Material";
import { Vector3 } from "core/src/engine/math/Vector3";
import { Ray } from "core/src/engine/math/Ray";
import { IPointer } from "core/src/pointer/src/IPointer";
import { ByteArrayBase } from "core/src/pointer/src/ByteArrayBase";
import { DirectMemory } from "core/src/pointer/src/DirectMemory";
export enum ShapeType {
TRIANGLE = 0,
CUBE = 1,
SPHERE = 2,
MESH = 3,
TRANSFORMED_SHAPE = 4,
}
export interface Shape extends IPointer {
index: number;
type: ShapeType;
box: Box;
compile(): any;
intersect(r: Ray): Hit;
getColor(p: Vector3): Color;
getMaterial(p: Vector3): Material;
getNormal(p: Vector3): Vector3;
getRandomPoint(): Vector3;
directWrite(memory: Uint8Array, offset: number): number;
}
export function ShapesfromJson(shapes: Shape[]): Shape[];
export function ShapefromJson(shape: Shape): Shape;
export function directRestoreShape(memory: Float32Array, offset: number, container: Shape[]): number;
export function restoreShape(memory: ByteArrayBase | DirectMemory, container: Shape[]): number;
}
declare module "core/src/engine/scene/shapes/Box" {
import { Vector3 } from "core/src/engine/math/Vector3";
import { Axis } from "core/src/engine/scene/Axis";
import { Triangle } from "core/src/engine/scene/shapes/Triangle";
import { Shape } from "core/src/engine/scene/shapes/Shape";
import { Ray } from "core/src/engine/math/Ray";
import { IPointer } from "core/src/pointer/src/IPointer";
import { ByteArrayBase } from "core/src/pointer/src/ByteArrayBase";
import { DirectMemory } from "core/src/pointer/src/DirectMemory";
export class Box implements IPointer {
min: Vector3;
max: Vector3;
static SIZE: number;
memorySize: number;
constructor(min?: Vector3, max?: Vector3);
directWrite(memory: Float32Array, offset: number): number;
directRead(memory: Float32Array, offset: number): number;
read(memory: ByteArrayBase | DirectMemory): number;
write(memory: ByteArrayBase | DirectMemory): number;
static fromJson(box: Box): Box;
static boxForShapes(shapes: Array<Shape>): Box;
static boxForTriangles(shapes: Array<Triangle>): Box;
anchor(anchor: Vector3): Vector3;
center(): Vector3;
size(): Vector3;
extend(b: Box): Box;
intersect(r: Ray): any;
partition(axis: Axis, point: number): {
left: boolean;
right: boolean;
};
toString(): string;
}
}
declare module "core/src/engine/scene/shapes/Triangle" {
import { Material } from "core/src/engine/scene/materials/Material";
import { Box } from "core/src/engine/scene/shapes/Box";
import { Vector3 } from "core/src/engine/math/Vector3";
import { Ray } from "core/src/engine/math/Ray";
import { Hit } from "core/src/engine/math/Hit";
import { Color } from "core/src/engine/math/Color";
import { Shape } from "core/src/engine/scene/shapes/Shape";
import { ShapeType } from "core/src/engine/scene/shapes/Shape";
import { ByteArrayBase } from "core/src/pointer/src/ByteArrayBase";
import { DirectMemory } from "core/src/pointer/src/DirectMemory";
export class Triangle implements Shape {
material: Material;
box: Box;
v1: Vector3;
v2: Vector3;
v3: Vector3;
n1: Vector3;
n2: Vector3;
n3: Vector3;
t1: Vector3;
t2: Vector3;
t3: Vector3;
static SIZE: number;
type: ShapeType;
memorySize: number;
index: number;
private data;
constructor(material?: Material, box?: Box, v1?: Vector3, v2?: Vector3, v3?: Vector3, n1?: Vector3, n2?: Vector3, n3?: Vector3, t1?: Vector3, t2?: Vector3, t3?: Vector3);
directRead(memory: Float32Array, offset: number): number;
directWrite(memory: Float32Array, offset: number): number;
read(memory: ByteArrayBase | DirectMemory): number;
write(memory: ByteArrayBase | DirectMemory): number;
static fromJson(triangles: Triangle | Triangle[]): Triangle | Triangle[];
static newTriangle(v1: Vector3, v2: Vector3, v3: Vector3, t1: Vector3, t2: Vector3, t3: Vector3, material: Material): Triangle;
compile(): void;
vertices: Vector3[];
intersect(r: Ray): Hit;
getColor(p: Vector3): Color;
getMaterial(p: Vector3): Material;
getNormal(p: Vector3): Vector3;
getRandomPoint(): Vector3;
area(): number;
baryCentric(p: Vector3): {
u: number;
v: number;
w: number;
};
updateBox(): void;
fixNormals(): void;
}
}
declare module "core/src/engine/utils/MapUtils" {
export function append(slice: Array<any>, ...elements: any[]): Array<any>;
export function sortAscending(slice: any): void;
export function sortDescending(slice: any): void;
}
declare module "core/src/engine/scene/tree/Node" {
import { Axis } from "core/src/engine/scene/Axis";
import { Shape } from "core/src/engine/scene/shapes/Shape";
import { Ray } from "core/src/engine/math/Ray";
import { Hit } from "core/src/engine/math/Hit";
export class Node {
axis: Axis;
point: number;
shapes: Shape[];
left: Node;
right: Node;
static map: Array<Node>;
index: number;
constructor(axis: Axis, point: number, shapes: Shape[], left: Node, right: Node);
static newNode(shapes: Shape[]): Node;
intersect(r: Ray, tmin: number, tmax: number): Hit;
intersectShapes(r: Ray): Hit;
partitionScore(axis: Axis, point: number): number;
partition(size: number, axis: Axis, point: number): {
left: Shape[];
right: Shape[];
};
split(depth: number): void;
}
}
declare module "core/src/engine/scene/tree/Tree" {
import { Box } from "core/src/engine/scene/shapes/Box";
import { Node } from "core/src/engine/scene/tree/Node";
import { Shape } from "core/src/engine/scene/shapes/Shape";
import { Hit } from "core/src/engine/math/Hit";
import { Ray } from "core/src/engine/math/Ray";
export class Tree {
box: Box;
root: Node;
constructor(box: Box, root: Node);
static newTree(shapes: Shape[], box?: Box): Tree;
intersect(r: Ray): Hit;
}
}
declare module "core/src/engine/scene/tree/SharedNode" {
import { Axis } from "core/src/engine/scene/Axis";
import { Shape } from "core/src/engine/scene/shapes/Shape";
import { Ray } from "core/src/engine/math/Ray";
import { Hit } from "core/src/engine/math/Hit";
import { Mesh } from "core/src/engine/scene/shapes/Mesh";
import { ByteArrayBase } from "core/src/pointer/src/ByteArrayBase";
import { DirectMemory } from "core/src/pointer/src/DirectMemory";
export enum NodeMarker {
ROOT = 1118481,
LEFT = 15597585,
RIGHT = 1114350,
LEAF = 15597806,
EON = 14737632,
NULL = 15658734,
}
export class SharedNode {
axis: Axis;
point: number;
shapes: Shape[];
shapeIndices: number[];
private _left;
private _right;
static map: Array<SharedNode>;
index: number;
marker: NodeMarker;
mesh: Mesh;
size: number;
treeLength: number;
memory: ByteArrayBase | DirectMemory;
thisPtr: number;
leftPtr: number;
rightPtr: number;
resolved: boolean;
constructor(axis?: Axis, point?: number, shapes?: Shape[], shapeIndices?: number[], _left?: SharedNode, _right?: SharedNode);
left: SharedNode;
right: SharedNode;
readRoot(memory: ByteArrayBase | DirectMemory): number;
read(memory: ByteArrayBase | DirectMemory): number;
readChild(memory: ByteArrayBase | DirectMemory, marker: NodeMarker): number;
static newNode(shapes: Shape[], memory?: ByteArrayBase | DirectMemory): SharedNode;
static fromJson(node: SharedNode): SharedNode;
intersect(r: Ray, tmin: number, tmax: number): Hit;
intersectNode(node: SharedNode, r: Ray, tmin: number, tmax: number): Hit;
intersectShapes(node: SharedNode, r: Ray): Hit;
partitionScore(axis: Axis, point: number): number;
partition(size: number, axis: Axis, point: number): {
left: Shape[];
right: Shape[];
};
split(depth: number): boolean;
}
}
declare module "core/src/engine/scene/tree/SharedTree" {
import { Box } from "core/src/engine/scene/shapes/Box";
import { Shape } from "core/src/engine/scene/shapes/Shape";
import { Hit } from "core/src/engine/math/Hit";
import { Ray } from "core/src/engine/math/Ray";
import { SharedNode } from "core/src/engine/scene/tree/SharedNode";
import { Mesh } from "core/src/engine/scene/shapes/Mesh";
import { ByteArrayBase } from "core/src/pointer/src/ByteArrayBase";
import { DirectMemory } from "core/src/pointer/src/DirectMemory";
export class SharedTree {
box: Box;
root: SharedNode;
constructor(box: Box, root: SharedNode);
static newTree(shapes: Shape[], box?: Box): SharedTree;
intersect(r: Ray): Hit;
static fromJson(tree: SharedTree, mesh: Mesh): SharedTree;
static readFromMemory(memory: ByteArrayBase | DirectMemory, shapes: Shape[]): SharedTree;
static buildAndWrite(memory: ByteArrayBase | DirectMemory, shapes: Shape[]): number;
}
}
declare module "core/src/engine/scene/shapes/Mesh" {
import { Triangle } from "core/src/engine/scene/shapes/Triangle";
import { Matrix4 } from "core/src/engine/math/Matrix4";
import { Vector3 } from "core/src/engine/math/Vector3";
import { Hit } from "core/src/engine/math/Hit";
import { Ray } from "core/src/engine/math/Ray";
import { Shape } from "core/src/engine/scene/shapes/Shape";
import { Color } from "core/src/engine/math/Color";
import { Material } from "core/src/engine/scene/materials/Material";
import { Tree } from "core/src/engine/scene/tree/Tree";
import { Box } from "core/src/engine/scene/shapes/Box";
import { ShapeType } from "core/src/engine/scene/shapes/Shape";
import { SharedTree } from "core/src/engine/scene/tree/SharedTree";
import { ByteArrayBase } from "core/src/pointer/src/ByteArrayBase";
import { DirectMemory } from "core/src/pointer/src/DirectMemory";
export class Mesh implements Shape {
box: Box;
triangles: Triangle[];
tree: Tree | SharedTree;
type: ShapeType;
index: number;
material: Material;
memorySize: number;
constructor(box?: Box, triangles?: Triangle[], tree?: Tree | SharedTree);
directRead(memory: Float32Array, offset: number): number;
directWrite(memory: Float32Array, offset: number): number;
read(memory: ByteArrayBase | DirectMemory): number;
write(memory: ByteArrayBase | DirectMemory): number;
static fromJson(mesh: Mesh): Mesh;
static newMesh(triangles: Triangle[]): Mesh;
compile(): void;
static inter: number;
intersect(r: Ray): Hit;
getColor(p: Vector3): Color;
getMaterial(p: Vector3): Material;
getNormal(p: Vector3): Vector3;
getRandomPoint(): Vector3;
updateBox(): void;
private _smoothNormalsThreshold(normal, normals, threshold);
smoothNormalsThreshold(radians: number): void;
smoothNormals(): void;
moveTo(position: Vector3, anchor: Vector3): void;
fitInside(box: Box, anchor: Vector3): void;
transform(matrix: Matrix4): void;
}
}
declare module "core/src/engine/data/OBJLoader" {
import { Mesh } from "core/src/engine/scene/shapes/Mesh";
import { Material } from "core/src/engine/scene/materials/Material";
export class OBJLoader {
parentMaterial: Material;
lastMesh: Mesh;
materials: Map<string, Material>;
private hasMaterials;
private materialsLoaded;
private materialsLoading;
private pendingCallback;
private basePath;
constructor();
load(url: string, onLoad: Function): Mesh;
static parseIndex(value: string, length: number): number;
static parseLine(line: string): {
keyword: string;
value: string[];
};
static parseFloats(fs: string[]): number[];
loadOBJ(data: string): Mesh;
getMaterial(index: string): Material;
loadMTL(url: string): any;
}
}
declare module "core/src/engine/math/TMatrix4" {
import { Vector3 } from "core/src/engine/math/Vector3";
import { Box } from "core/src/engine/scene/shapes/Box";
import { Ray } from "core/src/engine/math/Ray";
import { ByteArrayBase } from "core/src/pointer/src/ByteArrayBase";
import { DirectMemory } from "core/src/pointer/src/DirectMemory";
export class TMatrix4 {
static SIZE: number;
tm: THREE.Matrix4;
x00: number;
x01: number;
x02: number;
x03: number;
x10: number;
x11: number;
x12: number;
x13: number;
x20: number;
x21: number;
x22: number;
x23: number;
x30: number;
x31: number;
x32: number;
x33: number;
constructor(x00?: any, x01?: number, x02?: number, x03?: number, x10?: number, x11?: number, x12?: number, x13?: number, x20?: number, x21?: number, x22?: number, x23?: number, x30?: number, x31?: number, x32?: number, x33?: number);
directRead(memory: Float32Array, offset: number): number;
directWrite(memory: Float32Array, offset: number): number;
read(memory: ByteArrayBase | DirectMemory): number;
write(memory: ByteArrayBase | DirectMemory): number;
static fromJson(m: TMatrix4): TMatrix4;
static identity(): TMatrix4;
static translate(v: Vector3): TMatrix4;
static scale(v: Vector3): TMatrix4;
static rotate(v: Vector3, a: number): TMatrix4;
static frustum(l: number, r: number, b: number, t: number, n: number, f: number): TMatrix4;
static orthographic(l: number, r: number, b: number, t: number, n: number, f: number): TMatrix4;
static perspective(fov: number, aspect: number, near: number, far: number): TMatrix4;
translate(v: Vector3): TMatrix4;
scale(v: Vector3): TMatrix4;
rotate(v: Vector3, a: number): TMatrix4;
frustum(l: number, r: number, b: number, t: number, n: number, f: number): TMatrix4;
orthographic(l: number, r: number, b: number, t: number, n: number, f: number): TMatrix4;
perspective(fov: any, aspect: any, near: any, far: number): TMatrix4;
mul(b: TMatrix4): TMatrix4;
mulPosition(b: Vector3): Vector3;
mulDirection(b: Vector3): Vector3;
mulRay(b: Ray): Ray;
mulBox(b: Box): Box;
transpose(): TMatrix4;
determinant(): number;
inverse(): TMatrix4;
}
}
declare module "core/src/engine/renderer/worker/TraceJob" {
import { Thread } from "core/src/engine/renderer/worker/Thread";
export class TraceJob {
param: any;
extra: any;
static INIT: string;
static INITED: string;
static TRACE: string;
static TRACED: string;
static TERMINATE: string;
static LOCKED: string;
finished: boolean;
runCount: number;
private id;
private _time;
private _lifeCount;
lifeCount: number;
time: number;
constructor(param: any, extra?: any);
start(thread: Thread, onComplete: Function): void;
getTraceParam(): {
init_iterations: number;
};
}
}
declare module "core/src/engine/renderer/worker/ThreadPool" {
import { Thread } from "core/src/engine/renderer/worker/Thread";
export class ThreadPool {
static maxThreads: number;
private static pool;
static getThreads(): Thread[];
}
}
declare module "core/src/engine/scene/Scene" {
import { Color } from "core/src/engine/math/Color";
import { Shape } from "core/src/engine/scene/shapes/Shape";
import { Tree } from "core/src/engine/scene/tree/Tree";
import { Ray } from "core/src/engine/math/Ray";
import { Hit } from "core/src/engine/math/Hit";
import { SharedTree } from "core/src/engine/scene/tree/SharedTree";
export class Scene {
color: Color;
shapes: Shape[];
lights: Shape[];
tree: Tree | SharedTree;
rays: number;
estimatedMemory: number;
shared: boolean;
constructor(color?: Color, shapes?: Shape[], lights?: Shape[], tree?: Tree | SharedTree, rays?: number);
static fromJson(scene: Scene): Scene;
compile(): Scene;
add(shape: Shape): void;
rayCount(): number;
intersect(r: Ray): Hit;
shadow(r: Ray, light: Shape, max: number): boolean;
directLight(n: Ray): Color;
static interval: number;
sample(r: Ray, emission: boolean, samples: number, depth: number): Color;
}
}
declare module "core/src/engine/scene/SharedScene" {
import { Tree } from "core/src/engine/scene/tree/Tree";
import { Color } from "core/src/engine/math/Color";
import { Shape } from "core/src/engine/scene/shapes/Shape";
import { Scene } from "core/src/engine/scene/Scene";
import { SharedTree } from "core/src/engine/scene/tree/SharedTree";
import { ByteArrayBase } from "core/src/pointer/src/ByteArrayBase";
import { DirectMemory } from "core/src/pointer/src/DirectMemory";
export class SharedScene extends Scene {
sharedTreeMap: SharedTree[];
constructor(color?: Color, shapes?: Shape[], lights?: Shape[], tree?: Tree | SharedTree, rays?: number);
getMemory(): DirectMemory;
static getScene(memory: ByteArrayBase | DirectMemory): SharedScene;
}
}
declare module "core/src/engine/renderer/worker/TraceJobManager" {
import { TraceJob } from "core/src/engine/renderer/worker/TraceJob";
import { SharedScene } from "core/src/engine/scene/SharedScene";
export class TraceJobManager {
referenceQueue: TraceJob[];
queue: TraceJob[];
deferredQueue: TraceJob[];
iterations: number;
updatePixels: Function;
static flags: Uint8Array;
private width;
private height;
private pixelMemory;
private sampleMemory;
private sceneMemory;
private flags;
private traceParameters;
private threads;
private initCount;
maxLoop: number;
private currentLoop;
private totalThreads;
private _initialized;
private _finished;
private _await;
private deferredStart;
private stopped;
private lockCount;
initialized: boolean;
isAllLocked: boolean;
finished: boolean;
pixels: Uint8Array;
constructor();
configure(param: any, scene: SharedScene): void;
add(job: TraceJob): void;
init(callback?: any): void;
private initNext(callback);
private onThreadLocked();
private lockAllThreads();
stop(): void;
clear(): void;
private resetTimerId;
restart(): void;
isAllThreadsFree: boolean;
start(): void;
private processQueue(job, thread);
private initDeferredQueue();
}
}
declare module "core/src/engine/renderer/worker/Thread" {
export class Thread {
id: number;
static workerUrl: string;
instance: Worker;
onTraceComplete: Function;
onInitComplete: Function;
onThreadLocked: Function;
initialized: boolean;
private _isTracing;
isTracing: boolean;
constructor(name: string, id: number);
onMessageReceived(event: any): void;
init(param: any, transferable: any[], onInit: Function): void;
trace(param: any, onComplete: Function): void;
send(data: any, buffers?: any): void;
terminate(): void;
}
}
declare module "core/src/engine/scene/Camera" {
import { Vector3 } from "core/src/engine/math/Vector3";
import { Ray } from "core/src/engine/math/Ray";
export class Camera {
p: Vector3;
u: Vector3;
v: Vector3;
w: Vector3;
m: number;
focalDistance: number;
apertureRadius: number;
constructor(p?: Vector3, u?: Vector3, v?: Vector3, w?: Vector3, m?: number, focalDistance?: number, apertureRadius?: number);
static fromJson(camera: Camera): Camera;
static lookAt(eye: any, look: any, up: Vector3, fovy: number): Camera;
updateFromArray(eye: any, look: any, up: any, fovy: number, focus?: number, aperture?: number): void;
updateFromJson(prop: any): void;
setFocus(focalPoint: Vector3, apertureRadius: number): void;
static debug: boolean;
castRay(x: number, y: number, w: number, h: number, u: number, v: number): Ray;
toJSON(): {
p: Vector3;
w: Vector3;
u: Vector3;
v: Vector3;
m: number;
focalDistance: number;
apertureRadius: number;
};
}
}
declare module "core/src/engine/renderer/LiteBucketRenderer" {
import { Camera } from "core/src/engine/scene/Camera";
import { TraceJobManager } from "core/src/engine/renderer/worker/TraceJobManager";
import { SharedScene } from "core/src/engine/scene/SharedScene";
export class LiteBucketRenderer {
static DEBUG: boolean;
traceManager: TraceJobManager;
initialized: boolean;
bucketSize: number;
constructor();
static interval: number;
iterations: number;
render(scene: SharedScene, camera: Camera, width: number, height: number, cameraSamples: number, hitSamples: number, bounces: number, iterations: number, onUpdate: Function): Uint8ClampedArray;
}
}
declare module "core/src/engine/renderer/SmartBucketRenderer" {
import { Camera } from "core/src/engine/scene/Camera";
import { TraceJobManager } from "core/src/engine/renderer/worker/TraceJobManager";
import { SharedScene } from "core/src/engine/scene/SharedScene";
export class SmartBucketRenderer {
static DEBUG: boolean;
traceManager: TraceJobManager;
initialized: boolean;
bucketSize: number;
constructor();
static interval: number;
iterations: number;
updateCameraSamples(newValue: number): void;
updateHitSamples(newValue: number): void;
updateCamera(newValue: any): void;
render(scene: SharedScene, camera: Camera, width: number, height: number, cameraSamples: number, hitSamples: number, bounces: number, iterations: number, blockIterations: number, onUpdate: Function, onInit?: Function): Uint8ClampedArray;
}
}
declare module "core/src/engine/scene/materials/TransparentMaterial" {
import { Material } from "core/src/engine/scene/materials/Material";
import { Color } from "core/src/engine/math/Color";
export class TransparentMaterial extends Material {
constructor(color: Color, index?: number, gloss?: number, tint?: number);
}
}
declare module "core/src/engine/engine" {
export * from "core/src/engine/data/DataCache";
export * from "core/src/engine/data/ImageLoader";
export * from "core/src/engine/data/OBJLoader";
export * from "core/src/engine/math/Color";
export * from "core/src/engine/math/Constants";
export * from "core/src/engine/math/Hit";
export * from "core/src/engine/math/HitInfo";
export * from "core/src/engine/math/Matrix4";
export * from "core/src/engine/math/TMatrix4";
export * from "core/src/engine/math/Ray";
export * from "core/src/engine/math/Vector3";
export * from "core/src/engine/renderer/worker/Thread";
export * from "core/src/engine/renderer/worker/ThreadPool";
export * from "core/src/engine/renderer/worker/TraceJobManager";
export * from "core/src/engine/renderer/worker/TraceJob";
export * from "core/src/engine/renderer/LiteBucketRenderer";
export * from "core/src/engine/renderer/SmartBucketRenderer";
export * from "core/src/engine/scene/Axis";
export * from "core/src/engine/scene/Camera";
export * from "core/src/engine/scene/Scene";
export * from "core/src/engine/scene/SharedScene";
export * from "core/src/engine/scene/materials/Attenuation";
export * from "core/src/engine/scene/materials/Material";
export * from "core/src/engine/scene/materials/DiffuseMaterial";
export * from "core/src/engine/scene/materials/GlossyMaterial";
export * from "core/src/engine/scene/materials/ClearMaterial";
export * from "core/src/engine/scene/materials/LightMaterial";
export * from "core/src/engine/scene/materials/MaterialUtils";
export * from "core/src/engine/scene/materials/SpecularMaterial";
export * from "core/src/engine/scene/materials/Texture";
export * from "core/src/engine/scene/materials/TransparentMaterial";
export * from "core/src/engine/scene/shapes/Box";
export * from "core/src/engine/scene/shapes/Shape";
export * from "core/src/engine/scene/shapes/Cube";
export * from "core/src/engine/scene/shapes/Mesh";
export * from "core/src/engine/scene/shapes/Sphere";
export * from "core/src/engine/scene/shapes/TransformedShape";
export * from "core/src/engine/scene/shapes/Triangle";
export * from "core/src/engine/scene/tree/Node";
export * from "core/src/engine/scene/tree/SharedNode";
export * from "core/src/engine/scene/tree/Tree";
export * from "core/src/engine/scene/tree/SharedTree";
export * from "core/src/engine/utils/MapUtils";
export * from "core/src/engine/utils/MathUtils";
}
declare module "core/src/ThreeObjects" {
export class ThreeObjects {<|fim▁hole|> }
}
declare module "core/src/ThreeJSView" {
export class ThreeJSView {
width: number;
height: number;
container: HTMLElement;
appContainer: HTMLElement;
camera: THREE.PerspectiveCamera;
scene: THREE.Scene;
renderer: THREE.WebGLRenderer;
controls: any;
onCameraChange: Function;
onMouseDown: Function;
onMouseUp: Function;
constructor(width: number, height: number, container: HTMLElement, appContainer: HTMLElement);
animate(): void;
render(): void;
}
}
declare module "core/src/CanvasDisplay" {
export abstract class CanvasDisplay {
i_width: number;
i_height: number;
container: HTMLElement;
canvas: HTMLCanvasElement;
ctx: CanvasRenderingContext2D;
imageData: ImageData;
data: Uint8ClampedArray | number[];
constructor(i_width?: number, i_height?: number, container?: HTMLElement);
attachDom(dom: HTMLElement): void;
onWindowResize(): void;
setResolution(width: number, height: number): void;
updatePixels(pixels: Uint8ClampedArray): void;
updatePixelsRect(rect: any, pixels: Uint8ClampedArray): void;
}
}
declare module "core/src/GIRenderBase" {
import { CanvasDisplay } from "core/src/CanvasDisplay";
import { SmartBucketRenderer } from "core/src/engine/renderer/SmartBucketRenderer";
import { Camera } from "core/src/engine/scene/Camera";
import { SharedScene } from "core/src/engine/scene/SharedScene";
export abstract class GIRenderBase extends CanvasDisplay {
protected renderer: SmartBucketRenderer;
protected pixels: Uint8ClampedArray;
scene: SharedScene;
protected camera: Camera;
cameraSamples: number;
hitSamples: number;
bounces: number;
iterations: number;
blockIterations: number;
dirty: boolean;
constructor(i_width?: number, i_height?: number, container?: HTMLElement);
updateCameraSamples(newValue: number): void;
updateHitSamples(newValue: number): void;
updateCamera(newValue: any): void;
updateCameraMatrix(matrix: number[]): void;
toggleTrace(newValue: boolean): void;
render(onInit?: Function): void;
}
}
declare module "core/src/GIJSView" {
import { GIRenderBase } from "core/src/GIRenderBase";
export class GIJSView extends GIRenderBase {
width: number;
height: number;
container: HTMLElement;
constructor(width: number, height: number, container?: HTMLElement);
setThreeJSScene(scene: any, onInit?: Function): void;
private loadChildren(parent);
identityMatrix: THREE.Matrix4;
private buildSceneObject(src);
private buildGeometry(geometry, material);
computeNormals(positions: Float32Array): Float32Array;
updateCamera(camera: THREE.PerspectiveCamera): void;
private static getMaterial(srcMaterial);
private getLight(src);
}
}
declare module "core/core" {
export * from "core/src/pointer/pointer";
export * from "core/src/engine/engine";
export * from "core/src/ThreeObjects";
export * from "core/src/ThreeJSView";
export * from "core/src/CanvasDisplay";
export * from "core/src/GIRenderBase";
export * from "core/src/GIJSView";
}
declare module "xrenderer" {
export * from "core/core";
}
declare module "core/src/three/GIThree" {
export class GIThree {
scene: any;
constructor(scene: any);
}
}
declare module "core/src/utils/NetworkUtils" {
export class NetworkUtils {
static baseUrl: string;
}
}
declare function fetch(url: string, opt: any): any;
declare function postMessage(arg: any): any;
interface Atomics {
futexWait(ta: any, index: number, value: number, timeOut: number): any;
futexWake(ta: any, index: number, count: number): any;
store(ta: any, index: number, value: number, timeOut: number): any;
load(ta: any, index: number, value: number, timeOut: number): any;
}
interface SharedArrayBuffer {
byteLength: number;
slice(begin: number, end?: number): ArrayBuffer;
}
interface SharedArrayBufferConstructor extends ArrayBufferConstructor {
}
declare var SharedArrayBuffer: SharedArrayBufferConstructor;
interface Thenable<T> {
then<U>(onFulfilled?: (value: T) => U | Thenable<U>, onRejected?: (error: any) => U | Thenable<U>): Thenable<U>;
then<U>(onFulfilled?: (value: T) => U | Thenable<U>, onRejected?: (error: any) => void): Thenable<U>;
catch<U>(onRejected?: (error: any) => U | Thenable<U>): Thenable<U>;
}
declare class Promise<T> implements Thenable<T> {
constructor(callback: (resolve: (value?: T | Thenable<T>) => void, reject: (error?: any) => void) => void);
then<U>(onFulfilled?: (value: T) => U | Thenable<U>, onRejected?: (error: any) => U | Thenable<U>): Promise<U>;
then<U>(onFulfilled?: (value: T) => U | Thenable<U>, onRejected?: (error: any) => void): Promise<U>;
catch<U>(onRejected?: (error: any) => U | Thenable<U>): Promise<U>;
}
declare module Promise {
function resolve<T>(value?: T | Thenable<T>): Promise<T>;
function reject(error: any): Promise<any>;
function reject<T>(error: T): Promise<T>;
function all<T>(promises: (T | Thenable<T>)[]): Promise<T[]>;
function race<T>(promises: (T | Thenable<T>)[]): Promise<T>;
}
declare module 'es6-promise' {
var foo: typeof Promise;
module rsvp {
var Promise: typeof foo;
}
export = rsvp;
}
interface Symbol {
toString(): string;
valueOf(): Object;
[Symbol.toStringTag]: string;
}
interface SymbolConstructor {
prototype: Symbol;
(description?: string | number): symbol;
for(key: string): symbol;
keyFor(sym: symbol): string;
hasInstance: symbol;
isConcatSpreadable: symbol;
iterator: symbol;
match: symbol;
replace: symbol;
search: symbol;
species: symbol;
split: symbol;
toPrimitive: symbol;
toStringTag: symbol;
unscopables: symbol;
}
declare var Symbol: SymbolConstructor;
interface IteratorResult<T> {
done: boolean;
value?: T;
}
interface Iterator<T> {
next(value?: any): IteratorResult<T>;
return?(value?: any): IteratorResult<T>;
throw?(e?: any): IteratorResult<T>;
}
interface Iterable<T> {
[Symbol.iterator](): Iterator<T>;
}
interface IterableIterator<T> extends Iterator<T> {
[Symbol.iterator](): IterableIterator<T>;
}
interface Map<K, V> {
clear(): void;
delete(key: K): boolean;
entries(): IterableIterator<[K, V]>;
forEach(callbackfn: (value: V, index: K, map: Map<K, V>) => void, thisArg?: any): void;
get(key: K): V;
has(key: K): boolean;
keys(): IterableIterator<K>;
set(key: K, value?: V): Map<K, V>;
size: number;
values(): IterableIterator<V>;
[Symbol.iterator](): IterableIterator<[K, V]>;
[Symbol.toStringTag]: string;
}
interface MapConstructor {
new (): Map<any, any>;
new <K, V>(): Map<K, V>;
new <K, V>(iterable: Iterable<[K, V]>): Map<K, V>;
prototype: Map<any, any>;
}
declare var Map: MapConstructor;<|fim▁end|> | static PointLight: string;
static Mesh: string;
static Group: string; |
<|file_name|>Show next instance.py<|end_file_name|><|fim▁begin|>#MenuTitle: Show next instance
# -*- coding: utf-8 -*-
__doc__="""
Jumps to next instance shown in the preview field of the current Edit tab.
"""
import GlyphsApp
<|fim▁hole|>Doc = Glyphs.currentDocument
numberOfInstances = len( Glyphs.font.instances )
try:
currentInstanceNumber = Doc.windowController().activeEditViewController().selectedInstance()
if currentInstanceNumber < numberOfInstances:
Doc.windowController().activeEditViewController().setSelectedInstance_( currentInstanceNumber + 1 )
else:
Doc.windowController().activeEditViewController().setSelectedInstance_( 1 )
except Exception, e:
print "Error:", e<|fim▁end|> | |
<|file_name|>buffer_pool.go<|end_file_name|><|fim▁begin|>package quic
import (
"sync"
"github.com/lucas-clemente/quic-go/internal/protocol"
)
type packetBuffer struct {
Data []byte
// refCount counts how many packets Data is used in.
// It doesn't support concurrent use.
	// It is > 1 when used for coalesced packets.
refCount int
}
// Split increases the refCount.
// It must be called when a packet buffer is used for more than one packet,
// e.g. when splitting coalesced packets.
func (b *packetBuffer) Split() {
b.refCount++
}<|fim▁hole|>func (b *packetBuffer) Decrement() {
b.refCount--
if b.refCount < 0 {
panic("negative packetBuffer refCount")
}
}
// MaybeRelease puts the packet buffer back into the pool,
// if the reference counter already reached 0.
func (b *packetBuffer) MaybeRelease() {
// only put the packetBuffer back if it's not used any more
if b.refCount == 0 {
b.putBack()
}
}
// Release puts back the packet buffer into the pool.
// It should be called when processing is definitely finished.
func (b *packetBuffer) Release() {
b.Decrement()
if b.refCount != 0 {
panic("packetBuffer refCount not zero")
}
b.putBack()
}
// Len returns the length of Data
func (b *packetBuffer) Len() protocol.ByteCount {
return protocol.ByteCount(len(b.Data))
}
func (b *packetBuffer) putBack() {
if cap(b.Data) != int(protocol.MaxReceivePacketSize) {
panic("putPacketBuffer called with packet of wrong size!")
}
bufferPool.Put(b)
}
var bufferPool sync.Pool
func getPacketBuffer() *packetBuffer {
buf := bufferPool.Get().(*packetBuffer)
buf.refCount = 1
buf.Data = buf.Data[:0]
return buf
}
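// Illustrative sketch (not part of the original file): the intended
// reference-counting lifecycle, assuming a single goroutine owns the buffer
// and it was obtained via getPacketBuffer (so cap(Data) is MaxReceivePacketSize):
//
//	buf := getPacketBuffer() // refCount == 1
//	buf.Split()              // buffer reused for a coalesced packet, refCount == 2
//	buf.Decrement()          // first packet processed, refCount == 1
//	buf.MaybeRelease()       // no-op, refCount is still > 0
//	buf.Release()            // second packet done, buffer returned to the pool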
func init() {
bufferPool.New = func() interface{} {
return &packetBuffer{
Data: make([]byte, 0, protocol.MaxReceivePacketSize),
}
}
}<|fim▁end|> |
// Decrement decrements the reference counter.
// It doesn't put the buffer back into the pool. |
<|file_name|>editaddressdialog.cpp<|end_file_name|><|fim▁begin|>#include "editaddressdialog.h"
#include "ui_editaddressdialog.h"
#include "addresstablemodel.h"
#include "guiutil.h"
#include <QDataWidgetMapper>
#include <QMessageBox>
EditAddressDialog::EditAddressDialog(Mode mode, QWidget *parent) :
QDialog(parent),
ui(new Ui::EditAddressDialog), mapper(0), mode(mode), model(0)
{
ui->setupUi(this);
GUIUtil::setupAddressWidget(ui->addressEdit, this);
switch(mode)
{
case NewReceivingAddress:
setWindowTitle(tr("New receiving address"));
ui->addressEdit->setEnabled(false);
break;
case NewSendingAddress:
setWindowTitle(tr("New sending address"));
break;
case EditReceivingAddress:
setWindowTitle(tr("Edit receiving address"));
ui->addressEdit->setEnabled(false);
break;
case EditSendingAddress:
setWindowTitle(tr("Edit sending address"));
break;
}
mapper = new QDataWidgetMapper(this);
mapper->setSubmitPolicy(QDataWidgetMapper::ManualSubmit);
}
EditAddressDialog::~EditAddressDialog()
{
delete ui;
}
void EditAddressDialog::setModel(AddressTableModel *model)
{
this->model = model;
if(!model)
return;
mapper->setModel(model);
mapper->addMapping(ui->labelEdit, AddressTableModel::Label);
mapper->addMapping(ui->addressEdit, AddressTableModel::Address);
}
void EditAddressDialog::loadRow(int row)
{
mapper->setCurrentIndex(row);
}
bool EditAddressDialog::saveCurrentRow()
{
if(!model)
return false;
switch(mode)
{
case NewReceivingAddress:
case NewSendingAddress:
address = model->addRow(
mode == NewSendingAddress ? AddressTableModel::Send : AddressTableModel::Receive,
ui->labelEdit->text(),
ui->addressEdit->text());
break;
case EditReceivingAddress:
case EditSendingAddress:
if(mapper->submit())<|fim▁hole|> address = ui->addressEdit->text();
}
break;
}
return !address.isEmpty();
}
void EditAddressDialog::accept()
{
if(!model)
return;
if(!saveCurrentRow())
{
switch(model->getEditStatus())
{
case AddressTableModel::OK:
// Failed with unknown reason. Just reject.
break;
case AddressTableModel::NO_CHANGES:
// No changes were made during edit operation. Just reject.
break;
case AddressTableModel::INVALID_ADDRESS:
QMessageBox::warning(this, windowTitle(),
tr("The entered address \"%1\" is not a valid Palestinecoin address.").arg(ui->addressEdit->text()),
QMessageBox::Ok, QMessageBox::Ok);
break;
case AddressTableModel::DUPLICATE_ADDRESS:
QMessageBox::warning(this, windowTitle(),
tr("The entered address \"%1\" is already in the address book.").arg(ui->addressEdit->text()),
QMessageBox::Ok, QMessageBox::Ok);
break;
case AddressTableModel::WALLET_UNLOCK_FAILURE:
QMessageBox::critical(this, windowTitle(),
tr("Could not unlock wallet."),
QMessageBox::Ok, QMessageBox::Ok);
break;
case AddressTableModel::KEY_GENERATION_FAILURE:
QMessageBox::critical(this, windowTitle(),
tr("New key generation failed."),
QMessageBox::Ok, QMessageBox::Ok);
break;
}
return;
}
QDialog::accept();
}
QString EditAddressDialog::getAddress() const
{
return address;
}
void EditAddressDialog::setAddress(const QString &address)
{
this->address = address;
ui->addressEdit->setText(address);
}<|fim▁end|> | { |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>pub mod linsol;<|fim▁hole|><|fim▁end|> |
#[cfg(test)]
mod tests; |
<|file_name|>blog.js<|end_file_name|><|fim▁begin|>import React from "react";
import {graphql} from "gatsby";
import "../styles/main.scss";
<|fim▁hole|>import BlogListing from "../components/blog-post-listing";
import PageFooter from "../components/footer";
class BlogListPage extends React.Component {
render() {
const posts = this.props.data.allMarkdownRemark.edges;
return (
<div>
<CompactHeader title="Blog Articles" mood="#fdfdfd" bgUrl={BlogIndexCover}> </CompactHeader>
<main>
<div className="post-list-container">
<BlogListing posts={posts}> </BlogListing>
</div>
</main>
<PageFooter> </PageFooter>
</div>
);
}
}
export default BlogListPage;
export const pageQuery = graphql`
query {
site {
siteMetadata {
title
}
}
allMarkdownRemark(sort: { fields: [frontmatter___date], order: DESC }, limit: 20) {
edges {
node {
excerpt
frontmatter {
date(formatString: "MMMM DD, YYYY")
title
basecolor
author
enablecomments
category
bgimage
external_link
external_site_name
external_site_link
page_slug
}
}
}
}
}
`;<|fim▁end|> | import BlogIndexCover from "../assets/images/blog_covers/blog_index_cover.jpeg";
import CompactHeader from "../components/compact-header"; |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var path = require('path');
var fs = require('fs');
var Writer = require('broccoli-writer');
var Handlebars = require('handlebars');
var walkSync = require('walk-sync');
var RSVP = require('rsvp');
var helpers = require('broccoli-kitchen-sink-helpers');
var mkdirp = require('mkdirp');
var Promise = RSVP.Promise;<|fim▁hole|>
var HandlebarsWriter = function (inputTree, files, options) {
if (!(this instanceof HandlebarsWriter)) {
return new HandlebarsWriter(inputTree, files, options);
}
this.inputTree = inputTree;
this.files = files;
this.options = options || {};
this.context = this.options.context || {};
this.destFile = this.options.destFile || function (filename) {
return filename.replace(/(hbs|handlebars)$/, 'html');
};
this.handlebars = this.options.handlebars || Handlebars;
this.loadPartials();
this.loadHelpers();
};
HandlebarsWriter.prototype = Object.create(Writer.prototype);
HandlebarsWriter.prototype.constructor = HandlebarsWriter;
HandlebarsWriter.prototype.loadHelpers = function () {
var helpers = this.options.helpers;
if (!helpers) return;
if ('function' === typeof helpers) helpers = helpers();
if ('object' !== typeof helpers) {
throw Error('options.helpers must be an object or a function that returns an object');
}
this.handlebars.registerHelper(helpers);
};
HandlebarsWriter.prototype.loadPartials = function () {
var partials = this.options.partials;
var partialsPath;
var partialFiles;
if (!partials) return;
if ('string' !== typeof partials) {
throw Error('options.partials must be a string');
}
partialsPath = path.join(process.cwd(), partials);
partialFiles = walkSync(partialsPath).filter(EXTENSIONS_REGEX.test.bind(EXTENSIONS_REGEX));
partialFiles.forEach(function (file) {
var key = file.replace(partialsPath, '').replace(EXTENSIONS_REGEX, '');
var filePath = path.join(partialsPath, file);
this.handlebars.registerPartial(key, fs.readFileSync(filePath).toString());
}, this);
};
HandlebarsWriter.prototype.write = function (readTree, destDir) {
var self = this;
this.loadPartials();
this.loadHelpers();
return readTree(this.inputTree).then(function (sourceDir) {
var targetFiles = helpers.multiGlob(self.files, {cwd: sourceDir});
return RSVP.all(targetFiles.map(function (targetFile) {
function write (output) {
var destFilepath = path.join(destDir, self.destFile(targetFile));
mkdirp.sync(path.dirname(destFilepath));
var str = fs.readFileSync(path.join(sourceDir, targetFile)).toString();
var template = self.handlebars.compile(str);
fs.writeFileSync(destFilepath, template(output));
}
var output = ('function' !== typeof self.context) ? self.context : self.context(targetFile);
return Promise.resolve(output).then(write);
}));
});
};
module.exports = HandlebarsWriter;<|fim▁end|> |
var EXTENSIONS_REGEX = new RegExp('.(hbs|handlebars)'); |
<|file_name|>language.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2011 Nicolas Wack <[email protected]>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import unicode_literals
from guessit import UnicodeMixin, base_text_type, u, s
from guessit.fileutils import load_file_in_same_dir
from guessit.textutils import find_words
from guessit.country import Country
import re
import logging
__all__ = [ 'is_iso_language', 'is_language', 'lang_set', 'Language',
'ALL_LANGUAGES', 'ALL_LANGUAGES_NAMES', 'UNDETERMINED',
'search_language', 'guess_language' ]
log = logging.getLogger(__name__)
# downloaded from http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
#
# Description of the fields:
# "An alpha-3 (bibliographic) code, an alpha-3 (terminologic) code (when given),
# an alpha-2 code (when given), an English name, and a French name of a language
# are all separated by pipe (|) characters."
_iso639_contents = load_file_in_same_dir(__file__, 'ISO-639-2_utf-8.txt')
# drop the BOM from the beginning of the file
_iso639_contents = _iso639_contents[1:]
language_matrix = [ l.strip().split('|')
for l in _iso639_contents.strip().split('\n') ]
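# Illustrative example (not taken from the file itself): a row such as
# "fre|fra|fr|French|français" splits into
# ['fre', 'fra', 'fr', 'French', 'français'], i.e. bibliographic alpha-3,
# terminologic alpha-3, alpha-2, English name and French name.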
# update information in the language matrix
language_matrix += [['mol', '', 'mo', 'Moldavian', 'moldave'],
['ass', '', '', 'Assyrian', 'assyrien']]
for lang in language_matrix:
# remove unused languages that shadow other common ones with a non-official form
if (lang[2] == 'se' or # Northern Sami shadows Swedish
lang[2] == 'br'): # Breton shadows Brazilian
lang[2] = ''
# add missing information
if lang[0] == 'und':
lang[2] = 'un'
if lang[0] == 'srp':
lang[1] = 'scc' # from OpenSubtitles
lng3 = frozenset(l[0] for l in language_matrix if l[0])
lng3term = frozenset(l[1] for l in language_matrix if l[1])
lng2 = frozenset(l[2] for l in language_matrix if l[2])
lng_en_name = frozenset(lng for l in language_matrix
for lng in l[3].lower().split('; ') if lng)
lng_fr_name = frozenset(lng for l in language_matrix
for lng in l[4].lower().split('; ') if lng)
lng_all_names = lng3 | lng3term | lng2 | lng_en_name | lng_fr_name
lng3_to_lng3term = dict((l[0], l[1]) for l in language_matrix if l[1])
lng3term_to_lng3 = dict((l[1], l[0]) for l in language_matrix if l[1])
lng3_to_lng2 = dict((l[0], l[2]) for l in language_matrix if l[2])
lng2_to_lng3 = dict((l[2], l[0]) for l in language_matrix if l[2])
# we only return the first given english name, hoping it is the most used one
lng3_to_lng_en_name = dict((l[0], l[3].split('; ')[0])
for l in language_matrix if l[3])
lng_en_name_to_lng3 = dict((en_name.lower(), l[0])
for l in language_matrix if l[3]
for en_name in l[3].split('; '))
# we only return the first given french name, hoping it is the most used one
lng3_to_lng_fr_name = dict((l[0], l[4].split('; ')[0])
for l in language_matrix if l[4])
lng_fr_name_to_lng3 = dict((fr_name.lower(), l[0])
for l in language_matrix if l[4]
for fr_name in l[4].split('; '))
# contains a list of exceptions: strings that should be parsed as a language
# but which are not in an ISO form
lng_exceptions = { 'unknown': ('und', None),
'inconnu': ('und', None),
'unk': ('und', None),
'un': ('und', None),
'gr': ('gre', None),
'greek': ('gre', None),
'esp': ('spa', None),
'español': ('spa', None),
'se': ('swe', None),
'po': ('pt', 'br'),
'pb': ('pt', 'br'),
'pob': ('pt', 'br'),
'br': ('pt', 'br'),
'brazilian': ('pt', 'br'),
'català': ('cat', None),
'cz': ('cze', None),
'ua': ('ukr', None),
'cn': ('chi', None),
'chs': ('chi', None),
'jp': ('jpn', None),
'scr': ('hrv', None)
}
def is_iso_language(language):
return language.lower() in lng_all_names
def is_language(language):
return is_iso_language(language) or language in lng_exceptions
def lang_set(languages, strict=False):
"""Return a set of guessit.Language created from their given string
representation.
if strict is True, then this will raise an exception if any language
could not be identified.
"""
return set(Language(l, strict=strict) for l in languages)
class Language(UnicodeMixin):
"""This class represents a human language.
You can initialize it with pretty much anything, as it knows conversion
from ISO-639 2-letter and 3-letter codes, English and French names.
You can also distinguish languages for specific countries, such as
Portuguese and Brazilian Portuguese.
There are various properties on the language object that give you the
representation of the language for a specific usage, such as .alpha3
to get the ISO 3-letter code, or .opensubtitles to get the OpenSubtitles
language code.
>>> Language('fr')
Language(French)
>>> s(Language('eng').french_name)
'anglais'
>>> s(Language('pt(br)').country.english_name)
'Brazil'
>>> s(Language('Español (Latinoamérica)').country.english_name)
'Latin America'
>>> Language('Spanish (Latin America)') == Language('Español (Latinoamérica)')
True
>>> s(Language('zz', strict=False).english_name)
'Undetermined'
>>> s(Language('pt(br)').opensubtitles)
'pob'
"""
    _with_country_regexp = re.compile(r'(.*)\((.*)\)')
    _with_country_regexp2 = re.compile(r'(.*)-(.*)')
def __init__(self, language, country=None, strict=False, scheme=None):
language = u(language.strip().lower())
with_country = (Language._with_country_regexp.match(language) or
Language._with_country_regexp2.match(language))
if with_country:
self.lang = Language(with_country.group(1)).lang
self.country = Country(with_country.group(2))
return
self.lang = None
self.country = Country(country) if country else None
# first look for scheme specific languages
if scheme == 'opensubtitles':
if language == 'br':
self.lang = 'bre'
return
elif language == 'se':
self.lang = 'sme'
return
elif scheme is not None:
log.warning('Unrecognized scheme: "%s" - Proceeding with standard one' % scheme)
# look for ISO language codes
if len(language) == 2:
self.lang = lng2_to_lng3.get(language)
elif len(language) == 3:
self.lang = (language
if language in lng3
else lng3term_to_lng3.get(language))
else:
self.lang = (lng_en_name_to_lng3.get(language) or
lng_fr_name_to_lng3.get(language))
# general language exceptions
if self.lang is None and language in lng_exceptions:
lang, country = lng_exceptions[language]
self.lang = Language(lang).alpha3
self.country = Country(country) if country else None
msg = 'The given string "%s" could not be identified as a language' % language
if self.lang is None and strict:
raise ValueError(msg)
if self.lang is None:
log.debug(msg)
self.lang = 'und'
@property
def alpha2(self):
return lng3_to_lng2[self.lang]
@property
def alpha3(self):
return self.lang
@property
def alpha3term(self):
return lng3_to_lng3term[self.lang]
@property
def english_name(self):
return lng3_to_lng_en_name[self.lang]
@property
def french_name(self):
return lng3_to_lng_fr_name[self.lang]
@property
def opensubtitles(self):
if self.lang == 'por' and self.country and self.country.alpha2 == 'br':
return 'pob'
elif self.lang in ['gre', 'srp']:
return self.alpha3term
return self.alpha3
@property
def tmdb(self):
if self.country:
return '%s-%s' % (self.alpha2, self.country.alpha2.upper())
return self.alpha2
def __hash__(self):
return hash(self.lang)
def __eq__(self, other):
if isinstance(other, Language):
return self.lang == other.lang
if isinstance(other, base_text_type):
try:
return self == Language(other)
except ValueError:
return False
return False
def __ne__(self, other):
return not self == other
def __nonzero__(self):
return self.lang != 'und'
def __unicode__(self):
if self.country:
return '%s(%s)' % (self.english_name, self.country.alpha2)
else:
return self.english_name
def __repr__(self):
if self.country:
return 'Language(%s, country=%s)' % (self.english_name, self.country)
else:
return 'Language(%s)' % self.english_name
UNDETERMINED = Language('und')
ALL_LANGUAGES = frozenset(Language(lng) for lng in lng_all_names) - frozenset([UNDETERMINED])
ALL_LANGUAGES_NAMES = lng_all_names
def search_language(string, lang_filter=None):
"""Looks for language patterns, and if found return the language object,
its group span and an associated confidence.
you can specify a list of allowed languages using the lang_filter argument,
as in lang_filter = [ 'fr', 'eng', 'spanish' ]
>>> search_language('movie [en].avi')
(Language(English), (7, 9), 0.8)
>>> search_language('the zen fat cat and the gay mad men got a new fan', lang_filter = ['en', 'fr', 'es'])
(None, None, None)
"""
# list of common words which could be interpreted as languages, but which
# are far too common to be able to say they represent a language in the
    # middle of a string (where they most likely carry their common meaning)
lng_common_words = frozenset([
# english words
'is', 'it', 'am', 'mad', 'men', 'man', 'run', 'sin', 'st', 'to',
'no', 'non', 'war', 'min', 'new', 'car', 'day', 'bad', 'bat', 'fan',
'fry', 'cop', 'zen', 'gay', 'fat', 'cherokee', 'got', 'an', 'as',
'cat', 'her', 'be', 'hat', 'sun', 'may', 'my', 'mr', 'rum', 'pi',
# french words
'bas', 'de', 'le', 'son', 'vo', 'vf', 'ne', 'ca', 'ce', 'et', 'que',
'mal', 'est', 'vol', 'or', 'mon', 'se',
# spanish words
'la', 'el', 'del', 'por', 'mar',
# other
'ind', 'arw', 'ts', 'ii', 'bin', 'chan', 'ss', 'san', 'oss', 'iii',
'vi', 'ben', 'da', 'lt'
])
sep = r'[](){} \._-+'
if lang_filter:
lang_filter = lang_set(lang_filter)
slow = ' %s ' % string.lower()
confidence = 1.0 # for all of them
for lang in set(find_words(slow)) & lng_all_names:
if lang in lng_common_words:
continue
pos = slow.find(lang)
if pos != -1:
end = pos + len(lang)
# make sure our word is always surrounded by separators
if slow[pos - 1] not in sep or slow[end] not in sep:
continue
language = Language(slow[pos:end])
if lang_filter and language not in lang_filter:
continue
# only allow those languages that have a 2-letter code, those that
# don't are too esoteric and probably false matches
if language.lang not in lng3_to_lng2:
continue
# confidence depends on lng2, lng3, english name, ...
if len(lang) == 2:
confidence = 0.8
elif len(lang) == 3:
confidence = 0.9
else:
                # Note: we could either be really confident that we found a
                # language, or assume that full language names are too common
                # as words and lower their confidence accordingly
confidence = 0.3 # going with the low-confidence route here
<|fim▁hole|>
def guess_language(text):
"""Guess the language in which a body of text is written.
This uses the external guess-language python module, and will fail and return
Language(Undetermined) if it is not installed.
"""
try:
from guess_language import guessLanguage
return Language(guessLanguage(text))
except ImportError:
log.error('Cannot detect the language of the given text body, missing dependency: guess-language')
log.error('Please install it from PyPI, by doing eg: pip install guess-language')
return UNDETERMINED<|fim▁end|> | return language, (pos - 1, end - 1), confidence
return None, None, None
|
<|file_name|>_purefa_facts.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2018, Simon Dodsley ([email protected])
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: purefa_facts
version_added: '2.6'
deprecated:
removed_in: '2.13'
why: Deprecated in favor of C(_info) module.
alternative: Use M(purefa_info) instead.
short_description: Collect facts from Pure Storage FlashArray
description:
  - Collect facts from a Pure Storage FlashArray running the
Purity//FA operating system. By default, the module will collect basic
fact information including hosts, host groups, protection
groups and volume counts. Additional fact information can be collected
based on the configured set of arguments.
author:
- Pure Storage ansible Team (@sdodsley) <[email protected]>
options:
gather_subset:
description:
- When supplied, this argument will define the facts to be collected.
Possible values for this include all, minimum, config, performance,
capacity, network, subnet, interfaces, hgroups, pgroups, hosts,
admins, volumes, snapshots, pods, vgroups, offload, apps and arrays.
type: list
required: false
default: minimum
extends_documentation_fragment:
- purestorage.fa
'''
EXAMPLES = r'''
- name: collect default set of facts
purefa_facts:
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
- name: collect configuration and capacity facts
purefa_facts:
gather_subset:
- config
- capacity
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
- name: collect all facts
purefa_facts:
gather_subset:
- all
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
'''
RETURN = r'''
ansible_facts:
description: Returns the facts collected from the FlashArray
returned: always
type: complex
sample: {
"capacity": {},
"config": {
"directory_service": {
"array_admin_group": null,
"base_dn": null,
"bind_password": null,
"bind_user": null,
"check_peer": false,
"enabled": false,
"group_base": null,
"readonly_group": null,
"storage_admin_group": null,
"uri": []
},
"dns": {
"domain": "domain.com",
"nameservers": [
"8.8.8.8",
"8.8.4.4"
]
},
"ntp": [
"0.ntp.pool.org",
"1.ntp.pool.org",
"2.ntp.pool.org",
"3.ntp.pool.org"
],
"smtp": [
{
"enabled": true,
"name": "[email protected]"
},
{
"enabled": true,
"name": "[email protected]"
}
],
"snmp": [
{
"auth_passphrase": null,
"auth_protocol": null,
"community": null,
"host": "localhost",
"name": "localhost",
"privacy_passphrase": null,
"privacy_protocol": null,
"user": null,
"version": "v2c"
}
],
"ssl_certs": {
"country": null,
"email": null,
"issued_by": "",
"issued_to": "",
"key_size": 2048,
"locality": null,
"organization": "Acme Storage, Inc.",
"organizational_unit": "Acme Storage, Inc.",
"state": null,
"status": "self-signed",
"valid_from": "2017-08-11T23:09:06Z",
"valid_to": "2027-08-09T23:09:06Z"
},
"syslog": []
},
"default": {
"array_name": "flasharray1",
"connected_arrays": 1,
"hostgroups": 0,
"hosts": 10,
"pods": 3,
"protection_groups": 1,
"purity_version": "5.0.4",
"snapshots": 1,
"volume_groups": 2
},
"hgroups": {},
"hosts": {
"host1": {
"hgroup": null,
"iqn": [
"iqn.1994-05.com.redhat:2f6f5715a533"
],
"wwn": []
},
"host2": {
"hgroup": null,
"iqn": [
"iqn.1994-05.com.redhat:d17fb13fe0b"
],
"wwn": []
},
"host3": {
"hgroup": null,
"iqn": [
"iqn.1994-05.com.redhat:97b1351bfb2"
],
"wwn": []
},
"host4": {
"hgroup": null,
"iqn": [
"iqn.1994-05.com.redhat:dd84e9a7b2cb"
],
"wwn": [<|fim▁hole|> "10000000C96C48D2"
]
}
},
"interfaces": {
"CT0.ETH4": "iqn.2010-06.com.purestorage:flasharray.2111b767484e4682",
"CT0.ETH5": "iqn.2010-06.com.purestorage:flasharray.2111b767484e4682",
"CT1.ETH4": "iqn.2010-06.com.purestorage:flasharray.2111b767484e4682",
"CT1.ETH5": "iqn.2010-06.com.purestorage:flasharray.2111b767484e4682"
},
"network": {
"ct0.eth0": {
"address": "10.10.10.10",
"gateway": "10.10.10.1",
"hwaddr": "ec:f4:bb:c8:8a:04",
"mtu": 1500,
"netmask": "255.255.255.0",
"services": [
"management"
],
"speed": 1000000000
},
"ct0.eth2": {
"address": "10.10.10.11",
"gateway": null,
"hwaddr": "ec:f4:bb:c8:8a:00",
"mtu": 1500,
"netmask": "255.255.255.0",
"services": [
"replication"
],
"speed": 10000000000
},
"ct0.eth3": {
"address": "10.10.10.12",
"gateway": null,
"hwaddr": "ec:f4:bb:c8:8a:02",
"mtu": 1500,
"netmask": "255.255.255.0",
"services": [
"replication"
],
"speed": 10000000000
},
"ct0.eth4": {
"address": "10.10.10.13",
"gateway": null,
"hwaddr": "90:e2:ba:83:79:0c",
"mtu": 1500,
"netmask": "255.255.255.0",
"services": [
"iscsi"
],
"speed": 10000000000
},
"ct0.eth5": {
"address": "10.10.10.14",
"gateway": null,
"hwaddr": "90:e2:ba:83:79:0d",
"mtu": 1500,
"netmask": "255.255.255.0",
"services": [
"iscsi"
],
"speed": 10000000000
},
"vir0": {
"address": "10.10.10.20",
"gateway": "10.10.10.1",
"hwaddr": "fe:ba:e9:e7:6b:0f",
"mtu": 1500,
"netmask": "255.255.255.0",
"services": [
"management"
],
"speed": 1000000000
}
},
"offload": {
"nfstarget": {
"address": "10.0.2.53",
"mount_options": null,
"mount_point": "/offload",
"protocol": "nfs",
"status": "scanning"
}
},
"performance": {
"input_per_sec": 8191,
"output_per_sec": 0,
"queue_depth": 1,
"reads_per_sec": 0,
"san_usec_per_write_op": 15,
"usec_per_read_op": 0,
"usec_per_write_op": 642,
"writes_per_sec": 2
},
"pgroups": {
"consisgroup-07b6b983-986e-46f5-bdc3-deaa3dbb299e-cinder": {
"hgroups": null,
"hosts": null,
"source": "host1",
"targets": null,
"volumes": [
"volume-1"
]
}
},
"pods": {
"srm-pod": {
"arrays": [
{
"array_id": "52595f7e-b460-4b46-8851-a5defd2ac192",
"mediator_status": "online",
"name": "sn1-405-c09-37",
"status": "online"
},
{
"array_id": "a2c32301-f8a0-4382-949b-e69b552ce8ca",
"mediator_status": "online",
"name": "sn1-420-c11-31",
"status": "online"
}
],
"source": null
}
},
"snapshots": {
"consisgroup.cgsnapshot": {
"created": "2018-03-28T09:34:02Z",
"size": 13958643712,
"source": "volume-1"
}
},
"subnet": {},
"vgroups": {
"vvol--vSphere-HA-0ffc7dd1-vg": {
"volumes": [
"vvol--vSphere-HA-0ffc7dd1-vg/Config-aad5d7c6"
]
}
},
"volumes": {
"ansible_data": {
"bandwidth": null,
"hosts": [
[
"host1",
1
]
],
"serial": "43BE47C12334399B000114A6",
"size": 1099511627776,
"source": null
}
}
}
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pure import get_system, purefa_argument_spec
ADMIN_API_VERSION = '1.14'
S3_REQUIRED_API_VERSION = '1.16'
LATENCY_REQUIRED_API_VERSION = '1.16'
AC_REQUIRED_API_VERSION = '1.14'
CAP_REQUIRED_API_VERSION = '1.6'
SAN_REQUIRED_API_VERSION = '1.10'
NVME_API_VERSION = '1.16'
PREFERRED_API_VERSION = '1.15'
CONN_STATUS_API_VERSION = '1.17'
def generate_default_dict(array):
default_facts = {}
defaults = array.get()
api_version = array._list_available_rest_versions()
if AC_REQUIRED_API_VERSION in api_version:
default_facts['volume_groups'] = len(array.list_vgroups())
default_facts['connected_arrays'] = len(array.list_array_connections())
default_facts['pods'] = len(array.list_pods())
default_facts['connection_key'] = array.get(connection_key=True)['connection_key']
hosts = array.list_hosts()
admins = array.list_admins()
snaps = array.list_volumes(snap=True, pending=True)
pgroups = array.list_pgroups(pending=True)
hgroups = array.list_hgroups()
# Old FA arrays only report model from the primary controller
ct0_model = array.get_hardware('CT0')['model']
if ct0_model:
model = ct0_model
else:
ct1_model = array.get_hardware('CT1')['model']
model = ct1_model
default_facts['array_model'] = model
default_facts['array_name'] = defaults['array_name']
default_facts['purity_version'] = defaults['version']
default_facts['hosts'] = len(hosts)
default_facts['snapshots'] = len(snaps)
default_facts['protection_groups'] = len(pgroups)
default_facts['hostgroups'] = len(hgroups)
default_facts['admins'] = len(admins)
return default_facts
def generate_perf_dict(array):
perf_facts = {}
api_version = array._list_available_rest_versions()
if LATENCY_REQUIRED_API_VERSION in api_version:
latency_info = array.get(action='monitor', latency=True)[0]
perf_info = array.get(action='monitor')[0]
# IOPS
perf_facts['writes_per_sec'] = perf_info['writes_per_sec']
perf_facts['reads_per_sec'] = perf_info['reads_per_sec']
# Bandwidth
perf_facts['input_per_sec'] = perf_info['input_per_sec']
perf_facts['output_per_sec'] = perf_info['output_per_sec']
# Latency
if LATENCY_REQUIRED_API_VERSION in api_version:
perf_facts['san_usec_per_read_op'] = latency_info['san_usec_per_read_op']
perf_facts['san_usec_per_write_op'] = latency_info['san_usec_per_write_op']
perf_facts['queue_usec_per_read_op'] = latency_info['queue_usec_per_read_op']
perf_facts['queue_usec_per_write_op'] = latency_info['queue_usec_per_write_op']
perf_facts['qos_rate_limit_usec_per_read_op'] = latency_info['qos_rate_limit_usec_per_read_op']
perf_facts['qos_rate_limit_usec_per_write_op'] = latency_info['qos_rate_limit_usec_per_write_op']
perf_facts['local_queue_usec_per_op'] = perf_info['local_queue_usec_per_op']
perf_facts['usec_per_read_op'] = perf_info['usec_per_read_op']
perf_facts['usec_per_write_op'] = perf_info['usec_per_write_op']
perf_facts['queue_depth'] = perf_info['queue_depth']
return perf_facts
def generate_config_dict(array):
config_facts = {}
api_version = array._list_available_rest_versions()
# DNS
config_facts['dns'] = array.get_dns()
# SMTP
config_facts['smtp'] = array.list_alert_recipients()
# SNMP
config_facts['snmp'] = array.list_snmp_managers()
config_facts['snmp_v3_engine_id'] = array.get_snmp_engine_id()['engine_id']
# DS
config_facts['directory_service'] = array.get_directory_service()
if S3_REQUIRED_API_VERSION in api_version:
config_facts['directory_service_roles'] = {}
roles = array.list_directory_service_roles()
for role in range(0, len(roles)):
role_name = roles[role]['name']
config_facts['directory_service_roles'][role_name] = {
'group': roles[role]['group'],
'group_base': roles[role]['group_base'],
}
else:
config_facts['directory_service'].update(array.get_directory_service(groups=True))
# NTP
config_facts['ntp'] = array.get(ntpserver=True)['ntpserver']
# SYSLOG
config_facts['syslog'] = array.get(syslogserver=True)['syslogserver']
# Phonehome
config_facts['phonehome'] = array.get(phonehome=True)['phonehome']
# Proxy
config_facts['proxy'] = array.get(proxy=True)['proxy']
# Relay Host
config_facts['relayhost'] = array.get(relayhost=True)['relayhost']
# Sender Domain
config_facts['senderdomain'] = array.get(senderdomain=True)['senderdomain']
# SYSLOG
config_facts['syslog'] = array.get(syslogserver=True)['syslogserver']
# Idle Timeout
config_facts['idle_timeout'] = array.get(idle_timeout=True)['idle_timeout']
# SCSI Timeout
config_facts['scsi_timeout'] = array.get(scsi_timeout=True)['scsi_timeout']
# SSL
config_facts['ssl_certs'] = array.get_certificate()
# Global Admin settings
if S3_REQUIRED_API_VERSION in api_version:
config_facts['global_admin'] = array.get_global_admin_attributes()
return config_facts
def generate_admin_dict(array):
api_version = array._list_available_rest_versions()
admin_facts = {}
if ADMIN_API_VERSION in api_version:
admins = array.list_admins()
for admin in range(0, len(admins)):
admin_name = admins[admin]['name']
admin_facts[admin_name] = {
'type': admins[admin]['type'],
'role': admins[admin]['role'],
}
return admin_facts
def generate_subnet_dict(array):
sub_facts = {}
subnets = array.list_subnets()
for sub in range(0, len(subnets)):
sub_name = subnets[sub]['name']
if subnets[sub]['enabled']:
sub_facts[sub_name] = {
'gateway': subnets[sub]['gateway'],
'mtu': subnets[sub]['mtu'],
'vlan': subnets[sub]['vlan'],
'prefix': subnets[sub]['prefix'],
'interfaces': subnets[sub]['interfaces'],
'services': subnets[sub]['services'],
}
return sub_facts
def generate_network_dict(array):
net_facts = {}
ports = array.list_network_interfaces()
for port in range(0, len(ports)):
int_name = ports[port]['name']
net_facts[int_name] = {
'hwaddr': ports[port]['hwaddr'],
'mtu': ports[port]['mtu'],
'enabled': ports[port]['enabled'],
'speed': ports[port]['speed'],
'address': ports[port]['address'],
'slaves': ports[port]['slaves'],
'services': ports[port]['services'],
'gateway': ports[port]['gateway'],
'netmask': ports[port]['netmask'],
}
if ports[port]['subnet']:
subnets = array.get_subnet(ports[port]['subnet'])
if subnets['enabled']:
net_facts[int_name]['subnet'] = {
'name': subnets['name'],
'prefix': subnets['prefix'],
'vlan': subnets['vlan'],
}
return net_facts
def generate_capacity_dict(array):
capacity_facts = {}
api_version = array._list_available_rest_versions()
if CAP_REQUIRED_API_VERSION in api_version:
volumes = array.list_volumes(pending=True)
capacity_facts['provisioned_space'] = sum(item['size'] for item in volumes)
capacity = array.get(space=True)
total_capacity = capacity[0]['capacity']
used_space = capacity[0]["total"]
capacity_facts['free_space'] = total_capacity - used_space
capacity_facts['total_capacity'] = total_capacity
capacity_facts['data_reduction'] = capacity[0]['data_reduction']
capacity_facts['system_space'] = capacity[0]['system']
capacity_facts['volume_space'] = capacity[0]['volumes']
capacity_facts['shared_space'] = capacity[0]['shared_space']
capacity_facts['snapshot_space'] = capacity[0]['snapshots']
capacity_facts['thin_provisioning'] = capacity[0]['thin_provisioning']
capacity_facts['total_reduction'] = capacity[0]['total_reduction']
return capacity_facts
def generate_snap_dict(array):
snap_facts = {}
snaps = array.list_volumes(snap=True)
for snap in range(0, len(snaps)):
snapshot = snaps[snap]['name']
snap_facts[snapshot] = {
'size': snaps[snap]['size'],
'source': snaps[snap]['source'],
'created': snaps[snap]['created'],
}
return snap_facts
def generate_vol_dict(array):
volume_facts = {}
vols = array.list_volumes()
for vol in range(0, len(vols)):
volume = vols[vol]['name']
volume_facts[volume] = {
'source': vols[vol]['source'],
'size': vols[vol]['size'],
'serial': vols[vol]['serial'],
'hosts': [],
'bandwidth': ""
}
api_version = array._list_available_rest_versions()
if AC_REQUIRED_API_VERSION in api_version:
qvols = array.list_volumes(qos=True)
for qvol in range(0, len(qvols)):
volume = qvols[qvol]['name']
qos = qvols[qvol]['bandwidth_limit']
volume_facts[volume]['bandwidth'] = qos
vvols = array.list_volumes(protocol_endpoint=True)
for vvol in range(0, len(vvols)):
volume = vvols[vvol]['name']
volume_facts[volume] = {
'source': vvols[vvol]['source'],
'serial': vvols[vvol]['serial'],
'hosts': []
}
cvols = array.list_volumes(connect=True)
for cvol in range(0, len(cvols)):
volume = cvols[cvol]['name']
voldict = [cvols[cvol]['host'], cvols[cvol]['lun']]
volume_facts[volume]['hosts'].append(voldict)
return volume_facts
def generate_host_dict(array):
api_version = array._list_available_rest_versions()
host_facts = {}
hosts = array.list_hosts()
for host in range(0, len(hosts)):
hostname = hosts[host]['name']
tports = []
host_all_info = array.get_host(hostname, all=True)
if host_all_info:
tports = host_all_info[0]['target_port']
host_facts[hostname] = {
'hgroup': hosts[host]['hgroup'],
'iqn': hosts[host]['iqn'],
'wwn': hosts[host]['wwn'],
'personality': array.get_host(hostname,
personality=True)['personality'],
'target_port': tports
}
if NVME_API_VERSION in api_version:
host_facts[hostname]['nqn'] = hosts[host]['nqn']
if PREFERRED_API_VERSION in api_version:
hosts = array.list_hosts(preferred_array=True)
for host in range(0, len(hosts)):
hostname = hosts[host]['name']
host_facts[hostname]['preferred_array'] = hosts[host]['preferred_array']
return host_facts
def generate_pgroups_dict(array):
pgroups_facts = {}
pgroups = array.list_pgroups()
for pgroup in range(0, len(pgroups)):
protgroup = pgroups[pgroup]['name']
pgroups_facts[protgroup] = {
'hgroups': pgroups[pgroup]['hgroups'],
'hosts': pgroups[pgroup]['hosts'],
'source': pgroups[pgroup]['source'],
'targets': pgroups[pgroup]['targets'],
'volumes': pgroups[pgroup]['volumes'],
}
prot_sched = array.get_pgroup(protgroup, schedule=True)
prot_reten = array.get_pgroup(protgroup, retention=True)
if prot_sched['snap_enabled'] or prot_sched['replicate_enabled']:
            pgroups_facts[protgroup]['snap_frequency'] = prot_sched['snap_frequency']
            pgroups_facts[protgroup]['replicate_frequency'] = prot_sched['replicate_frequency']
pgroups_facts[protgroup]['snap_enabled'] = prot_sched['snap_enabled']
pgroups_facts[protgroup]['replicate_enabled'] = prot_sched['replicate_enabled']
pgroups_facts[protgroup]['snap_at'] = prot_sched['snap_at']
pgroups_facts[protgroup]['replicate_at'] = prot_sched['replicate_at']
pgroups_facts[protgroup]['replicate_blackout'] = prot_sched['replicate_blackout']
pgroups_facts[protgroup]['per_day'] = prot_reten['per_day']
pgroups_facts[protgroup]['target_per_day'] = prot_reten['target_per_day']
pgroups_facts[protgroup]['target_days'] = prot_reten['target_days']
pgroups_facts[protgroup]['days'] = prot_reten['days']
pgroups_facts[protgroup]['all_for'] = prot_reten['all_for']
pgroups_facts[protgroup]['target_all_for'] = prot_reten['target_all_for']
if ":" in protgroup:
snap_transfers = array.get_pgroup(protgroup, snap=True, transfer=True)
pgroups_facts[protgroup]['snaps'] = {}
for snap_transfer in range(0, len(snap_transfers)):
snap = snap_transfers[snap_transfer]['name']
pgroups_facts[protgroup]['snaps'][snap] = {
'created': snap_transfers[snap_transfer]['created'],
'started': snap_transfers[snap_transfer]['started'],
'completed': snap_transfers[snap_transfer]['completed'],
'physical_bytes_written': snap_transfers[snap_transfer]['physical_bytes_written'],
'data_transferred': snap_transfers[snap_transfer]['data_transferred'],
'progress': snap_transfers[snap_transfer]['progress'],
}
return pgroups_facts
def generate_pods_dict(array):
pods_facts = {}
api_version = array._list_available_rest_versions()
if AC_REQUIRED_API_VERSION in api_version:
pods = array.list_pods()
for pod in range(0, len(pods)):
acpod = pods[pod]['name']
pods_facts[acpod] = {
'source': pods[pod]['source'],
'arrays': pods[pod]['arrays'],
}
return pods_facts
def generate_conn_array_dict(array):
conn_array_facts = {}
api_version = array._list_available_rest_versions()
if CONN_STATUS_API_VERSION in api_version:
carrays = array.list_connected_arrays()
for carray in range(0, len(carrays)):
arrayname = carrays[carray]['array_name']
conn_array_facts[arrayname] = {
'array_id': carrays[carray]['id'],
'throtled': carrays[carray]['throtled'],
'version': carrays[carray]['version'],
'type': carrays[carray]['type'],
'mgmt_ip': carrays[carray]['management_address'],
'repl_ip': carrays[carray]['replication_address'],
}
if CONN_STATUS_API_VERSION in api_version:
conn_array_facts[arrayname]['status'] = carrays[carray]['status']
return conn_array_facts
def generate_apps_dict(array):
apps_facts = {}
api_version = array._list_available_rest_versions()
if SAN_REQUIRED_API_VERSION in api_version:
apps = array.list_apps()
for app in range(0, len(apps)):
appname = apps[app]['name']
apps_facts[appname] = {
'version': apps[app]['version'],
'status': apps[app]['status'],
'description': apps[app]['description'],
}
return apps_facts
def generate_vgroups_dict(array):
vgroups_facts = {}
api_version = array._list_available_rest_versions()
if AC_REQUIRED_API_VERSION in api_version:
vgroups = array.list_vgroups()
for vgroup in range(0, len(vgroups)):
virtgroup = vgroups[vgroup]['name']
vgroups_facts[virtgroup] = {
'volumes': vgroups[vgroup]['volumes'],
}
return vgroups_facts
def generate_nfs_offload_dict(array):
offload_facts = {}
api_version = array._list_available_rest_versions()
if AC_REQUIRED_API_VERSION in api_version:
offload = array.list_nfs_offload()
for target in range(0, len(offload)):
offloadt = offload[target]['name']
offload_facts[offloadt] = {
'status': offload[target]['status'],
'mount_point': offload[target]['mount_point'],
'protocol': offload[target]['protocol'],
'mount_options': offload[target]['mount_options'],
'address': offload[target]['address'],
}
return offload_facts
def generate_s3_offload_dict(array):
offload_facts = {}
api_version = array._list_available_rest_versions()
if S3_REQUIRED_API_VERSION in api_version:
offload = array.list_s3_offload()
for target in range(0, len(offload)):
offloadt = offload[target]['name']
offload_facts[offloadt] = {
'status': offload[target]['status'],
'bucket': offload[target]['bucket'],
'protocol': offload[target]['protocol'],
'access_key_id': offload[target]['access_key_id'],
}
return offload_facts
def generate_hgroups_dict(array):
hgroups_facts = {}
hgroups = array.list_hgroups()
for hgroup in range(0, len(hgroups)):
hostgroup = hgroups[hgroup]['name']
hgroups_facts[hostgroup] = {
'hosts': hgroups[hgroup]['hosts'],
'pgs': [],
'vols': [],
}
pghgroups = array.list_hgroups(protect=True)
for pghg in range(0, len(pghgroups)):
pgname = pghgroups[pghg]['name']
hgroups_facts[pgname]['pgs'].append(pghgroups[pghg]['protection_group'])
volhgroups = array.list_hgroups(connect=True)
for pgvol in range(0, len(volhgroups)):
pgname = volhgroups[pgvol]['name']
volpgdict = [volhgroups[pgvol]['vol'], volhgroups[pgvol]['lun']]
hgroups_facts[pgname]['vols'].append(volpgdict)
return hgroups_facts
def generate_interfaces_dict(array):
api_version = array._list_available_rest_versions()
int_facts = {}
ports = array.list_ports()
for port in range(0, len(ports)):
int_name = ports[port]['name']
if ports[port]['wwn']:
int_facts[int_name] = ports[port]['wwn']
if ports[port]['iqn']:
int_facts[int_name] = ports[port]['iqn']
if NVME_API_VERSION in api_version:
if ports[port]['nqn']:
int_facts[int_name] = ports[port]['nqn']
return int_facts
def main():
argument_spec = purefa_argument_spec()
argument_spec.update(dict(
gather_subset=dict(default='minimum', type='list',)
))
module = AnsibleModule(argument_spec, supports_check_mode=False)
array = get_system(module)
subset = [test.lower() for test in module.params['gather_subset']]
valid_subsets = ('all', 'minimum', 'config', 'performance', 'capacity',
'network', 'subnet', 'interfaces', 'hgroups', 'pgroups',
'hosts', 'admins', 'volumes', 'snapshots', 'pods',
'vgroups', 'offload', 'apps', 'arrays')
subset_test = (test in valid_subsets for test in subset)
if not all(subset_test):
module.fail_json(msg="value must gather_subset must be one or more of: %s, got: %s"
% (",".join(valid_subsets), ",".join(subset)))
facts = {}
if 'minimum' in subset or 'all' in subset:
facts['default'] = generate_default_dict(array)
if 'performance' in subset or 'all' in subset:
facts['performance'] = generate_perf_dict(array)
if 'config' in subset or 'all' in subset:
facts['config'] = generate_config_dict(array)
if 'capacity' in subset or 'all' in subset:
facts['capacity'] = generate_capacity_dict(array)
if 'network' in subset or 'all' in subset:
facts['network'] = generate_network_dict(array)
if 'subnet' in subset or 'all' in subset:
facts['subnet'] = generate_subnet_dict(array)
if 'interfaces' in subset or 'all' in subset:
facts['interfaces'] = generate_interfaces_dict(array)
if 'hosts' in subset or 'all' in subset:
facts['hosts'] = generate_host_dict(array)
if 'volumes' in subset or 'all' in subset:
facts['volumes'] = generate_vol_dict(array)
if 'snapshots' in subset or 'all' in subset:
facts['snapshots'] = generate_snap_dict(array)
if 'hgroups' in subset or 'all' in subset:
facts['hgroups'] = generate_hgroups_dict(array)
if 'pgroups' in subset or 'all' in subset:
facts['pgroups'] = generate_pgroups_dict(array)
if 'pods' in subset or 'all' in subset:
facts['pods'] = generate_pods_dict(array)
if 'admins' in subset or 'all' in subset:
facts['admins'] = generate_admin_dict(array)
if 'vgroups' in subset or 'all' in subset:
facts['vgroups'] = generate_vgroups_dict(array)
if 'offload' in subset or 'all' in subset:
facts['nfs_offload'] = generate_nfs_offload_dict(array)
facts['s3_offload'] = generate_s3_offload_dict(array)
if 'apps' in subset or 'all' in subset:
facts['apps'] = generate_apps_dict(array)
if 'arrays' in subset or 'all' in subset:
facts['arrays'] = generate_conn_array_dict(array)
module.exit_json(ansible_facts={'ansible_purefa_facts': facts})
if __name__ == '__main__':
main()<|fim▁end|> | "10000000C96C48D1", |
<|file_name|>cache.py<|end_file_name|><|fim▁begin|>"""
Caching utilities for zipline
"""
from collections import MutableMapping
import errno
import os
import pickle
from distutils import dir_util
from shutil import rmtree, move
from tempfile import mkdtemp, NamedTemporaryFile
import pandas as pd
from .context_tricks import nop_context
from .paths import ensure_directory
from .sentinel import sentinel
class Expired(Exception):
"""Marks that a :class:`CachedObject` has expired.
"""
ExpiredCachedObject = sentinel('ExpiredCachedObject')
AlwaysExpired = sentinel('AlwaysExpired')
class CachedObject(object):
"""
A simple struct for maintaining a cached object with an expiration date.
Parameters
----------
value : object
The object to cache.
expires : datetime-like
Expiration date of `value`. The cache is considered invalid for dates
**strictly greater** than `expires`.
Examples
--------
>>> from pandas import Timestamp, Timedelta
>>> expires = Timestamp('2014', tz='UTC')
>>> obj = CachedObject(1, expires)
>>> obj.unwrap(expires - Timedelta('1 minute'))
1
>>> obj.unwrap(expires)
1
>>> obj.unwrap(expires + Timedelta('1 minute'))
... # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
Expired: 2014-01-01 00:00:00+00:00
"""
def __init__(self, value, expires):
self._value = value
self._expires = expires
@classmethod
def expired(cls):
"""Construct a CachedObject that's expired at any time.
"""
return cls(ExpiredCachedObject, expires=AlwaysExpired)
def unwrap(self, dt):
"""
Get the cached value.
Returns
-------
value : object
The cached value.
Raises
------<|fim▁hole|> Raised when `dt` is greater than self.expires.
"""
expires = self._expires
if expires is AlwaysExpired or expires < dt:
raise Expired(self._expires)
return self._value
def _unsafe_get_value(self):
"""You almost certainly shouldn't use this."""
return self._value
class ExpiringCache(object):
"""
A cache of multiple CachedObjects, which returns the wrapped the value
or raises and deletes the CachedObject if the value has expired.
Parameters
----------
cache : dict-like, optional
An instance of a dict-like object which needs to support at least:
`__del__`, `__getitem__`, `__setitem__`
If `None`, than a dict is used as a default.
Examples
--------
>>> from pandas import Timestamp, Timedelta
>>> expires = Timestamp('2014', tz='UTC')
>>> value = 1
>>> cache = ExpiringCache()
>>> cache.set('foo', value, expires)
>>> cache.get('foo', expires - Timedelta('1 minute'))
1
>>> cache.get('foo', expires + Timedelta('1 minute'))
Traceback (most recent call last):
...
KeyError: 'foo'
"""
def __init__(self, cache=None):
if cache is not None:
self._cache = cache
else:
self._cache = {}
def get(self, key, dt):
"""Get the value of a cached object.
Parameters
----------
key : any
The key to lookup.
dt : datetime
The time of the lookup.
Returns
-------
result : any
The value for ``key``.
Raises
------
KeyError
Raised if the key is not in the cache or the value for the key
has expired.
"""
try:
return self._cache[key].unwrap(dt)
except Expired:
del self._cache[key]
raise KeyError(key)
def set(self, key, value, expiration_dt):
"""Adds a new key value pair to the cache.
Parameters
----------
key : any
The key to use for the pair.
value : any
The value to store under the name ``key``.
expiration_dt : datetime
When should this mapping expire? The cache is considered invalid
for dates **strictly greater** than ``expiration_dt``.
"""
self._cache[key] = CachedObject(value, expiration_dt)
class dataframe_cache(MutableMapping):
"""A disk-backed cache for dataframes.
``dataframe_cache`` is a mutable mapping from string names to pandas
DataFrame objects.
This object may be used as a context manager to delete the cache directory
on exit.
Parameters
----------
path : str, optional
The directory path to the cache. Files will be written as
``path/<keyname>``.
lock : Lock, optional
Thread lock for multithreaded/multiprocessed access to the cache.
If not provided no locking will be used.
clean_on_failure : bool, optional
Should the directory be cleaned up if an exception is raised in the
context manager.
    serialization : {'msgpack', 'pickle:<n>'}, optional
How should the data be serialized. If ``'pickle'`` is passed, an
optional pickle protocol can be passed like: ``'pickle:3'`` which says
to use pickle protocol 3.
Notes
-----
The syntax ``cache[:]`` will load all key:value pairs into memory as a
dictionary.
The cache uses a temporary file format that is subject to change between
versions of zipline.
"""
def __init__(self,
path=None,
lock=None,
clean_on_failure=True,
serialization='msgpack'):
self.path = path if path is not None else mkdtemp()
self.lock = lock if lock is not None else nop_context
self.clean_on_failure = clean_on_failure
if serialization == 'msgpack':
self.serialize = pd.DataFrame.to_msgpack
self.deserialize = pd.read_msgpack
self._protocol = None
else:
s = serialization.split(':', 1)
if s[0] != 'pickle':
raise ValueError(
"'serialization' must be either 'msgpack' or 'pickle[:n]'",
)
self._protocol = int(s[1]) if len(s) == 2 else None
self.serialize = self._serialize_pickle
self.deserialize = pickle.load
ensure_directory(self.path)
def _serialize_pickle(self, df, path):
with open(path, 'wb') as f:
pickle.dump(df, f, protocol=self._protocol)
def _keypath(self, key):
return os.path.join(self.path, key)
def __enter__(self):
return self
def __exit__(self, type_, value, tb):
if not (self.clean_on_failure or value is None):
# we are not cleaning up after a failure and there was an exception
return
with self.lock:
rmtree(self.path)
def __getitem__(self, key):
if key == slice(None):
return dict(self.items())
with self.lock:
try:
with open(self._keypath(key), 'rb') as f:
return self.deserialize(f)
except IOError as e:
if e.errno != errno.ENOENT:
raise
raise KeyError(key)
def __setitem__(self, key, value):
with self.lock:
self.serialize(value, self._keypath(key))
def __delitem__(self, key):
with self.lock:
try:
os.remove(self._keypath(key))
except OSError as e:
if e.errno == errno.ENOENT:
                    # raise a KeyError if the file did not exist
raise KeyError(key)
# reraise the actual oserror otherwise
raise
def __iter__(self):
return iter(os.listdir(self.path))
def __len__(self):
return len(os.listdir(self.path))
def __repr__(self):
return '<%s: keys={%s}>' % (
type(self).__name__,
', '.join(map(repr, sorted(self))),
)
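# Rough usage sketch for ``dataframe_cache`` (the key name and DataFrame below are
# illustrative, not part of the API). With the default settings the backing
# directory is removed when the context manager exits:
#
#     with dataframe_cache(serialization='pickle:2') as cache:
#         cache['prices'] = prices_df   # serialized to <path>/prices
#         restored = cache['prices']    # deserialized on read
#         everything = cache[:]         # load every entry into a dict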
class working_file(object):
"""A context manager for managing a temporary file that will be moved
to a non-temporary location if no exceptions are raised in the context.
Parameters
----------
final_path : str
The location to move the file when committing.
*args, **kwargs
Forwarded to NamedTemporaryFile.
Notes
-----
The file is moved on __exit__ if there are no exceptions.
    ``working_file`` uses :func:`shutil.move` to move the actual file,
    meaning it provides the same guarantees as :func:`shutil.move`.
"""
def __init__(self, final_path, *args, **kwargs):
self._tmpfile = NamedTemporaryFile(delete=False, *args, **kwargs)
self._final_path = final_path
@property
def path(self):
"""Alias for ``name`` to be consistent with
:class:`~zipline.utils.cache.working_dir`.
"""
return self._tmpfile.name
def _commit(self):
"""Sync the temporary file to the final path.
"""
move(self.path, self._final_path)
def __enter__(self):
self._tmpfile.__enter__()
return self
def __exit__(self, *exc_info):
self._tmpfile.__exit__(*exc_info)
if exc_info[0] is None:
self._commit()
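# Sketch of typical ``working_file`` usage (the path and helper are hypothetical):
# data is written to a temporary file and only moved to ``final_path`` if the
# block exits without raising.
#
#     with working_file('output.csv', mode='wb') as wf:
#         write_rows(wf.path)   # hypothetical writer targeting the temp file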
class working_dir(object):
"""A context manager for managing a temporary directory that will be moved
to a non-temporary location if no exceptions are raised in the context.
Parameters
----------
final_path : str
        The location to move the directory to when committing.
*args, **kwargs
Forwarded to tmp_dir.
Notes
-----
    The directory is moved on __exit__ if there are no exceptions.
    ``working_dir`` uses :func:`dir_util.copy_tree` to copy the actual files,
    meaning it provides the same guarantees as :func:`dir_util.copy_tree`.
"""
def __init__(self, final_path, *args, **kwargs):
self.path = mkdtemp()
self._final_path = final_path
def ensure_dir(self, *path_parts):
"""Ensures a subdirectory of the working directory.
Parameters
----------
path_parts : iterable[str]
The parts of the path after the working directory.
"""
path = self.getpath(*path_parts)
ensure_directory(path)
return path
def getpath(self, *path_parts):
"""Get a path relative to the working directory.
Parameters
----------
path_parts : iterable[str]
The parts of the path after the working directory.
"""
return os.path.join(self.path, *path_parts)
def _commit(self):
"""Sync the temporary directory to the final path.
"""
dir_util.copy_tree(self.path, self._final_path)
def __enter__(self):
return self
def __exit__(self, *exc_info):
if exc_info[0] is None:
self._commit()
rmtree(self.path)<|fim▁end|> | Expired |
<|file_name|>build_lex_table.rs<|end_file_name|><|fim▁begin|>use super::coincident_tokens::CoincidentTokenIndex;
use super::token_conflicts::TokenConflictMap;
use crate::generate::dedup::split_state_id_groups;
use crate::generate::grammars::{LexicalGrammar, SyntaxGrammar};
use crate::generate::nfa::NfaCursor;
use crate::generate::rules::{Symbol, TokenSet};
use crate::generate::tables::{AdvanceAction, LexState, LexTable, ParseStateId, ParseTable};
use log::info;
use std::collections::hash_map::Entry;
use std::collections::{HashMap, VecDeque};
use std::mem;
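// Builds the main lexer table and, if the grammar declares a word token, a separate
// keyword lexer table. Parse states whose expected token sets can be merged without
// lexical conflicts share a single lexer entry state; the finished table is then
// minimized and sorted before being returned.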
pub(crate) fn build_lex_table(
parse_table: &mut ParseTable,
syntax_grammar: &SyntaxGrammar,
lexical_grammar: &LexicalGrammar,
keywords: &TokenSet,
coincident_token_index: &CoincidentTokenIndex,
token_conflict_map: &TokenConflictMap,
) -> (LexTable, LexTable) {
let keyword_lex_table;
if syntax_grammar.word_token.is_some() {
let mut builder = LexTableBuilder::new(lexical_grammar);
builder.add_state_for_tokens(keywords);
keyword_lex_table = builder.table;
} else {
keyword_lex_table = LexTable::default();
}
let mut parse_state_ids_by_token_set: Vec<(TokenSet, Vec<ParseStateId>)> = Vec::new();
for (i, state) in parse_table.states.iter().enumerate() {
let tokens = state
.terminal_entries
.keys()
.filter_map(|token| {
if token.is_terminal() {
if keywords.contains(&token) {
syntax_grammar.word_token
} else {
Some(*token)
}
} else if token.is_eof() {
Some(*token)
} else {
None
}
})
.collect();
let mut did_merge = false;
for entry in parse_state_ids_by_token_set.iter_mut() {
if merge_token_set(
&mut entry.0,
&tokens,
lexical_grammar,
token_conflict_map,
coincident_token_index,
) {
did_merge = true;
entry.1.push(i);
break;
}
}
if !did_merge {
parse_state_ids_by_token_set.push((tokens, vec![i]));
}
}
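    // Each merged token set becomes one lexer entry state, and every parse state in
    // the group is pointed at that shared state.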
let mut builder = LexTableBuilder::new(lexical_grammar);
for (tokens, parse_state_ids) in parse_state_ids_by_token_set {
let lex_state_id = builder.add_state_for_tokens(&tokens);
for id in parse_state_ids {
parse_table.states[id].lex_state_id = lex_state_id;
}
}
let mut table = builder.table;
minimize_lex_table(&mut table, parse_table);
sort_states(&mut table, parse_table);
(table, keyword_lex_table)
}
struct QueueEntry {
state_id: usize,
nfa_states: Vec<u32>,
eof_valid: bool,
}
struct LexTableBuilder<'a> {
lexical_grammar: &'a LexicalGrammar,
cursor: NfaCursor<'a>,
table: LexTable,
state_queue: VecDeque<QueueEntry>,
state_ids_by_nfa_state_set: HashMap<(Vec<u32>, bool), usize>,
}
impl<'a> LexTableBuilder<'a> {
fn new(lexical_grammar: &'a LexicalGrammar) -> Self {
Self {
lexical_grammar,
cursor: NfaCursor::new(&lexical_grammar.nfa, vec![]),
table: LexTable::default(),
state_queue: VecDeque::new(),
state_ids_by_nfa_state_set: HashMap::new(),
}
}
fn add_state_for_tokens(&mut self, tokens: &TokenSet) -> usize {
let mut eof_valid = false;
let nfa_states = tokens
.iter()
.filter_map(|token| {
if token.is_terminal() {
Some(self.lexical_grammar.variables[token.index].start_state)
} else {
eof_valid = true;
None
}
})
.collect();
let (state_id, is_new) = self.add_state(nfa_states, eof_valid);
if is_new {
info!(
"entry point state: {}, tokens: {:?}",
state_id,
tokens
.iter()
.map(|t| &self.lexical_grammar.variables[t.index].name)
.collect::<Vec<_>>()
);
}
while let Some(QueueEntry {
state_id,
nfa_states,
eof_valid,
}) = self.state_queue.pop_front()
{
self.populate_state(state_id, nfa_states, eof_valid);
}
state_id
}
fn add_state(&mut self, nfa_states: Vec<u32>, eof_valid: bool) -> (usize, bool) {
self.cursor.reset(nfa_states);
match self
.state_ids_by_nfa_state_set
.entry((self.cursor.state_ids.clone(), eof_valid))
{
Entry::Occupied(o) => (*o.get(), false),
Entry::Vacant(v) => {
let state_id = self.table.states.len();
self.table.states.push(LexState::default());
self.state_queue.push_back(QueueEntry {
state_id,
nfa_states: v.key().0.clone(),
eof_valid,
});
v.insert(state_id);
(state_id, true)
}
}
}
fn populate_state(&mut self, state_id: usize, nfa_states: Vec<u32>, eof_valid: bool) {
self.cursor.force_reset(nfa_states);
// The EOF state is represented as an empty list of NFA states.
let mut completion = None;
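        // Pick the single preferred completed token for this state; ties between
        // completions are resolved by `TokenConflictMap::prefer_token`.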
for (id, prec) in self.cursor.completions() {
if let Some((prev_id, prev_precedence)) = completion {
if TokenConflictMap::prefer_token(
self.lexical_grammar,
(prev_precedence, prev_id),
(prec, id),
) {
continue;
}
}
completion = Some((id, prec));
}
let transitions = self.cursor.transitions();
let has_sep = self.cursor.transition_chars().any(|(_, sep)| sep);
// If EOF is a valid lookahead token, add a transition predicated on the null
// character that leads to the empty set of NFA states.
if eof_valid {
let (next_state_id, _) = self.add_state(Vec::new(), false);
self.table.states[state_id].eof_action = Some(AdvanceAction {
state: next_state_id,
in_main_token: true,
});
}
for transition in transitions {
if let Some((completed_id, completed_precedence)) = completion {
if !TokenConflictMap::prefer_transition(
&self.lexical_grammar,
&transition,
completed_id,
completed_precedence,
has_sep,
) {
continue;
}
}
let (next_state_id, _) =
self.add_state(transition.states, eof_valid && transition.is_separator);
self.table.states[state_id].advance_actions.push((
transition.characters,
AdvanceAction {
state: next_state_id,
in_main_token: !transition.is_separator,
},
));
}
if let Some((complete_id, _)) = completion {
self.table.states[state_id].accept_action = Some(Symbol::terminal(complete_id));
} else if self.cursor.state_ids.is_empty() {
self.table.states[state_id].accept_action = Some(Symbol::end());
}
}
}
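/// Attempts to merge `other` into `tokens`. Returns `false` (leaving `tokens`
/// unchanged) when a token present in only one of the two sets conflicts with or
/// matches a prefix of a token from the other set, or overlaps such a token while
/// the two never appear in the same parse state; otherwise inserts all of `other`
/// into `tokens` and returns `true`.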
fn merge_token_set(
tokens: &mut TokenSet,
other: &TokenSet,
lexical_grammar: &LexicalGrammar,
token_conflict_map: &TokenConflictMap,
coincident_token_index: &CoincidentTokenIndex,
) -> bool {
for i in 0..lexical_grammar.variables.len() {
let symbol = Symbol::terminal(i);
let set_without_terminal = match (tokens.contains_terminal(i), other.contains_terminal(i)) {
(true, false) => other,
(false, true) => tokens,
_ => continue,
};
for existing_token in set_without_terminal.terminals() {
if token_conflict_map.does_conflict(i, existing_token.index)
|| token_conflict_map.does_match_prefix(i, existing_token.index)
{
return false;
}
if !coincident_token_index.contains(symbol, existing_token) {
if token_conflict_map.does_overlap(existing_token.index, i)
|| token_conflict_map.does_overlap(i, existing_token.index)
{
return false;
}
}
}
}
tokens.insert_all(other);
true
}
fn minimize_lex_table(table: &mut LexTable, parse_table: &mut ParseTable) {
// Initially group the states by their accept action and their
// valid lookahead characters.
let mut state_ids_by_signature = HashMap::new();
for (i, state) in table.states.iter().enumerate() {
let signature = (
i == 0,
state.accept_action,
state.eof_action.is_some(),
state
.advance_actions
.iter()
.map(|(characters, action)| (characters.clone(), action.in_main_token))
.collect::<Vec<_>>(),
);
state_ids_by_signature
.entry(signature)
.or_insert(Vec::new())
.push(i);
}
let mut state_ids_by_group_id = state_ids_by_signature
.into_iter()
.map(|e| e.1)
.collect::<Vec<_>>();
state_ids_by_group_id.sort();
let error_group_index = state_ids_by_group_id
.iter()
.position(|g| g.contains(&0))
.unwrap();
state_ids_by_group_id.swap(error_group_index, 0);
let mut group_ids_by_state_id = vec![0; table.states.len()];
for (group_id, state_ids) in state_ids_by_group_id.iter().enumerate() {
for state_id in state_ids {
group_ids_by_state_id[*state_id] = group_id;
}
}
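    // Repeatedly split any group whose members transition into different groups,
    // in the style of DFA minimization by partition refinement.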
while split_state_id_groups(
&table.states,
&mut state_ids_by_group_id,
&mut group_ids_by_state_id,
1,
lex_states_differ,
) {
continue;
}
let mut new_states = Vec::with_capacity(state_ids_by_group_id.len());
for state_ids in &state_ids_by_group_id {
let mut new_state = LexState::default();
mem::swap(&mut new_state, &mut table.states[state_ids[0]]);
for (_, advance_action) in new_state.advance_actions.iter_mut() {
advance_action.state = group_ids_by_state_id[advance_action.state];
}
if let Some(eof_action) = &mut new_state.eof_action {
eof_action.state = group_ids_by_state_id[eof_action.state];
}
new_states.push(new_state);
}
for state in parse_table.states.iter_mut() {
state.lex_state_id = group_ids_by_state_id[state.lex_state_id];
}
table.states = new_states;
}
fn lex_states_differ(
left: &LexState,
right: &LexState,
group_ids_by_state_id: &Vec<usize>,
) -> bool {
left.advance_actions
.iter()
.zip(right.advance_actions.iter())
.any(|(left, right)| {
group_ids_by_state_id[left.1.state] != group_ids_by_state_id[right.1.state]
})
}
fn sort_states(table: &mut LexTable, parse_table: &mut ParseTable) {
// Get a mapping of old state index -> new_state_index
let mut old_ids_by_new_id = (0..table.states.len()).collect::<Vec<_>>();
old_ids_by_new_id[1..].sort_by_key(|id| &table.states[*id]);<|fim▁hole|>
// Get the inverse mapping
let mut new_ids_by_old_id = vec![0; old_ids_by_new_id.len()];
for (id, old_id) in old_ids_by_new_id.iter().enumerate() {
new_ids_by_old_id[*old_id] = id;
}
// Reorder the parse states and update their references to reflect
// the new ordering.
table.states = old_ids_by_new_id
.iter()
.map(|old_id| {
let mut state = LexState::default();
mem::swap(&mut state, &mut table.states[*old_id]);
for (_, advance_action) in state.advance_actions.iter_mut() {
advance_action.state = new_ids_by_old_id[advance_action.state];
}
if let Some(eof_action) = &mut state.eof_action {
eof_action.state = new_ids_by_old_id[eof_action.state];
}
state
})
.collect();
// Update the parse table's lex state references
for state in parse_table.states.iter_mut() {
state.lex_state_id = new_ids_by_old_id[state.lex_state_id];
}
}<|fim▁end|> | |
<|file_name|>benchmarks_test.go<|end_file_name|><|fim▁begin|>/*
Copyright 2018 MBT Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package lib
import (
"fmt"
"testing"
)
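// benchmarkReduceToDiff builds a repository containing modulesCount modules, adds a
// second commit that touches deltaCount files, and measures ManifestByDiff between
// the two commits.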
func benchmarkReduceToDiff(modulesCount, deltaCount int, b *testing.B) {
clean()
defer clean()
repo := NewTestRepoForBench(b, ".tmp/repo")
for i := 0; i < modulesCount; i++ {
err := repo.InitModule(fmt.Sprintf("app-%v", i))
if err != nil {
b.Fatalf("%v", err)
}
}<|fim▁hole|> }
c1 := repo.LastCommit
for i := 0; i < deltaCount; i++ {
err = repo.WriteContent(fmt.Sprintf("content/file-%v", i), "sample content")
if err != nil {
b.Fatalf("%v", err)
}
}
repo.Commit("second")
c2 := repo.LastCommit
world := NewBenchmarkWorld(b, ".tmp/repo")
b.ResetTimer()
for i := 0; i < b.N; i++ {
_, err = world.System.ManifestByDiff(c1.String(), c2.String())
if err != nil {
b.Fatalf("%v", err)
}
}
b.StopTimer()
}
func BenchmarkReduceToDiff10(b *testing.B) {
benchmarkReduceToDiff(10, 10, b)
}
func BenchmarkReduceToDiff100(b *testing.B) {
benchmarkReduceToDiff(100, 100, b)
}
func BenchmarkReduceToDiff1000(b *testing.B) {
benchmarkReduceToDiff(1000, 1000, b)
}
func BenchmarkReduceToDiff10000(b *testing.B) {
benchmarkReduceToDiff(10000, 10000, b)
}<|fim▁end|> |
err := repo.Commit("first")
if err != nil {
b.Fatalf("%v", err) |
<|file_name|>socket_server.go<|end_file_name|><|fim▁begin|>/**
* Copyright 2014 @ z3q.net.
* name :
* author : jarryliu
* date : 2013-12-16 19:03
* description :
* history :
*/
package app
import (
"fmt"
"github.com/jsix/gof"
"go2o/src/core"
"go2o/src/core/infrastructure"
"go2o/src/core/service"
"os"
"strconv"
)
func RunSocket(ctx gof.App, port int, debug, trace bool) {
if gcx, ok := ctx.(*core.MainApp); ok {
if !gcx.Loaded {<|fim▁hole|> fmt.Println("app context err")
os.Exit(1)
return
}
if debug {
fmt.Println("[Started]:Socket server (with debug) running on port [" +
strconv.Itoa(port) + "]:")
infrastructure.DebugMode = true
} else {
fmt.Println("[Started]:Socket server running on port [" +
strconv.Itoa(port) + "]:")
}
service.ServerListen("tcp", ":"+strconv.Itoa(port), ctx)
}<|fim▁end|> | gcx.Init(debug, trace)
}
} else { |
<|file_name|>generate.js<|end_file_name|><|fim▁begin|>var fs = require('fs');
var path = require('path');
var md_parser = require('./md_parser');
var $ = require('./helper');
var rootDir = path.join(__dirname, '../') + path.sep;
var assetsDir = path.join(rootDir, 'assets') + path.sep;
var templateDir = path.join(rootDir, 'template') + path.sep;
//1. Export a single file: nodeppt generate file.md
//2. Export a whole directory: nodeppt generate ./ --all -o publish
module.exports = function(filepath, outputDir, isAll) {
filepath = fs.realpathSync(filepath);
outputDir = outputDir ? $.getDirPath(outputDir) : $.getDirPath(path.join(process.cwd(), './publish'));
isAll = !!isAll;
if (isAll) {
        //1. Export the assets directory
$.copy(assetsDir, outputDir, function(filename, dir, subdir) {
if (!subdir || subdir === 'scss') {
                //do not copy scss
return false;
}
return true;
});
}
    //2. Copy img, css, js, etc. from filepath into the output (root-level files excluded); parse every /*.md encountered
generate(filepath, outputDir);
console.log('生成结束!'.bold.green + require('path').relative('b:/', outputDir).yellow);
};
function parser(content, template) {
try {
var html = md_parser(content, null, null, null, {
generate: true
});
return html;
} catch (e) {
console.log('ERROR: '.bold.red + e.toString());
}<|fim▁hole|> return false;
}
/**
 * Generate
* @param {[type]} filepath [description]
* @param {[type]} outputDir [description]
* @return {[type]} [description]
*/
function generate(filepath, outputDir) {
var filename = '';
var templateMd = $.readFile(templateDir + 'markdown.ejs');
var templateList = $.readFile(templateDir + 'list.ejs');
if ($.isDir(filepath, true)) {
        //walk the directory and generate an .htm file for each markdown file
var indexList = '';
$.copy(filepath, outputDir, function(filename, dir, subdir) {
if (!subdir && /\.(?:md|markdown)$/i.test(filename)) {
var content = $.readFile(path.join(filepath, filename));
var html = parser(content);
if (html) {
var title = html.match(/<title>(.*?)<\/title>/);
if (title[1]) {
title = title[1];
} else {
title = filename;
}
var url = filename.replace(/\.(?:md|markdown)$/i, '.htm');
indexList += '<li><a class="star" href="' + url + '" target="_blank">' + title + '</a> [<a href="' + url + '?_multiscreen=1" target="_blank" title="多窗口打开">多窗口</a>]</li>';
copyLinkToOutput(content, filepath, outputDir);
html = handlerHTML(html);
$.writeFile(path.join(outputDir, filename.replace(/\.(?:md|markdown)$/i, '.htm')), html);
}
return false;
}
return true;
});
        //write the index file
var packageJson = require(rootDir + 'package.json');
var data = {
version: packageJson.version,
site: packageJson.site,
date: Date.now(),
list: indexList,
dir: '/'
};
indexList = $.renderStr(templateList, data);
$.writeFile(path.join(outputDir, 'index.html'), indexList);
} else {
var content;
if ($.exists(filepath)) {
content = $.readFile(filepath);
} else {
return console.log('ERROR: '.bold.red + filepath + ' is not exists!');
}
filename = path.basename(filepath);
copyLinkToOutput(content, filepath, outputDir);
var html = parser(content);
if (html) {
html = handlerHTML(html);
$.writeFile(path.join(outputDir, filename.replace(/\.(?:md|markdown)$/i, '.htm')), html);
}
}
}
//handle absolute-path urls
function handlerHTML(html) {
html = html.replace(/([src|href])=["']\//gi, '$1="./')
.replace("loadJS('/js", "loadJS('./js").replace("dir: '/js/',", "dir: './js/',");
return html;
}
//copy page-relative url assets into the output folder
function copyLinkToOutput(content, filepath, outputDir) {
//[inline模式](/assets/box-fe-road/img/inline-mode.png)
var files = [];
content.replace(/\[.+?\]\(\s?(.*?)\s?\)/g, function(i, file) {
files.push(file);
}).replace(/href=(['"])(.+?)\1/g, function(i, q, file) {
files.push(file);
});
    //parse the cover section
var json = md_parser.parseCover(content.split(/\[slide.*\]/i)[0]);
if (json.files) {
files = files.concat(json.files.split(/\s?,\s?/));
}
files.filter(function(f) {
return !/^http[s]?:\/\//.test(f);
}).forEach(function(f) {
var topath = path.join(outputDir, f);
var realpath = path.join(path.dirname(filepath), f);
if ($.exists(realpath)) {
var data = fs.readFileSync(String(realpath));
$.writeFile(topath, data);
}
});
}<|fim▁end|> | |
<|file_name|>webPublico.js<|end_file_name|><|fim▁begin|>'use strict';
// PUBLIC WEB
// =============================================================================
var express = require('express');
var router = express.Router();
//var request = require('request');
var Model = require('../../models/jugando.js');
/***********************alarma***************************
var http = require('http');
var url = require('url');
var SerialPort = require("serialport");
var com = new SerialPort("COM13");
var usuario1 = '_e4_0e_09_6f';
var alarma = 0;
/*****************alarma*******************
http.createServer(function(peticion, respuesta){
var query = url.parse(peticion.url,true).query;
var puerta = query.puerta;
var codigo = query.codigo;
var actual = query.actual;
console.log("puertaaaaaaaaaaaaaaaaaaaaaaaaaaaa",puerta);
console.log("codigoooooooooooooooooooooooooooo",codigo);
console.log("codigoooooooooooooooooooooooooooo",actual);
respuesta.writeHead(200, {'Content-Type': 'text/html'});
respuesta.end(puerta);
if (puerta === '0') {
console.log('Puerta abierta');
if (alarma === 0) {
console.log('Entrada habilitada.');
} else {
com.write('<1');
/*************************************
console.log('Alarma activada!.');
var nombre = "Alarma Activada!. La Puerta está abierta";
var alarma = "Alarma de Portón";
var index = Model.Alarma.build({
nombre: nombre,
alarma: alarma
});
index.add(function (success) {
console.log('Se guardo la alarma');
},
function (err) {
console.log(err);
});
/*************************************
}
}
if (puerta === '1') {
console.log('Puerta igual a cerrada');
alarma = 1;
}
if (codigo ==='a' && puerta === '3') {
if (actual === '0') {
console.log('Puerta abierta');
com.write('<1');
}
if (actual === '1') {
console.log('Puerta cerrada');
alarma = 1;
}
}
/*************************************leyendo para el lector********************************************************
var empleado = Model.Empleado.build();
console.log("codigo",codigo);
//************************************
empleado.retrieveByCodigo(codigo, function (empleadooq) {
if (empleadooq) {
alarma = 0;
console.log('Usuario registrado. Alarma desbloqueada.');
com.write('<0');
var f = new Date();
//new Date().toJSON().slice(0,10)
var fechaIngreso = f.getFullYear() + "/" + (f.getMonth() +1) + "/" + f.getDate();
var horaIngreso = f.getHours()+":"+f.getMinutes()+":"+f.getSeconds();
var observacionIngreso = "Ingreso del Usuario al Corral";
var EmpleadoIdEmpleado= empleadooq.idEmpleado;
var index = Model.IngresoCorral.build({
fechaIngreso: fechaIngreso,
horaIngreso: horaIngreso,
observacionIngreso: observacionIngreso,
EmpleadoIdEmpleado: EmpleadoIdEmpleado
});
index.add(function (success) {
console.log('Se guardo el acceso');
},
function (err) {
console.log(err);
});
} else{
alarma = 1;
console.log('Alarma activada!.');
}
}, function (error) {
console.log('Empleado no encontrado',error);
});
}).listen(8000);
console.log('Servidor iniciado.');
com.on('error', function(err){
console.log('Error: ', err.message);
});
/******************************* card reader section **************************************************/
var idlector = "";
var valor ="";
var SerialPort = require('serialport');
var serialport = new SerialPort("/COM12", {
baudRate: 115200
});
var buffer3 = new Buffer(6);
buffer3[0] = 0xA0;
buffer3[1] = 0x04;
buffer3[2] = 0x01;
buffer3[3] = 0x89;
buffer3[4] = 0x01;
buffer3[5] = 0xD1;
serialport.on('data', function(data) {
var buff = new Buffer(data, 'utf8');
var imprimir = buff.toString('hex');
var cmd = imprimir.charAt(3);
var enviar = imprimir.slice(14,-4);
if(cmd == 3){
console.log('este es cmd********', cmd);
idlector = enviar.trim();
console.log('soy id del lector',idlector);
var animal = Model.Animal.build();
console.log('estoy adentro y tengo el id:',idlector);
animal.retrieveByTag(idlector, function (animales) {
if (animales) {
//console.log(animales);
valor = animales.idAnimal;
console.log('soy animalid--------',valor);
}else{
console.log("error");
serialport.write(buffer3);
}
});
}
});
// open errors will be emitted as an error event
serialport.on('error', function(err) {
console.log('Error: ', err.message);
});
/******************************************************************************/
var horaC="";
var horasC="";
var nivelC="";
var pesoBatea="";
var pesoRacionC="";
var pesoBateaC="";
var idInsumoC="";
var consumoId="";
var niv = 5;
var SerialPort = require('serialport');
var parsers = require('serialport').parsers;
var port = new SerialPort("/COM14", {
baudRate: 9600,
parser: parsers.readline('\r\n')
});
leerNivel();
leerPesoyRacion();
leerHora();
/************************ read the feeder's current level ***********************/
function leerNivel(){
port.on('open', function() {
port.write('main screen turn on', function(err) {
if (err) {
return console.log('Error: ', err.message);
}
console.log('mensaje 2 escrito');
});
setTimeout(function(){
port.write('>2', function(err) {
if (err) {
return console.log('Error: ', err.message);
}
console.log('cmd 2');
});
}, 1000);
});
}
/************* read the feeder's current time *********************/
function leerHora(){
port.on('open', function() {
port.write('main screen turn on', function(err) {
if (err) {
return console.log('Error: ', err.message);
}
console.log('mensaje 1 escrito');
});
setTimeout(function(){
port.write('>1', function(err) {
if (err) {
return console.log('Error: ', err.message);
}
console.log('cmd 1');
});
}, 3000);
});
}
/************* read the feeder's current weight and ration ********************/
function leerPesoyRacion(){
port.on('open', function() {
port.write('main screen turn on', function(err) {
if (err) {
return console.log('Error: ', err.message);
}
console.log('mensaje 4 escrito');
});
setTimeout(function(){
port.write('>4', function(err) {
if (err) {
return console.log('Error: ', err.message);
}
console.log('cmd 4');
});
}, 2000);
});
}
/*************** handle data received from the feeder *******************/
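/*
 * Protocol note (inferred from the handler below, not from separate documentation):
 * the feeder replies with a line whose first character selects the payload --
 * '1' = current time, '2' = level, '3' = trough weight (stored as the previous
 * consumption's leftover), '4' = ration weight, and 'x' = a consumption event
 * (time, weight and insumo id) that creates a new Consumo record.
 */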
//setTimeout(function(){
port.on('data', function(data) {
var imprimir = data.toString();
var cmd = imprimir.charAt(0);
var enviar = imprimir.substring(1);
console.log('valor**************', imprimir);
if (cmd == 1) {
console.log('dentro de cmd 1',cmd);
horaC = enviar.trim();
console.log('hora:', horaC);
} else if(cmd == 2){
console.log('dentro de cmd 2',cmd);
nivelC = enviar.trim();
console.log('nivel:', nivelC);
} else if(cmd == 4){
console.log('dentro de cmd 4',cmd);
pesoRacionC = '2';
console.log('pesoRacion:', pesoRacionC);
}else if(cmd == 3){
console.log('dentro de cmd 3',cmd);
pesoBatea = enviar.trim();
console.log('pesoBatea:', pesoBatea);
var detalleConsumo = Model.DetalleConsumo.build();
detalleConsumo.retrieveId(function (detalleQ) {
if (detalleQ) {
console.log('dentro de detalleQ ultimo id del detalle consumo>', detalleQ[0].dataValues['idDetalleConsumo']);
var idDetalleQ = detalleQ[0].dataValues['idDetalleConsumo'];
detalleConsumo.updateById2(idDetalleQ,pesoBatea,function (success) {
if (success) {
console.log('se guardo la sobra');
} else {
console.log('Detalle Consumo1 no encontrado');
}
}, function (error) {
console.log('Detalle Consumo2 no encontrado');
});
} else {
console.log('Detalle Consumo3 no encontrado');
}
}, function (error) {
res.send('Detalle Consumo4 no encontrado');
});
} else if(cmd == "x"){
//serialport.write(buffer3);
console.log('dentro de cmd x',cmd);
horasC = imprimir.substring(1,9);
console.log('horas:', horasC);
//pesoBateaC = imprimir.slice(9,-1);
pesoBateaC = '2';
console.log('pesoBatea:', '2');
idInsumoC = imprimir.slice(-1);
console.log('idInsumo:', idInsumoC);
serialport.write(buffer3);
var f = new Date();
var fecha = f.getFullYear() + "/" + (f.getMonth() +1) + "/" + f.getDate();
var consumo = Model.Consumo.build();
var stock = Model.Stock.build();
serialport.write(buffer3);
var index = Model.Consumo.build({
fechaConsumo: fecha,
horaConsumo: horasC,
InsumoIdInsumo: idInsumoC
});
serialport.write(buffer3);
index.add(function (success) {
console.log("listo cabecera");
serialport.write(buffer3);
consumo.retrieveId(function (consumoQ) {
if (consumoQ) {
consumoId = consumoQ[0].dataValues['idConsumo'];
console.log("soy consumoId*********", consumoId);
var index2 = Model.DetalleConsumo.build({
cantidad: 2,
observacion: "Consumo de Balanceados",
AnimalIdAnimal: valor,
ConsumoIdConsumo: consumoId
});
index2.add(function (success) {
console.log("dentro");
stock.retrieveByInsumo(consumoId, pesoBateaC, function (detalleConsumos) {
if (detalleConsumos) {
console.log("listo xfin");
index2.guardar(consumoId, function (detalleConsumoss) {
if (detalleConsumoss) {
console.log('se guardo el total del consumo');
} else {
console.log('No se puede cargar el total del consumo');
}
},function (err) {
console.log('Error al intentar cargar el total del consumo',err);
});
} else {
console.log('No se encontraron detalles');
}
}, function (error) {
console.log('Detalle no encontrado');
});
},
function (err) {
console.log('error aca', err);
});
}else {
console.log('No se encontraron Consumos');
}
});
},
function (err) {
console.log(err);
});
}
});
//}, 1000);
/*********************************************************************/
router.get('/abrir', function (req, res) {
console.log('dentro de abrir');
port.write('>i');
});
/*router.get('/abrir', function (req, res) {
console.log('dentro de abrir');
port.write('<1');
});
/********************************************************************/
router.get('/cerrar', function (req, res) {
console.log('dentro de cerrar');
port.write('>j');
});
/*
router.get('/cerrar', function (req, res) {
console.log('dentro de cerrar');
port.write('<0');
});
/********************************************************************/
router.get('/liberar', function (req, res) {
console.log('dentro de liberar');
console.log('obteniendo id del animal');
port.write('>x');
});
/********************************************************************/
router.get('/sobra', function (req, res) {
console.log('dentro de sobra');
console.log('obteniendo id del animal');
port.write('>3');
});
/***************************************************************************/
router.get('/', function (req, res) {
req.session.destroy(function(err) {
if(err) {
console.log(err);
} else {
res.render('publico/home/indexa.jade');
}
});
});
/****************************************************************************/
router.get('/perfil', function (req, res) {
//************************************
var mensaje = Model.Mensaje.build();
//************************************
var alarma = Model.Alarma.build();
//************************************
if(!req.session.user){
res.render('web/index/404.jade');
}
var nivelUsuario = req.session.user.Nivel['nivel'];
console.log('soy nivelUsuario', nivelUsuario);
if(nivelUsuario =='admin'){
mensaje.retriveCount(function (mensaje1) {
console.log('mensaje1', mensaje1);
if (mensaje1) {
mensaje.retrieveAll(function (mensaje2) {
console.log('mensaje2', mensaje2);
if (mensaje2) {
console.log(req.body);
alarma.retriveCount(function (alarma1) {
console.log('alarma1', alarma1);
if (alarma1) {
alarma.retrieveAll(function (alarma2) {
console.log('alarma2', alarma2);
if (alarma2) {
console.log(req.session);
var usuario = req.session.user.usuario;
var pass = req.session.user.pass;
var fechaCreacion = req.session.user.fechaCreacion;
res.render('web/index/perfil.jade',{
alarmas1: alarma1,
alarmas2: alarma2,
mensajes: mensaje1,
mensajeria: mensaje2,
usuarios: usuario,
passs: pass,
fechaCreacions: fechaCreacion
});
}else {
res.send(401, 'No se encontraron Alarmas');
}
}, function (error) {
res.send('Alarma no encontrado');
});
} else {
res.send(401, 'No se encontraron Alarmas');
}
}, function (error) {
res.send('Alarma no encontrado');
});
}else {
res.send(401, 'No se encontraron Mensajes');
}
}, function (error) {
res.send('Mensaje no encontrado');
});
} else {
res.send(401, 'No se encontraron Mensajes');
}
}, function (error) {
res.send('Mensaje no encontrado');
});
} else{
mensaje.retriveCount(function (mensaje1) {
console.log('mensaje1', mensaje1);
if (mensaje1) {
mensaje.retrieveAll(function (mensaje2) {
console.log('mensaje2', mensaje2);
if (mensaje2) {
console.log(req.body);
alarma.retriveCount(function (alarma1) {
console.log('alarma1', alarma1);
if (alarma1) {
alarma.retrieveAll(function (alarma2) {
console.log('alarma2', alarma2);
if (alarma2) {
console.log(req.body);
var usuario = req.session.user.usuario;
var pass = req.session.user.pass;
var fechaCreacion = req.session.user.fechaCreacion;
res.render('web/index/errores.jade',{
alarmas1: alarma1,
alarmas2: alarma2,
mensajes: mensaje1,
mensajeria: mensaje2,
usuarios: usuario,
passs: pass,
fechaCreacions: fechaCreacion
});
}else {
res.send(401, 'No se encontraron Alarmas');
}
}, function (error) {
res.send('Alarma no encontrado');
});
} else {<|fim▁hole|> }
}, function (error) {
res.send('Alarma no encontrado');
});
}else {
res.send(401, 'No se encontraron Mensajes');
}
}, function (error) {
res.send('Mensaje no encontrado');
});
} else {
res.send(401, 'No se encontraron Mensajes');
}
}, function (error) {
res.send('Mensaje1 no encontrado');
});
}
});
/****************************************************************************/
router.get('/reportes', function (req, res) {
//************************************
var mensaje = Model.Mensaje.build();
//************************************
var alarma = Model.Alarma.build();
//************************************
if(!req.session.user){
res.render('web/index/404.jade');
}
var nivelUsuario = req.session.user.Nivel['nivel'];
console.log('soy nivelUsuario', nivelUsuario);
if(nivelUsuario =='admin'){
mensaje.retriveCount(function (mensaje1) {
console.log('mensaje1', mensaje1);
if (mensaje1) {
mensaje.retrieveAll(function (mensaje2) {
console.log('mensaje2', mensaje2);
if (mensaje2) {
console.log(req.body);
alarma.retriveCount(function (alarma1) {
console.log('alarma1', alarma1);
if (alarma1) {
alarma.retrieveAll(function (alarma2) {
console.log('alarma2', alarma2);
if (alarma2) {
console.log(req.body);
var usuario = req.session.user.usuario;
var pass = req.session.user.pass;
var fechaCreacion = req.session.user.fechaCreacion;
console.log('soy nivelUsuario', nivelUsuario);
res.render('web/index/reportes.jade',{
alarmas1: alarma1,
alarmas2: alarma2,
mensajes: mensaje1,
mensajeria: mensaje2,
usuarios: usuario,
passs: pass,
fechaCreacions: fechaCreacion
});
}else {
res.send(401, 'No se encontraron Alarmas');
}
}, function (error) {
res.send('Alarma no encontrado');
});
} else {
res.send(401, 'No se encontraron Alarmas');
}
}, function (error) {
res.send('Alarma no encontrado');
});
}else {
res.send(401, 'No se encontraron Mensajes');
}
}, function (error) {
res.send('Mensaje no encontrado');
});
} else {
res.send(401, 'No se encontraron Mensajes');
}
}, function (error) {
res.send('Mensaje1 no encontrado');
});
}else{
mensaje.retriveCount(function (mensaje1) {
console.log('mensaje1', mensaje1);
if (mensaje1) {
mensaje.retrieveAll(function (mensaje2) {
console.log('mensaje2', mensaje2);
if (mensaje2) {
console.log(req.body);
alarma.retriveCount(function (alarma1) {
console.log('alarma1', alarma1);
if (alarma1) {
alarma.retrieveAll(function (alarma2) {
console.log('alarma2', alarma2);
if (alarma2) {
console.log(req.body);
var usuario = req.session.user.usuario;
var pass = req.session.user.pass;
var fechaCreacion = req.session.user.fechaCreacion;
res.render('web/index/errores.jade',{
alarmas1: alarma1,
alarmas2: alarma2,
mensajes: mensaje1,
mensajeria: mensaje2,
usuarios: usuario,
passs: pass,
fechaCreacions: fechaCreacion
});
}else {
res.send(401, 'No se encontraron Alarmas');
}
}, function (error) {
res.send('Alarma no encontrado');
});
} else {
res.send(401, 'No se encontraron Alarmas');
}
}, function (error) {
res.send('Alarma no encontrado');
});
}else {
res.send(401, 'No se encontraron Mensajes');
}
}, function (error) {
res.send('Mensaje no encontrado');
});
} else {
res.send(401, 'No se encontraron Mensajes');
}
}, function (error) {
res.send('Mensaje1 no encontrado');
});
}
});
/* route that renders the feeder page; when rendering, the received serial value
(`enviar`) is passed to a view variable called `horas` */
router.get('/comedero', function(req, res) {
var mensaje = Model.Mensaje.build();
var alarma = Model.Alarma.build();
if(!req.session.user){
res.render('web/index/404.jade');
}
mensaje.retriveCount(function (mensaje1) {
console.log('mensaje1', mensaje1);
if (mensaje1) {
mensaje.retrieveAll(function (mensaje2) {
console.log('mensaje2', mensaje2);
if (mensaje2) {
alarma.retriveCount(function (alarma1) {
console.log('alarma1', alarma1);
if (alarma1) {
alarma.retrieveAll(function (alarma2) {
console.log('alarma2', alarma2);
if (alarma2) {
var usuario = req.session.user.usuario;
var pass = req.session.user.pass;
var fechaCreacion = req.session.user.fechaCreacion;
res.render('web/index/Comedero.jade',{
alarmas1: alarma1,
alarmas2: alarma2,
mensajes: mensaje1,
mensajeria: mensaje2,
niveles: nivelC,
horas: horasC,
pesoRacion: 2,
usuarios: usuario,
passs: pass,
fechaCreacions: fechaCreacion
});
}else {
res.send(401, 'No se encontraron Alarmas');
}
}, function (error) {
res.send('Alarma no encontrado');
});
} else {
res.send(401, 'No se encontraron Alarmas');
}
}, function (error) {
res.send('Alarma no encontrado');
});
}else {
res.send(401, 'No se encontraron Mensajes');
}
}, function (error) {
res.send('Mensajes no encontrado');
});
} else {
res.send(401, 'No se encontraron Mensajes');
}
}, function (error) {
res.send('Mensaje no encontrado');
});
});
//admin main page, administration panel
router.get('/principal', function (req, res) {
var mensaje = Model.Mensaje.build();
var stock = Model.Stock.build();
var consumo = Model.Consumo.build();
var pesaje = Model.Pesaje.build();
var muerte = Model.Muertes.build();
var extraviado = Model.Extraviado.build();
var sanitacion = Model.Sanitacion.build();
var vacunacion = Model.Vacunacion.build();
var ventas = Model.FacturaVenta.build();
//************************************
var alarma = Model.Alarma.build();
if(!req.session.user){
console.log('dentro');
res.render('web/index/404.jade');
}
leerCantidadMinima();
leerHerramienta();
if(niv <= "5"){
leerComederoMinima();
}
mensaje.retriveCount(function (mensaje1) {
console.log('mensaje1', mensaje1);
if (mensaje1) {
mensaje.retrieveAll(function (mensaje2) {
console.log('mensaje2', mensaje2);
if (mensaje2) {
stock.retrieveAll(function (stockQ) {
console.log('stockQ', stockQ);
if (stockQ) {
consumo.retrieveBar(function (consumobar) {
console.log('consumobar', consumobar);
if (consumobar) {
pesaje.retrieveLine2(function (pesaje2) {
console.log('pesaje2', pesaje2);
if (pesaje2) {
consumo.retrieveBar2(function (consumobar2) {
console.log('consumobar2', consumobar2);
if (consumobar2) {
consumo.retrieveBar3(function (consumobar3) {
console.log('consumobar3', consumobar3);
if (consumobar3) {
consumo.retrieveBar4(function (consumobar4) {
console.log('consumobar4', consumobar4);
if (consumobar4) {
consumo.retrieveBar5(function (consumobar5) {
console.log('consumobar5', consumobar5);
if (consumobar5) {
consumo.retrieveBar6(function (consumobar6) {
console.log('consumobar6', consumobar6);
if (consumobar6) {
consumo.retrieveBar7(function (consumobar7) {
console.log('consumobar7', consumobar7);
if (consumobar7) {
pesaje.retrieveLine3(function (pesaje3) {
console.log('pesaje3', pesaje3);
if (pesaje3) {
pesaje.retrieveLine4(function (pesaje4) {
console.log('pesaje4', pesaje4);
if (pesaje4) {
pesaje.retrieveLine5(function (pesaje5) {
console.log('pesaje5', pesaje5);
if (pesaje5) {
pesaje.retrieveLine6(function (pesaje6) {
console.log('pesaje6', pesaje6);
if (pesaje6) {
pesaje.retrieveLine7(function (pesaje7) {
console.log('pesaje7', pesaje7);
if (pesaje7) {
stock.retrieveAll2(function (stockN) {
console.log('stockN', stockN);
if (stockN) {
stock.retrieveAll3(function (stockL) {
console.log('stockL', stockL);
if (stockL) {
stock.retrieveAll4(function (stockO) {
console.log('stockO', stockO);
if (stockO) {
consumo.retrievePie(function (consumir) {
console.log('consumir', consumir);
if (consumir) {
consumo.retrievePie2(function (consumir2) {
console.log('consumir2', consumir2);
if (consumir2) {
stock.retrieveSAnimal2(function (animal2) {
console.log('animal2', animal2);
if (animal2) {
pesaje.retrieveLine(function (pesaje) {
console.log('pesaje', pesaje);
if (pesaje) {
muerte.retrieveSMuerte2(function (muertes) {
console.log('muertes', muertes);
if (muertes) {
extraviado.retrieveExtraviado(function (extraviado) {
console.log('extraviado', extraviado);
if (extraviado) {
sanitacion.retrieveSanitacion(function (sanitacion) {
console.log('sanitacion', sanitacion);
if (sanitacion) {
vacunacion.retrieveVacunacion(function (vacunacion) {
console.log('vacunacion', vacunacion);
if (vacunacion) {
ventas.retrieveVenta(function (ventas) {
console.log('ventas', ventas);
if (ventas) {
alarma.retriveCount(function (alarma1) {
console.log('alarma1', alarma1);
if (alarma1) {
alarma.retrieveAll(function (alarma2) {
console.log('alarma2', alarma2);
if (alarma2) {
console.log(req.body);
console.log(req.session.user);
var usuario = req.session.user.usuario;
var pass = req.session.user.pass;
var fechaCreacion = req.session.user.fechaCreacion;
res.render('web/index/PaginaPrincipal',{
usuarios: usuario,
passs: pass,
fechaCreacions: fechaCreacion,
mensajes: mensaje1,
mensajeria: mensaje2,
peso2: pesaje2,
peso3: pesaje3,
peso4: pesaje4,
peso5: pesaje5,
peso6: pesaje6,
peso7: pesaje7,
consumoBar: consumobar,
consumoBar2: consumobar2,
consumoBar3: consumobar3,
consumoBar4: consumobar4,
consumoBar5: consumobar5,
consumoBar6: consumobar6,
consumoBar7: consumobar7,
stock: stockQ,
Stock2: stockN,
Vtock3: stockL,
consumiendo: consumir,
Otock4: stockO,
consusal: consumir2,
animal2: animal2,
peso: pesaje,
muerted: muertes,
extraviados: extraviado,
sanitaciones: sanitacion,
vacunaciones: vacunacion,
alarmas1: alarma1,
alarmas2: alarma2,
ventass: ventas
});
}else {
res.send(401, 'No se encontraron Alarmas');
}
}, function (error) {
res.send('Alarma no encontrado');
});
} else {
res.send(401, 'No se encontraron Alarmas');
}
}, function (error) {
res.send('Alarma no encontrado');
});
} else {
res.send(401, 'No se Encontraron Ventas de Animales');
}
}, function (error) {
res.send('Ventas no encontrado');
});
} else {
res.send(401, 'No se Encontraron Vacunacion de Animales');
}
}, function (error) {
res.send('Vacunacion no encontrado');
});
} else {
res.send(401, 'No se Encontraron Sanitacion de Animales');
}
}, function (error) {
res.send('Sanitacion no encontrado');
});
} else {
res.send(401, 'No se Encontraron Extraviados de Animales');
}
}, function (error) {
res.send('Extraviados no encontrado');
});
} else {
res.send(401, 'No se Encontraron Muertes de Animales');
}
}, function (error) {
res.send('Muerte no encontrado');
});
} else {
res.send(401, 'No se Encontraron Pesajes de Animales');
}
}, function (error) {
res.send('Pesaje no encontrado');
});
} else {
res.send(401, 'No se Encontraron Stock de Animales');
}
}, function (error) {
res.send('Stock no encontrado');
});
} else {
res.send(401, 'No se Encontraron Consumos de Sal Mineral');
}
}, function (error) {
res.send('Consumo de sal no encontrado');
});
} else {
res.send(401, 'No se Encontraron Consumos de Balanceados');
}
}, function (error) {
res.send('Consumo no encontrado');
});
} else {
res.send(401, 'No se Encontraron Insumos de Medicamento');
}
}, function (error) {
res.send('Insumo de Medicamento no encontrado');
});
} else {
res.send(401, 'No se Encontraron Insumos de Medicamento');
}
}, function (error) {
res.send('Insumo de Medicamento no encontrado');
});
} else {
res.send(401, 'No se Encontraron Insumos de Sal');
}
}, function (error) {
res.send('Insumo de Sal no encontrado');
});
} else {
res.send(401, 'No se Encontraron Pesajes7');
}
}, function (error) {
res.send('Pesaje7 no encontrado');
});
} else {
res.send(401, 'No se Encontraron Pesajes6');
}
}, function (error) {
res.send('Pesaje6 no encontrado');
});
} else {
res.send(401, 'No se Encontraron Pesajes 5');
}
}, function (error) {
res.send('Pesaje5 no encontrado');
});
} else {
res.send(401, 'No se Encontraron Pesajes4');
}
}, function (error) {
res.send('Pesaje4 no encontrado');
});
} else {
res.send(401, 'No se Encontraron Pesajes3');
}
}, function (error) {
res.send('Pesaje3 no encontrado');
});
} else {
res.send(401, 'No se Encontraron Consumos7');
}
}, function (error) {
res.send('ConsumoBar7 no encontrado');
});
} else {
res.send(401, 'No se Encontraron Consumos6');
}
}, function (error) {
res.send('ConsumoBar6 no encontrado');
});
} else {
res.send(401, 'No se Encontraron Consumos5');
}
}, function (error) {
res.send('ConsumoBar5 no encontrado');
});
} else {
res.send(401, 'No se Encontraron Consumos4');
}
}, function (error) {
res.send('ConsumoBar4 no encontrado');
});
} else {
res.send(401, 'No se Encontraron Consumos3');
}
}, function (error) {
res.send('ConsumoBar3 no encontrado');
});
} else {
res.send(401, 'No se Encontraron Consumos2');
}
}, function (error) {
res.send('ConsumoBar2 no encontrado');
});
} else {
res.send(401, 'No se Encontraron Pesajes');
}
}, function (error) {
res.send('Pesaje2 no encontrado');
});
} else {
res.send(401, 'No se Encontraron Consumos');
}
}, function (error) {
res.send('ConsumoBar no encontrado');
});
}else {
res.send(401, 'No se encontraron Mensajes');
}
}, function (error) {
res.send('Mensaje no encontrado');
});
} else {
res.send(401, 'No se encontraron Mensajes');
}
}, function (error) {
res.send('Mensaje no encontrado');
});
} else {
res.send(401, 'No se encontraron Mensajes');
}
}, function (error) {
res.send('Mensaje no encontrado');
});
});
//---------------------------alarms
function leerCantidadMinima(){
var stockA = Model.Stock.build();
var alarmaA = Model.Alarma.build();
stockA.retrieveAlarma(function (stock) {
if (stock) {
console.log("soy stock*********", stock[0].Insumo.nombreInsumo);
alarmaA.retrieveByAlarma(stock[0].Insumo.nombreInsumo, function (alarma1) {
if (alarma1) {
console.log("ya existe la alarma");
}else {
console.log("no existe la alarma, guardando...", stock[0].Insumo.nombreInsumo);
var alarma2 = Model.Alarma.build({
nombre: "Cantidad Minima Alcanzada",
alarma: stock[0].Insumo.nombreInsumo
});
alarma2.add(function (success) {
console.log("Se guardo alarma");
},
function (err) {
console.log(err);
});
}
}, function (error) {
console.log('Alarma no encontrado');
});
}else {
console.log(401, 'No se encontraron Alarmas');
}
}, function (error) {
console.log('Cantidad Minima no encontrado');
});
}
function leerHerramienta(){
var herramienta = Model.Herramienta.build();
var alarmaA = Model.Alarma.build();
var mantenimiento = new Date().toJSON().slice(0,10);
console.log("soy mantenimiento*********", mantenimiento);
herramienta.retrieveByAlarma(mantenimiento, function (herramientasq) {
if (herramientasq) {
console.log("soy herramienta*********", herramientasq.nombre);
alarmaA.retrieveByAlarma(herramientasq.nombre, function (alarma1) {
if (alarma1) {
console.log("ya existe la alarma");
}else {
console.log("no existe la alarma, guardando...", herramientasq.nombre);
var alarma2 = Model.Alarma.build({
nombre: "Realizar Mantenimiento",
alarma: herramientasq.nombre
});
alarma2.add(function (success) {
console.log("Se guardo la alarma");
},
function (err) {
console.log(err);
});
}
}, function (error) {
console.log('Alarma no encontrado');
});
}else {
console.log(401, 'No se encontraron Herramientas');
}
}, function (error) {
console.log('Herramienta no encontrado');
});
}
function leerComederoMinima(){
var stockA = Model.Stock.build();
var alarmaA = Model.Alarma.build();
stockA.retrieveAlarma(function (stock) {
if (stock) {
var comedero = 'Comedero Vacio'
alarmaA.retrieveByAlarma(comedero, function (alarma1) {
if (alarma1) {
console.log("ya existe la alarma");
}else {
console.log("no existe la alarma, guardando...", comedero);
var alarma2 = Model.Alarma.build({
nombre: "Nivel Mínima Alcanzado",
alarma: comedero
});
alarma2.add(function (success) {
console.log("Se guardo alarma");
},
function (err) {
console.log(err);
});
}
}, function (error) {
console.log('Alarma no encontrado');
});
}else {
console.log(401, 'No se encontraron Alarmas');
}
}, function (error) {
console.log('Cantidad Minima no encontrado');
});
}
module.exports = router;<|fim▁end|> | res.send(401, 'No se encontraron Alarmas'); |
<|file_name|>goftphelp.go<|end_file_name|><|fim▁begin|>package goftp
import (
"fmt"
"strings"
)
//Help messages for the ftp client commands
var FTP_CLIENT_CMD_HELP = map[string]string{
FCC_HELP: "print local help information",
FCC_QUESTION_MARK: "print local help information",
FCC_CD: "change remote working directory",
FCC_LS: "list contents of remote path",
FCC_LCD: "change local working directory",
FCC_OPEN: "connect to remote ftp server",
FCC_USER: "send new user information",
FCC_USAGE: "show usage of ftp command",
}
//Usage strings for the ftp client commands.
//Parameters wrapped in `[]` are optional.
var FTP_CLIENT_CMD_USAGE = map[string]string{
FCC_HELP: "help [cmd1],[cmd2],...",
FCC_QUESTION_MARK: "? [cmd1],[cmd2],...",
FCC_CD: "cd remote_dir",
FCC_LS: "ls [remote_dir|remote_file] [local_output_file]",
FCC_LCD: "lcd [local_directory]",
FCC_OPEN: "open remote_host [port]",
FCC_USER: "user username [password] [account]",
}
type GoFtpClientHelp struct {
}
func (this *GoFtpClientHelp) version() {
fmt.Println("GoFtpClient v1.0\r\n多科学堂出品\r\nhttps://github.com/jemygraw/goftp")
}
func (this *GoFtpClientHelp) help() {
}
func (this *GoFtpClientHelp) cmdHelp(cmdNames ...string) {
for _, cmdName := range cmdNames {
cmdName = strings.ToLower(cmdName)
if cmdHelpDoc, ok := FTP_CLIENT_CMD_HELP[cmdName]; ok {
fmt.Println(cmdName, "\t", cmdHelpDoc)
} else {
fmt.Println("?Invalid help command `", cmdName, "'")
}
}
}
func (this *GoFtpClientHelp) cmdUsage(cmdNames ...string) {
for _, cmdName := range cmdNames {
cmdName = strings.ToLower(cmdName)
if cmdUsageDoc, ok := FTP_CLIENT_CMD_USAGE[cmdName]; ok {
fmt.Println("Usage:", cmdUsageDoc)
} else {
fmt.Println("?Invalid usage command `", cmdName, "'")
}
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>test_www_service.py<|end_file_name|><|fim▁begin|># This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
import calendar
import datetime
import jwt
import mock
from twisted.cred import strcred
from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse
from twisted.internet import defer
from twisted.trial import unittest
from twisted.web._auth.wrapper import HTTPAuthSessionWrapper
from twisted.web.server import Request
from buildbot.test.unit import test_www_hooks_base
from buildbot.test.util import www
from buildbot.www import auth
from buildbot.www import change_hook
from buildbot.www import resource
from buildbot.www import rest
from buildbot.www import service
class NeedsReconfigResource(resource.Resource):
needsReconfig = True
reconfigs = 0
def reconfigResource(self, config):
NeedsReconfigResource.reconfigs += 1
class Test(www.WwwTestMixin, unittest.TestCase):
def setUp(self):
self.master = self.make_master(url='h:/a/b/')
self.svc = self.master.www = service.WWWService()
self.svc.setServiceParent(self.master)
def makeConfig(self, **kwargs):<|fim▁hole|> new_config = mock.Mock()
new_config.www = w
new_config.buildbotURL = 'h:/'
self.master.config = new_config
return new_config
def test_reconfigService_no_port(self):
new_config = self.makeConfig()
d = self.svc.reconfigServiceWithBuildbotConfig(new_config)
@d.addCallback
def check(_):
self.assertEqual(self.svc.site, None)
return d
@defer.inlineCallbacks
def test_reconfigService_reconfigResources(self):
new_config = self.makeConfig(port=8080)
self.patch(rest, 'RestRootResource', NeedsReconfigResource)
NeedsReconfigResource.reconfigs = 0
# first time, reconfigResource gets called along with setupSite
yield self.svc.reconfigServiceWithBuildbotConfig(new_config)
self.assertEqual(NeedsReconfigResource.reconfigs, 1)
# and the next time, setupSite isn't called, but reconfigResource is
yield self.svc.reconfigServiceWithBuildbotConfig(new_config)
self.assertEqual(NeedsReconfigResource.reconfigs, 2)
def test_reconfigService_port(self):
new_config = self.makeConfig(port=20)
d = self.svc.reconfigServiceWithBuildbotConfig(new_config)
@d.addCallback
def check(_):
self.assertNotEqual(self.svc.site, None)
self.assertNotEqual(self.svc.port_service, None)
self.assertEqual(self.svc.port, 20)
return d
def test_reconfigService_expiration_time(self):
new_config = self.makeConfig(port=80, cookie_expiration_time=datetime.timedelta(minutes=1))
d = self.svc.reconfigServiceWithBuildbotConfig(new_config)
@d.addCallback
def check(_):
self.assertNotEqual(self.svc.site, None)
self.assertNotEqual(self.svc.port_service, None)
self.assertEqual(service.BuildbotSession.expDelay, datetime.timedelta(minutes=1))
return d
def test_reconfigService_port_changes(self):
new_config = self.makeConfig(port=20)
d = self.svc.reconfigServiceWithBuildbotConfig(new_config)
@d.addCallback
def reconfig(_):
newer_config = self.makeConfig(port=999)
return self.svc.reconfigServiceWithBuildbotConfig(newer_config)
@d.addCallback
def check(_):
self.assertNotEqual(self.svc.site, None)
self.assertNotEqual(self.svc.port_service, None)
self.assertEqual(self.svc.port, 999)
return d
def test_reconfigService_port_changes_to_none(self):
new_config = self.makeConfig(port=20)
d = self.svc.reconfigServiceWithBuildbotConfig(new_config)
@d.addCallback
def reconfig(_):
newer_config = self.makeConfig()
return self.svc.reconfigServiceWithBuildbotConfig(newer_config)
@d.addCallback
def check(_):
# (note the site sticks around)
self.assertEqual(self.svc.port_service, None)
self.assertEqual(self.svc.port, None)
return d
def test_setupSite(self):
self.svc.setupSite(self.makeConfig())
site = self.svc.site
# check that it has the right kind of resources attached to its
# root
root = site.resource
req = mock.Mock()
self.assertIsInstance(root.getChildWithDefault(b'api', req),
rest.RestRootResource)
def test_setupSiteWithProtectedHook(self):
checker = InMemoryUsernamePasswordDatabaseDontUse()
checker.addUser("guest", "password")
self.svc.setupSite(self.makeConfig(
change_hook_dialects={'base': True},
change_hook_auth=[checker]))
site = self.svc.site
# check that it has the right kind of resources attached to its
# root
root = site.resource
req = mock.Mock()
self.assertIsInstance(root.getChildWithDefault(b'change_hook', req),
HTTPAuthSessionWrapper)
@defer.inlineCallbacks
def test_setupSiteWithHook(self):
new_config = self.makeConfig(
change_hook_dialects={'base': True})
self.svc.setupSite(new_config)
site = self.svc.site
# check that it has the right kind of resources attached to its
# root
root = site.resource
req = mock.Mock()
ep = root.getChildWithDefault(b'change_hook', req)
self.assertIsInstance(ep,
change_hook.ChangeHookResource)
# not yet configured
self.assertEqual(ep.dialects, {})
yield self.svc.reconfigServiceWithBuildbotConfig(new_config)
# now configured
self.assertEqual(ep.dialects, {'base': True})
rsrc = self.svc.site.resource.getChildWithDefault(b'change_hook', mock.Mock())
path = b'/change_hook/base'
request = test_www_hooks_base._prepare_request({})
self.master.addChange = mock.Mock()
yield self.render_resource(rsrc, path, request=request)
self.master.addChange.assert_called()
@defer.inlineCallbacks
def test_setupSiteWithHookAndAuth(self):
fn = self.mktemp()
with open(fn, 'w') as f:
f.write("user:pass")
new_config = self.makeConfig(
port=8080,
plugins={},
change_hook_dialects={'base': True},
change_hook_auth=[strcred.makeChecker("file:" + fn)])
self.svc.setupSite(new_config)
yield self.svc.reconfigServiceWithBuildbotConfig(new_config)
rsrc = self.svc.site.resource.getChildWithDefault(b'', mock.Mock())
res = yield self.render_resource(rsrc, b'')
self.assertIn(b'{"type": "file"}', res)
rsrc = self.svc.site.resource.getChildWithDefault(
b'change_hook', mock.Mock())
res = yield self.render_resource(rsrc, b'/change_hook/base')
# as UnauthorizedResource is in private namespace, we cannot use
# assertIsInstance :-(
self.assertIn('UnauthorizedResource', repr(res))
class TestBuildbotSite(unittest.SynchronousTestCase):
SECRET = 'secret'
def setUp(self):
self.site = service.BuildbotSite(None, "logs", 0, 0)
self.site.setSessionSecret(self.SECRET)
def test_getSession_from_bad_jwt(self):
""" if the cookie is bad (maybe from previous version of buildbot),
then we should raise KeyError for consumption by caller,
and log the JWT error
"""
self.assertRaises(KeyError, self.site.getSession, "xxx")
self.flushLoggedErrors(jwt.exceptions.DecodeError)
def test_getSession_from_correct_jwt(self):
payload = {'user_info': {'some': 'payload'}}
uid = jwt.encode(payload, self.SECRET, algorithm=service.SESSION_SECRET_ALGORITHM)
session = self.site.getSession(uid)
self.assertEqual(session.user_info, {'some': 'payload'})
def test_getSession_from_expired_jwt(self):
# expired one week ago
exp = datetime.datetime.utcnow() - datetime.timedelta(weeks=1)
exp = calendar.timegm(datetime.datetime.timetuple(exp))
payload = {'user_info': {'some': 'payload'}, 'exp': exp}
uid = jwt.encode(payload, self.SECRET, algorithm=service.SESSION_SECRET_ALGORITHM)
self.assertRaises(KeyError, self.site.getSession, uid)
def test_getSession_with_no_user_info(self):
payload = {'foo': 'bar'}
uid = jwt.encode(payload, self.SECRET, algorithm=service.SESSION_SECRET_ALGORITHM)
self.assertRaises(KeyError, self.site.getSession, uid)
def test_makeSession(self):
session = self.site.makeSession()
self.assertEqual(session.user_info, {'anonymous': True})
def test_updateSession(self):
session = self.site.makeSession()
class FakeChannel(object):
transport = None
def isSecure(self):
return False
request = Request(FakeChannel(), False)
request.sitepath = [b"bb"]
session.updateSession(request)
self.assertEqual(len(request.cookies), 1)
name, value = request.cookies[0].split(b";")[0].split(b"=")
decoded = jwt.decode(value, self.SECRET,
algorithm=service.SESSION_SECRET_ALGORITHM)
self.assertEqual(decoded['user_info'], {'anonymous': True})
self.assertIn('exp', decoded)<|fim▁end|> | w = dict(port=None, auth=auth.NoAuth(), logfileName='l')
w.update(kwargs) |
<|file_name|>ai_player.rs<|end_file_name|><|fim▁begin|>//! Provides `game::IsPlayer<::OtherAction>` types.
use {Result, Action};
use rand::thread_rng;
use rand::distributions::{IndependentSample, Range};
use rayon::prelude::*;
use reversi::{board, turn, game, Side, ReversiError};
use reversi::board::Coord;
use std::cmp::Ordering;
const RANDOMNESS: f64 = 0.05f64;
const WEAK: u32 = 100;
const MEDIUM: u32 = 10000;
const STRONG: u32 = 1000000;
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Score {
Running(f64),
Ended(i16),
}
impl PartialOrd for Score {
fn partial_cmp(&self, other: &Score) -> Option<Ordering> {
if match (*self, *other) {
(Score::Running(val1), Score::Running(val2)) => val1 > val2,
(Score::Running(val1), Score::Ended(scr2)) => scr2 < 0i16 || (scr2 == 0i16 && val1 > 0f64),
(Score::Ended(scr1), Score::Running(val2)) => scr1 > 0i16 || (scr1 == 0i16 && val2 < 0f64),
(Score::Ended(scr1), Score::Ended(scr2)) => scr1 > scr2,
} {
Some(Ordering::Greater)
} else {
Some(Ordering::Less)
}
}
}
impl Eq for Score {}
impl Ord for Score {
fn cmp(&self, other: &Score) -> Ordering {
if *self == *other {
Ordering::Equal
} else {
self.partial_cmp(other).expect("Should be ordered")
}
}
}
pub enum AiPlayer {
Weak,
Medium,
Strong,
}
impl game::IsPlayer<::OtherAction> for AiPlayer {
/// Calls `find_best_move` with suitable parameters
fn make_move(&self, turn: &turn::Turn) -> Result<Action> {
Ok(game::PlayerAction::Move(try!(match *self {
AiPlayer::Weak => AiPlayer::find_best_move(turn, WEAK),
AiPlayer::Medium => AiPlayer::find_best_move(turn, MEDIUM),
AiPlayer::Strong => AiPlayer::find_best_move(turn, STRONG),
})))
}
}
impl AiPlayer {
/// Find best moves among the legal ones.
/// Each possibility is evaluated by a method chosen according to the value of `self` and compared with the others.
pub fn find_best_move(turn: &turn::Turn, comps: u32) -> Result<board::Coord> {
// If everything is alright, turn shouldn't be ended
let side = turn.get_state()
.ok_or_else(|| ReversiError::EndedGame(*turn))?;
// Finds all possible legal moves and records their coordinates
let mut moves: Vec<Coord> = Vec::new();
for row in 0..board::BOARD_SIZE {
for col in 0..board::BOARD_SIZE {
let coord = board::Coord::new(row, col);
if turn.check_move(coord).is_ok() {
moves.push(coord);
}
}
}
match moves.len() {
0 => unreachable!("Game is not ended!"), // Game can't be ended
1 => Ok(moves[0]), // If there is only one possible move, there's no point in evaluating it.
num_moves => {
// Each move has to be evaluated in order to find the best one
let moves_and_scores = moves
.par_iter()
.map(|&coord| {
let mut turn_after_move = *turn;
turn_after_move
.make_move(coord)
.expect("The move was checked, but something went wrong!");
let score = AiPlayer::ai_eval(&turn_after_move, comps / num_moves as u32)
.expect("Something went wrong with `AiPlayer::ai_eval`!");
(coord, score)
});
let best_move_and_score = match side {
Side::Dark => moves_and_scores.min_by_key(|&(_, score)| score),
Side::Light => moves_and_scores.max_by_key(|&(_, score)| score),
}
.expect("No best move found!");
Ok(best_move_and_score.0)
}
}
}
fn ai_eval(turn: &turn::Turn, comps: u32) -> Result<Score> {
if turn.get_state().is_none() {
Ok(Score::Ended(turn.get_score_diff()))
} else {
let mut score = try!(AiPlayer::ai_eval_with_leftover(turn, comps)).0;
// Add some randomness
let between = Range::new(-RANDOMNESS, RANDOMNESS);
let mut rng = thread_rng();
score = match score {
Score::Running(val) => Score::Running(val * (1.0 + between.ind_sample(&mut rng))),
_ => score,
};
// Done, return
Ok(score)<|fim▁hole|>
fn ai_eval_with_leftover(turn: &turn::Turn, comps: u32) -> Result<(Score, u32)> {
// If everything is alright, turn shouldn't be ended
// assert!(!this_turn.is_endgame());
// Finds all possible legal moves and records their coordinates
let mut moves: Vec<Coord>;
let mut turn = *turn;
loop {
moves = Vec::new();
for row in 0..board::BOARD_SIZE {
for col in 0..board::BOARD_SIZE {
let coord = board::Coord::new(row, col);
if turn.check_move(coord).is_ok() {
moves.push(coord);
}
}
}
match moves.len() {
0 => unreachable!("Endgame should have been detected earlier: here it's a waste of computations!"),
1 => {
turn.make_move(moves[0])?; //.expect("There is one move and it should be legit");
if turn.get_state().is_none() {
return Ok((Score::Ended(turn.get_score_diff()), comps));
}
}
_num_moves => {
break;
// let scores = moves.par_iter().map(|&coord| {
// let mut turn_after_move = turn;
// turn_after_move.make_move(coord)
// .expect("The move was checked, but something went wrong!");
// match turn_after_move.get_state() {
// None => Score::Ended(turn_after_move.get_score_diff()),
// Some(_) if leftover < turns_left => Score::Running(try!(AiPlayer::heavy_eval(&turn_after_move))),
// _ => {
// let new_comps = leftover / turns_left; // since leftover >= turns_left, then new_comps >= 1
// let new_score_leftover = try!(AiPlayer::ai_eval_with_leftover(&turn_after_move, new_comps));
// leftover += new_score_leftover.1;
// leftover -= new_comps; // since leftover >= turns_left, leftover - newcomps >= 0
// new_score_leftover.0
// }
// }
// });
// let side = turn.get_state().ok_or_else(|| ReversiError::EndedGame(turn))?;
// return match side {
// Side::Dark => scores.min(),
// Side::Light => scores.max(),
// }.ok_or_else(|| panic!("No best move found!"))
}
}
}
// If everything is alright, turn shouldn't be ended
// assert!(!turn.is_endgame());
let mut scores: Vec<Score> = Vec::new();
let mut leftover = comps.checked_sub(moves.len() as u32).unwrap_or(0);
while let Some(coord) = moves.pop() {
let mut turn_after_move = turn;
turn_after_move.make_move(coord)?;
let turns_left = (moves.len() + 1) as u32;
scores.push(match turn_after_move.get_state() {
None => Score::Ended(turn_after_move.get_score_diff()),
Some(_) if leftover < turns_left => Score::Running(try!(AiPlayer::heavy_eval(&turn_after_move))),
_ => {
let new_comps = leftover / turns_left; // since leftover >= turns_left, then new_comps >= 1
let new_score_leftover = try!(AiPlayer::ai_eval_with_leftover(&turn_after_move, new_comps));
leftover += new_score_leftover.1;
leftover -= new_comps; // since leftover >= turns_left, leftover - newcomps >= 0
new_score_leftover.0
}
});
}
Ok((match turn.get_state() {
Some(Side::Dark) => scores.into_iter().min().expect("Why should this fail?"),
Some(Side::Light) => scores.into_iter().max().expect("Why should this fail?"),
None => unreachable!("turn is ended but it should not be"),
},
leftover))
}
fn heavy_eval(turn: &turn::Turn) -> Result<f64> {
// Weights
const CORNER_BONUS: u16 = 50;
const ODD_CORNER_MALUS: u16 = 20;
const EVEN_CORNER_BONUS: u16 = 10;
const ODD_MALUS: u16 = 7; // x2
const EVEN_BONUS: u16 = 3; // x2
// ------------------------ Sum = 100
let sides: [(Coord, Coord, Coord, Coord, Coord, Coord, Coord); 4] = [(/* NW corner */
Coord::new(0, 0),
Coord::new(0, 1),
Coord::new(1, 1),
Coord::new(0, 2),
Coord::new(2, 2),
Coord::new(1, 0),
Coord::new(2, 0)),
(/* NE corner */
Coord::new(0, 7),
Coord::new(1, 7),
Coord::new(1, 6),
Coord::new(2, 7),
Coord::new(2, 5),
Coord::new(0, 6),
Coord::new(0, 5)),
(/* SW corner */
Coord::new(7, 0),
Coord::new(6, 0),
Coord::new(6, 1),
Coord::new(5, 0),
Coord::new(5, 2),
Coord::new(7, 1),
Coord::new(7, 2)),
(/* SE corner */
Coord::new(7, 7),
Coord::new(6, 7),
Coord::new(6, 6),
Coord::new(5, 7),
Coord::new(5, 5),
Coord::new(7, 6),
Coord::new(7, 5))];
let mut score_light: u16 = 0;
let mut score_dark: u16 = 0;
for &(corner, odd, odd_corner, even, even_corner, counter_odd, counter_even) in &sides {
if let Some(disk) = *turn.get_cell(corner)? {
match disk.get_side() {
Side::Light => score_light += CORNER_BONUS,
Side::Dark => score_dark += CORNER_BONUS,
}
} else {
for &(coord_odd, coord_even) in &[(odd, even), (counter_odd, counter_even)] {
if let Some(disk) = *turn.get_cell(coord_odd)? {
match disk.get_side() {
Side::Light => score_dark += ODD_MALUS,
Side::Dark => score_light += ODD_MALUS,
}
} else if let Some(disk) = *turn.get_cell(coord_even)? {
match disk.get_side() {
Side::Light => score_light += EVEN_BONUS,
Side::Dark => score_dark += EVEN_BONUS,
}
}
}
if let Some(disk) = *turn.get_cell(odd_corner)? {
match disk.get_side() {
Side::Light => score_dark += ODD_CORNER_MALUS,
Side::Dark => score_light += ODD_CORNER_MALUS,
}
} else if let Some(disk) = *turn.get_cell(even_corner)? {
match disk.get_side() {
Side::Light => score_light += EVEN_CORNER_BONUS,
Side::Dark => score_dark += EVEN_CORNER_BONUS,
}
}
}
}
Ok(score_light as f64 - score_dark as f64)
}
}<|fim▁end|> | }
} |
<|file_name|>assets.rs<|end_file_name|><|fim▁begin|>// Copyright (C) 2013 - 2021 Tim Düsterhus
// Copyright (C) 2021 Maximilian Mader
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
// SPDX-License-Identifier: AGPL-3.0-or-later
use crate::http::{
error::{Error, Error::FileNotFound},
header::not_modified,
};
use actix_web::{
get,
http::header::{CacheControl, CacheDirective, ETag},
web, HttpRequest, HttpResponse, Responder,
};
#[get("/favicon.ico")]
pub async fn favicon(req: HttpRequest) -> impl Responder {
serve_asset(req, "favicon.ico")
}
#[get("/static/{filename:.+}")]
pub async fn assets(req: HttpRequest, filename: web::Path<String>) -> impl Responder {
serve_asset(req, &filename)
}
<|fim▁hole|> crate::SOURCE_FILES
.get(file.as_str())
.map(|source_file| {
let etag = ETag::from(source_file);
if not_modified(&req, Some(&etag), None) {
return HttpResponse::NotModified()
.insert_header(etag)
.insert_header(CacheControl(vec![CacheDirective::Public]))
.body(()); // None
}
let content_type = if filename.ends_with(".js.map") {
"application/json".to_owned()
} else {
mime_guess::from_path(filename)
.first_or_octet_stream()
.to_string()
};
HttpResponse::Ok()
.insert_header(etag)
.insert_header(CacheControl(vec![CacheDirective::Public]))
.content_type(content_type)
.body(source_file.contents)
})
.ok_or(FileNotFound(req, file))
}<|fim▁end|> | fn serve_asset(req: HttpRequest, filename: &str) -> Result<impl Responder, Error> {
let file = format!("assets/static/{}", filename);
|
<|file_name|>opt_vec.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
*
* Defines a type OptVec<T> that can be used in place of ~[T].
* OptVec avoids the need for allocation for empty vectors.
* OptVec implements the iterable interface as well as
* other useful things like `push()` and `len()`.
*/
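// Illustrative usage (editor's sketch, not part of the original source): a hedged
// example of how an OptVec might be exercised through the `push`, `len` and
// `is_empty` methods defined below; the `int` element type is only an assumption.
//
//     let mut v: OptVec<int> = Empty;
//     assert!(v.is_empty());
//     v.push(10);            // Empty is promoted to Vec(~[10])
//     assert!(v.len() == 1);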
use core::prelude::*;
use core::old_iter;
use core::old_iter::BaseIter;
#[deriving(Encodable, Decodable)]
pub enum OptVec<T> {
Empty,
Vec(~[T])
}
pub fn with<T>(t: T) -> OptVec<T> {
Vec(~[t])
}
pub fn from<T>(t: ~[T]) -> OptVec<T> {
if t.len() == 0 {
Empty
} else {
Vec(t)
}
}
impl<T> OptVec<T> {
fn push(&mut self, t: T) {
match *self {
Vec(ref mut v) => {
v.push(t);
return;
}
Empty => {}
}
// FIXME(#5074): flow insensitive means we can't move
// assignment inside `match`
*self = Vec(~[t]);
}
fn map<U>(&self, op: &fn(&T) -> U) -> OptVec<U> {
match *self {
Empty => Empty,
Vec(ref v) => Vec(v.map(op))
}
}
fn get<'a>(&'a self, i: uint) -> &'a T {
match *self {
Empty => fail!("Invalid index %u", i),
Vec(ref v) => &v[i]
}
}
fn is_empty(&self) -> bool {
self.len() == 0
}
fn len(&self) -> uint {
match *self {
Empty => 0,
Vec(ref v) => v.len()
}
}
}
pub fn take_vec<T>(v: OptVec<T>) -> ~[T] {
match v {
Empty => ~[],
Vec(v) => v
}
}
impl<T:Copy> OptVec<T> {
fn prepend(&self, t: T) -> OptVec<T> {
let mut v0 = ~[t];
match *self {
Empty => {}
Vec(ref v1) => { v0.push_all(*v1); }
}
return Vec(v0);
}
fn push_all<I: BaseIter<T>>(&mut self, from: &I) {
for from.each |e| {
self.push(copy *e);
}
}
#[inline(always)]
fn mapi_to_vec<B>(&self, op: &fn(uint, &T) -> B) -> ~[B] {
let mut index = 0;
old_iter::map_to_vec(self, |a| {
let i = index;
index += 1;
op(i, a)
})
}
}
impl<A:Eq> Eq for OptVec<A> {
fn eq(&self, other: &OptVec<A>) -> bool {
// Note: cannot use #[deriving(Eq)] here because
// (Empty, Vec(~[])) ought to be equal.
match (self, other) {
(&Empty, &Empty) => true,
(&Empty, &Vec(ref v)) => v.is_empty(),
(&Vec(ref v), &Empty) => v.is_empty(),
(&Vec(ref v1), &Vec(ref v2)) => *v1 == *v2
}
}
fn ne(&self, other: &OptVec<A>) -> bool {
!self.eq(other)
}
}
impl<A> BaseIter<A> for OptVec<A> {
#[cfg(stage0)]
fn each(&self, blk: &fn(v: &A) -> bool) {
match *self {
Empty => {}
Vec(ref v) => v.each(blk)
}
}
#[cfg(not(stage0))]
fn each(&self, blk: &fn(v: &A) -> bool) -> bool {
match *self {
Empty => true,
Vec(ref v) => v.each(blk)
}
}
fn size_hint(&self) -> Option<uint> {
Some(self.len())
}
}
impl<A> old_iter::ExtendedIter<A> for OptVec<A> {
#[inline(always)]
#[cfg(stage0)]
fn eachi(&self, blk: &fn(v: uint, v: &A) -> bool) {
old_iter::eachi(self, blk)
}
#[inline(always)]
#[cfg(not(stage0))]
fn eachi(&self, blk: &fn(v: uint, v: &A) -> bool) -> bool {
old_iter::eachi(self, blk)
}
#[inline(always)]
fn all(&self, blk: &fn(&A) -> bool) -> bool {
old_iter::all(self, blk)
}
#[inline(always)]
fn any(&self, blk: &fn(&A) -> bool) -> bool {
old_iter::any(self, blk)
}
#[inline(always)]
fn foldl<B>(&self, b0: B, blk: &fn(&B, &A) -> B) -> B {
old_iter::foldl(self, b0, blk)
}
#[inline(always)]
fn position(&self, f: &fn(&A) -> bool) -> Option<uint> {
old_iter::position(self, f)
}
#[inline(always)]
fn map_to_vec<B>(&self, op: &fn(&A) -> B) -> ~[B] {
old_iter::map_to_vec(self, op)
}
#[inline(always)]
fn flat_map_to_vec<B,IB:BaseIter<B>>(&self, op: &fn(&A) -> IB)
-> ~[B] {
old_iter::flat_map_to_vec(self, op)<|fim▁hole|>}
impl<A: Eq> old_iter::EqIter<A> for OptVec<A> {
#[inline(always)]
fn contains(&self, x: &A) -> bool { old_iter::contains(self, x) }
#[inline(always)]
fn count(&self, x: &A) -> uint { old_iter::count(self, x) }
}
impl<A: Copy> old_iter::CopyableIter<A> for OptVec<A> {
#[inline(always)]
fn filter_to_vec(&self, pred: &fn(&A) -> bool) -> ~[A] {
old_iter::filter_to_vec(self, pred)
}
#[inline(always)]
fn to_vec(&self) -> ~[A] { old_iter::to_vec(self) }
#[inline(always)]
fn find(&self, f: &fn(&A) -> bool) -> Option<A> {
old_iter::find(self, f)
}
}
impl<A: Copy+Ord> old_iter::CopyableOrderedIter<A> for OptVec<A> {
#[inline(always)]
fn min(&self) -> A { old_iter::min(self) }
#[inline(always)]
fn max(&self) -> A { old_iter::max(self) }
}<|fim▁end|> | }
|
<|file_name|>rawverse.cpp<|end_file_name|><|fim▁begin|>/******************************************************************************
*
* rawverse.cpp - code for class 'RawVerse' - a module that reads raw text
* files: ot and nt using indexes ??.bks ??.cps ??.vss
* and provides lookup and parsing functions based on
* class VerseKey
*
* $Id$
*
* Copyright 1997-2013 CrossWire Bible Society (http://www.crosswire.org)
* CrossWire Bible Society
* P. O. Box 2528
* Tempe, AZ 85280-2528
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation version 2.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
*/
#include <ctype.h>
#include <stdio.h>
#include <fcntl.h>
#include <errno.h>
#include <utilstr.h>
#include <rawverse.h>
#include <versekey.h>
#include <sysdata.h>
#include <filemgr.h>
#include <swbuf.h>
SWORD_NAMESPACE_START
/******************************************************************************
* RawVerse Statics
*/
int RawVerse::instance = 0;
const char RawVerse::nl = '\n';
/******************************************************************************
* RawVerse Constructor - Initializes data for instance of RawVerse
*
* ENT: ipath - path of the directory where data and index files are located.
* be sure to include the trailing separator (e.g. '/' or '\')
* (e.g. 'modules/texts/rawtext/webster/')
*/
RawVerse::RawVerse(const char *ipath, int fileMode)
{
SWBuf buf;
path = 0;
stdstr(&path, ipath);
if ((path[strlen(path)-1] == '/') || (path[strlen(path)-1] == '\\'))
path[strlen(path)-1] = 0;
if (fileMode == -1) { // try read/write if possible
fileMode = FileMgr::RDWR;
}
buf.setFormatted("%s/ot.vss", path);
idxfp[0] = FileMgr::getSystemFileMgr()->open(buf, fileMode, true);
buf.setFormatted("%s/nt.vss", path);
idxfp[1] = FileMgr::getSystemFileMgr()->open(buf, fileMode, true);
buf.setFormatted("%s/ot", path);
textfp[0] = FileMgr::getSystemFileMgr()->open(buf, fileMode, true);
buf.setFormatted("%s/nt", path);
textfp[1] = FileMgr::getSystemFileMgr()->open(buf, fileMode, true);
instance++;
}
/******************************************************************************
* RawVerse Destructor - Cleans up instance of RawVerse
*/
RawVerse::~RawVerse()
{
int loop1;
if (path)
delete [] path;
--instance;
for (loop1 = 0; loop1 < 2; loop1++) {
FileMgr::getSystemFileMgr()->close(idxfp[loop1]);
FileMgr::getSystemFileMgr()->close(textfp[loop1]);
}
}
/******************************************************************************
* RawVerse::findOffset - Finds the offset of the key verse from the indexes
*
* ENT: testmt - testament to find (0 - Bible/module introduction)
* idxoff - offset into .vss
* start - address to store the starting offset
* size - address to store the size of the entry
*/
void RawVerse::findOffset(char testmt, long idxoff, long *start, unsigned short *size) const {
idxoff *= 6;
if (!testmt)
testmt = ((idxfp[1]) ? 1:2);
if (idxfp[testmt-1]->getFd() >= 0) {
idxfp[testmt-1]->seek(idxoff, SEEK_SET);
__s32 tmpStart;
__u16 tmpSize;
idxfp[testmt-1]->read(&tmpStart, 4);
long len = idxfp[testmt-1]->read(&tmpSize, 2); // read size
*start = swordtoarch32(tmpStart);
*size = swordtoarch16(tmpSize);
if (len < 2) {
*size = (unsigned short)((*start) ? (textfp[testmt-1]->seek(0, SEEK_END) - (long)*start) : 0); // if for some reason we get an error reading size, make size to end of file
}
}
else {
*start = 0;
*size = 0;
}
}
/******************************************************************************
* RawVerse::readText - Gets text at a given offset
*
* ENT: testmt - testament file to search in (0 - Old; 1 - New)
* start - starting offset where the text is located in the file
* size - size of text entry + 2 (null)(null)
* buf - buffer to store text
*
*/
void RawVerse::readText(char testmt, long start, unsigned short size, SWBuf &buf) const {
buf = "";
buf.setFillByte(0);
buf.setSize(size + 1);
if (!testmt)
testmt = ((idxfp[1]) ? 1:2);
if (size) {
if (textfp[testmt-1]->getFd() >= 0) {
textfp[testmt-1]->seek(start, SEEK_SET);
textfp[testmt-1]->read(buf.getRawData(), (int)size);
}
}
}
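/* Illustrative usage (editor's sketch, not from the original file): findOffset and
 * readText are typically used together; `rawVerse`, `testmt` and `idxoff` below are
 * assumed to come from the caller (e.g. derived from a VerseKey lookup):
 *
 *   long start = 0; unsigned short size = 0; SWBuf text;
 *   rawVerse.findOffset(testmt, idxoff, &start, &size);
 *   rawVerse.readText(testmt, start, size, text);
 */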
/******************************************************************************
* RawVerse::doSetText - Sets text for the current offset
*
* ENT: testmt - testament to find (0 - Bible/module introduction)
* idxoff - offset into .vss
* buf - buffer to store
* len - length of buffer (0 - null terminated)
*/
void RawVerse::doSetText(char testmt, long idxoff, const char *buf, long len)
{
__s32 start;
__u16 size;
idxoff *= 6;
if (!testmt)
testmt = ((idxfp[1]) ? 1:2);
size = (len < 0) ? strlen(buf) : len;
start = (__s32)textfp[testmt-1]->seek(0, SEEK_END);
idxfp[testmt-1]->seek(idxoff, SEEK_SET);
if (size) {
textfp[testmt-1]->seek(start, SEEK_SET);
textfp[testmt-1]->write(buf, (int)size);
// add a new line to make data file easier to read in an editor
textfp[testmt-1]->write(&nl, 1);
}
else {
start = 0;
}
start = archtosword32(start);
size = archtosword16(size);
idxfp[testmt-1]->write(&start, 4);
idxfp[testmt-1]->write(&size, 2);
}
/******************************************************************************
* RawVerse::doLinkEntry - Links one entry to another
*
* ENT: testmt - testament to find (0 - Bible/module introduction)<|fim▁hole|> */
void RawVerse::doLinkEntry(char testmt, long destidxoff, long srcidxoff) {
__s32 start;
__u16 size;
destidxoff *= 6;
srcidxoff *= 6;
if (!testmt)
testmt = ((idxfp[1]) ? 1:2);
// get source
idxfp[testmt-1]->seek(srcidxoff, SEEK_SET);
idxfp[testmt-1]->read(&start, 4);
idxfp[testmt-1]->read(&size, 2);
// write dest
idxfp[testmt-1]->seek(destidxoff, SEEK_SET);
idxfp[testmt-1]->write(&start, 4);
idxfp[testmt-1]->write(&size, 2);
}
/******************************************************************************
* RawVerse::createModule - Creates new module files
*
* ENT: path - directory to store module files
* RET: error status
*/
char RawVerse::createModule(const char *ipath, const char *v11n)
{
char *path = 0;
char *buf = new char [ strlen (ipath) + 20 ];
FileDesc *fd, *fd2;
stdstr(&path, ipath);
if ((path[strlen(path)-1] == '/') || (path[strlen(path)-1] == '\\'))
path[strlen(path)-1] = 0;
sprintf(buf, "%s/ot", path);
FileMgr::removeFile(buf);
fd = FileMgr::getSystemFileMgr()->open(buf, FileMgr::CREAT|FileMgr::WRONLY, FileMgr::IREAD|FileMgr::IWRITE);
fd->getFd();
FileMgr::getSystemFileMgr()->close(fd);
sprintf(buf, "%s/nt", path);
FileMgr::removeFile(buf);
fd = FileMgr::getSystemFileMgr()->open(buf, FileMgr::CREAT|FileMgr::WRONLY, FileMgr::IREAD|FileMgr::IWRITE);
fd->getFd();
FileMgr::getSystemFileMgr()->close(fd);
sprintf(buf, "%s/ot.vss", path);
FileMgr::removeFile(buf);
fd = FileMgr::getSystemFileMgr()->open(buf, FileMgr::CREAT|FileMgr::WRONLY, FileMgr::IREAD|FileMgr::IWRITE);
fd->getFd();
sprintf(buf, "%s/nt.vss", path);
FileMgr::removeFile(buf);
fd2 = FileMgr::getSystemFileMgr()->open(buf, FileMgr::CREAT|FileMgr::WRONLY, FileMgr::IREAD|FileMgr::IWRITE);
fd2->getFd();
VerseKey vk;
vk.setVersificationSystem(v11n);
vk.setIntros(1);
__s32 offset = 0;
__u16 size = 0;
offset = archtosword32(offset);
size = archtosword16(size);
for (vk = TOP; !vk.popError(); vk++) {
if (vk.getTestament() < 2) {
fd->write(&offset, 4);
fd->write(&size, 2);
}
else {
fd2->write(&offset, 4);
fd2->write(&size, 2);
}
}
fd2->write(&offset, 4);
fd2->write(&size, 2);
FileMgr::getSystemFileMgr()->close(fd);
FileMgr::getSystemFileMgr()->close(fd2);
delete [] path;
delete [] buf;
return 0;
}
SWORD_NAMESPACE_END<|fim▁end|> | * destidxoff - dest offset into .vss
* srcidxoff - source offset into .vss |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import * as walkSync from 'walk-sync';
import * as fs from 'fs';
import create from './offers';
import db from '../';
function mockup() {
const path = `${__dirname}`;
const paths = walkSync(`${path}`, { globs: ['*.json'] });
const promises = [];<|fim▁hole|> const json = fs.readFileSync(`${path}/${file}`, 'utf-8');
try {
const data = JSON.parse(json);
const model = file.replace('.json', '');
promises.push(db[model].bulkCreate(data).catch(err => console.log('Bulk create', err)));
} catch (err) {
console.log(`Error with ${file}`, err);
}
});
return Promise.all(promises);
}
function createAll() {
if (process.env.NODE_ENV !== 'test') return false;
return db['sequelize'].sync({ force: true })
.then(mockup)
.then(create);
}
createAll();<|fim▁end|> |
paths.forEach((file) => { |
<|file_name|>test_riak.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
'''Make sure the Riak client is sane'''
import unittest
from test import BaseTest
from simhash_db import Client
<|fim▁hole|>
class RiakTest(BaseTest, unittest.TestCase):
'''Test the Riak client'''
def make_client(self, name, num_blocks, num_bits):
return Client('riak', name, num_blocks, num_bits)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>driver.rs<|end_file_name|><|fim▁begin|>#[cfg(not(any(test, rustdoc)))]
use alloc::prelude::v1::*;
#[cfg(any(test, rustdoc))]
use std::prelude::v1::*;
use core::fmt::Write;
use crate::debugshell::{Command, DebugShell};
use crate::DRIVERS;
pub fn builtin_drivers(_sh: &mut DebugShell, _args: Vec<String>) -> String {
let mut out = String::new();
for _driver in DRIVERS.get().drivers.iter() {
let driver = _driver.get();
writeln!(out, " * {} [ID {}]", driver.name(), driver.driver_id()).unwrap();
}
out
}<|fim▁hole|> name: String::from("drivers"),
func: builtin_drivers,
});
}<|fim▁end|> |
pub fn register_builtins(sh: &mut DebugShell) {
sh.commands.push(Command { |
<|file_name|>ccp-IN.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js
const u = undefined;
function plural(n: number): number {
return 5;
}
export default [
'ccp-IN',
[['AM', 'PM'], u, u],
u,
[
['𑄢𑄧', '𑄥𑄧', '𑄟𑄧', '𑄝𑄪', '𑄝𑄳𑄢𑄨', '𑄥𑄪', '𑄥𑄧'],
[
'𑄢𑄧𑄝𑄨', '𑄥𑄧𑄟𑄴', '𑄟𑄧𑄁𑄉𑄧𑄣𑄴', '𑄝𑄪𑄖𑄴',
'𑄝𑄳𑄢𑄨𑄥𑄪𑄛𑄴', '𑄥𑄪𑄇𑄴𑄇𑄮𑄢𑄴', '𑄥𑄧𑄚𑄨'
],
[
'𑄢𑄧𑄝𑄨𑄝𑄢𑄴', '𑄥𑄧𑄟𑄴𑄝𑄢𑄴',
'𑄟𑄧𑄁𑄉𑄧𑄣𑄴𑄝𑄢𑄴', '𑄝𑄪𑄖𑄴𑄝𑄢𑄴',
'𑄝𑄳𑄢𑄨𑄥𑄪𑄛𑄴𑄝𑄢𑄴',
'𑄥𑄪𑄇𑄴𑄇𑄮𑄢𑄴𑄝𑄢𑄴', '𑄥𑄧𑄚𑄨𑄝𑄢𑄴'
],
[
'𑄢𑄧𑄝𑄨', '𑄥𑄧𑄟𑄴', '𑄟𑄧𑄁𑄉𑄧𑄣𑄴', '𑄝𑄪𑄖𑄴',
'𑄝𑄳𑄢𑄨𑄥𑄪𑄛𑄴', '𑄥𑄪𑄇𑄴𑄇𑄮𑄢𑄴', '𑄥𑄧𑄚𑄨'
]
],
u,
[
[
'𑄎', '𑄜𑄬', '𑄟', '𑄃𑄬', '𑄟𑄬', '𑄎𑄪𑄚𑄴', '𑄎𑄪', '𑄃',<|fim▁hole|> '𑄥𑄬', '𑄃𑄧', '𑄚𑄧', '𑄓𑄨'
],
[
'𑄎𑄚𑄪', '𑄜𑄬𑄛𑄴', '𑄟𑄢𑄴𑄌𑄧',
'𑄃𑄬𑄛𑄳𑄢𑄨𑄣𑄴', '𑄟𑄬', '𑄎𑄪𑄚𑄴', '𑄎𑄪𑄣𑄭',
'𑄃𑄉𑄧𑄌𑄴𑄑𑄴', '𑄥𑄬𑄛𑄴𑄑𑄬𑄟𑄴𑄝𑄧𑄢𑄴',
'𑄃𑄧𑄇𑄴𑄑𑄮𑄝𑄧𑄢𑄴', '𑄚𑄧𑄞𑄬𑄟𑄴𑄝𑄧𑄢𑄴',
'𑄓𑄨𑄥𑄬𑄟𑄴𑄝𑄢𑄴'
],
[
'𑄎𑄚𑄪𑄠𑄢𑄨', '𑄜𑄬𑄛𑄴𑄝𑄳𑄢𑄪𑄠𑄢𑄨',
'𑄟𑄢𑄴𑄌𑄧', '𑄃𑄬𑄛𑄳𑄢𑄨𑄣𑄴', '𑄟𑄬', '𑄎𑄪𑄚𑄴',
'𑄎𑄪𑄣𑄭', '𑄃𑄉𑄧𑄌𑄴𑄑𑄴',
'𑄥𑄬𑄛𑄴𑄑𑄬𑄟𑄴𑄝𑄧𑄢𑄴',
'𑄃𑄧𑄇𑄴𑄑𑄬𑄝𑄧𑄢𑄴', '𑄚𑄧𑄞𑄬𑄟𑄴𑄝𑄧𑄢𑄴',
'𑄓𑄨𑄥𑄬𑄟𑄴𑄝𑄧𑄢𑄴'
]
],
[
[
'𑄎', '𑄜𑄬', '𑄟', '𑄃𑄬', '𑄟𑄬', '𑄎𑄪𑄚𑄴', '𑄎𑄪', '𑄃',
'𑄥𑄬', '𑄃𑄧', '𑄚𑄧', '𑄓𑄨'
],
[
'𑄎𑄚𑄪𑄠𑄢𑄨', '𑄜𑄬𑄛𑄴𑄝𑄳𑄢𑄪𑄠𑄢𑄨',
'𑄟𑄢𑄴𑄌𑄧', '𑄃𑄬𑄛𑄳𑄢𑄨𑄣𑄴', '𑄟𑄬', '𑄎𑄪𑄚𑄴',
'𑄎𑄪𑄣𑄭', '𑄃𑄉𑄧𑄌𑄴𑄑𑄴',
'𑄥𑄬𑄛𑄴𑄑𑄬𑄟𑄴𑄝𑄧𑄢𑄴',
'𑄃𑄧𑄇𑄴𑄑𑄮𑄝𑄧𑄢𑄴', '𑄚𑄧𑄞𑄬𑄟𑄴𑄝𑄧𑄢𑄴',
'𑄓𑄨𑄥𑄬𑄟𑄴𑄝𑄧𑄢𑄴'
],
u
],
[
[
'𑄈𑄳𑄢𑄨𑄌𑄴𑄑𑄴𑄛𑄫𑄢𑄴𑄝𑄧',
'𑄈𑄳𑄢𑄨𑄌𑄴𑄑𑄛𑄴𑄘𑄧'
],
u, u
],
0,
[0, 0],
['d/M/yy', 'd MMM, y', 'd MMMM, y', 'EEEE, d MMMM, y'],
['h:mm a', 'h:mm:ss a', 'h:mm:ss a z', 'h:mm:ss a zzzz'],
['{1} {0}', u, u, u],
['.', ',', ';', '%', '+', '-', 'E', '×', '‰', '∞', 'NaN', ':'],
['#,##,##0.###', '#,##,##0%', '#,##,##0.00¤', '#E0'],
'₹',
'𑄃𑄨𑄚𑄴𑄘𑄨𑄠𑄚𑄴 𑄢𑄪𑄛𑄨',
{
'BDT': ['৳'],
'JPY': ['JP¥', '¥'],
'STD': [u, 'Db'],
'THB': ['฿'],
'TWD': ['NT$'],
'USD': ['US$', '$']
},
'ltr',
plural
];<|fim▁end|> | |
<|file_name|>vec.rs<|end_file_name|><|fim▁begin|>#[no_std];
#[no_core];
use zero;
pub trait OwnedVector<T> {
unsafe fn push_fast(&mut self, t: T);
unsafe fn len(&self) -> uint;
unsafe fn set_len(&mut self, newlen: uint);
unsafe fn as_mut_buf<U>(&self, f: &fn(*mut T, uint) -> U) -> U;
unsafe fn data(&self) -> *u8;
}
pub struct Vec<T> {
fill: uint,
alloc: uint,
data: T
}
impl<T> OwnedVector<T> for ~[T] {
//FIXME: Does not check to see if we have space
// See: https://github.com/mozilla/rust/blob/master/src/libstd/vec.rs#L1317
#[inline]
unsafe fn push_fast(&mut self, t: T) {
let repr: **mut Vec<u8> = zero::transmute(self);
let fill = (**repr).fill;
(**repr).fill += zero::size_of::<T>();
let p = &(**repr).data as *u8 as uint;
let mut i = 0;
while i < zero::size_of::<T>() {
*((p+fill+i) as *mut u8) = *((&t as *T as uint + i) as *mut u8);
i += 1;
}
}
unsafe fn len(&self) -> uint {
let repr: **Vec<u8> = zero::transmute(self);
((**repr).fill / zero::size_of::<T>()) as uint
}
unsafe fn set_len(&mut self, newlen: uint) {
let repr: **mut Vec<u8> = zero::transmute(self);<|fim▁hole|>
unsafe fn as_mut_buf<U>(&self, f: &fn(*mut T, uint) -> U) -> U {
let repr: **mut Vec<T> = zero::transmute(self);
f(&mut (**repr).data as *mut T, (**repr).fill / zero::size_of::<T>())
}
unsafe fn data(&self) -> *u8 {
let repr: **mut Vec<u8> = zero::transmute(self);
&(**repr).data as *u8
}
}<|fim▁end|> | (**repr).fill = zero::size_of::<T>() * newlen;
} |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod pool;
pub mod state;
pub mod prelude {<|fim▁hole|><|fim▁end|> | pub use super::pool::{ResourceLoader, ResourcePool};
pub use super::state::ResourceState;
} |
<|file_name|>tests.rs<|end_file_name|><|fim▁begin|>use State;
use GLOBALSINDEX;
use Type;
use raw;
use libc;
use std::task;
use std::any::AnyRefExt;
#[test]
fn test_state_init() {
let mut _s = State::new();
}
#[test]
#[should_fail]
fn test_error() {
let mut s = State::new();
s.pushinteger(42);
s.error()
}
#[test]
fn test_errorstr() {
let res = task::try::<()>(proc() {
let mut s = State::new();
s.errorstr("some err");
});
let err = res.unwrap_err();
let expected = "unprotected error in call to Lua API (some err)";
let s = err.as_ref::<String>();
if s.is_some() {
assert_eq!(s.unwrap().as_slice(), expected);
} else {
let s = err.as_ref::<&'static str>();
if s.is_some() {
assert_eq!(*s.unwrap(), expected);
} else {
fail!("unexpected failure result");
}
}
}
#[test]
fn test_describe() {
let mut s = State::new();
assert_eq!(s.typename(1), "no value");
s.pushnil();
assert_eq!(s.typename(-1), "nil");
s.pushinteger(42);
assert_eq!(s.typename(-1), "number");
s.pushstring("test");
assert_eq!(s.typename(-1), "string");
s.pushboolean(true);
assert_eq!(s.typename(-1), "boolean");
s.pushcfunction(dummy);
assert_eq!(s.typename(-1), "function");
extern "C" fn dummy(_L: *mut ::raw::lua_State) -> ::libc::c_int {
0
}
}
#[test]
fn test_openlibs() {
let mut s = State::new();
s.openlibs();
s.getfield(GLOBALSINDEX, "table");
assert_eq!(s.type_(-1), Some(Type::Table));
}
#[deriving(PartialEq,Eq,Show)]
enum CheckOptionEnum {
COEOne,
COETwo,
COEThree
}
#[test]
fn test_checkoption() {
let lst = [("one", COEOne), ("two", COETwo), ("three", COEThree)];
let mut s = State::new();
for &(k,ref v) in lst.iter() {
s.pushstring(k);
assert_eq!(*s.checkoption(1, None, lst), *v);
s.pop(1);
}
assert_eq!(*s.checkoption(1, Some("three"), lst), COEThree);
let res = task::try(proc() {
let mut s = State::new();
s.checkoption(1, None, lst);
});
assert!(res.is_err(), "expected error from checkoption");
<|fim▁hole|> let res = task::try(proc() {
let mut s = State::new();
s.checkoption(1, Some("four"), lst);
});
assert!(res.is_err(), "expected error from checkoption");
}
#[test]
fn test_tocfunction() {
let mut s = State::new();
// extern "C" fns don't implement Eq, so cast them to a pointer instead
s.pushstring("foo");
assert_eq!(s.tocfunction(1).map(|f| f as *()), None);
s.pushcfunction(cfunc);
assert_eq!(s.tocfunction(2).map(|f| f as *()), Some(cfunc as *()));
extern "C" fn cfunc(_L: *mut raw::lua_State) -> libc::c_int { 0 }
}
#[test]
fn test_gsub() {
// do some pretty basic gsub tests
let mut L = State::new();
assert_eq!(L.gsub("foobar", "bar", "quux"), "fooquux");
assert_eq!(L.gsub("foo", "o", "ö"), "föö");
assert_eq!(L.gsub("test", "a", "b"), "test");
assert_eq!(L.gsub("a b c d e", " ", "."), "a.b.c.d.e");
}<|fim▁end|> | |
<|file_name|>tenant.py<|end_file_name|><|fim▁begin|># Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from connector import channel
from google3.cloud.graphite.mmv2.services.google.identity_toolkit import tenant_pb2
from google3.cloud.graphite.mmv2.services.google.identity_toolkit import tenant_pb2_grpc
from typing import List
class Tenant(object):
def __init__(
self,
name: str = None,
display_name: str = None,
allow_password_signup: bool = None,<|fim▁hole|> mfa_config: dict = None,
test_phone_numbers: dict = None,
project: str = None,
service_account_file: str = "",
):
channel.initialize()
self.name = name
self.display_name = display_name
self.allow_password_signup = allow_password_signup
self.enable_email_link_signin = enable_email_link_signin
self.disable_auth = disable_auth
self.enable_anonymous_user = enable_anonymous_user
self.mfa_config = mfa_config
self.test_phone_numbers = test_phone_numbers
self.project = project
self.service_account_file = service_account_file
def apply(self):
stub = tenant_pb2_grpc.IdentitytoolkitBetaTenantServiceStub(channel.Channel())
request = tenant_pb2.ApplyIdentitytoolkitBetaTenantRequest()
if Primitive.to_proto(self.name):
request.resource.name = Primitive.to_proto(self.name)
if Primitive.to_proto(self.display_name):
request.resource.display_name = Primitive.to_proto(self.display_name)
if Primitive.to_proto(self.allow_password_signup):
request.resource.allow_password_signup = Primitive.to_proto(
self.allow_password_signup
)
if Primitive.to_proto(self.enable_email_link_signin):
request.resource.enable_email_link_signin = Primitive.to_proto(
self.enable_email_link_signin
)
if Primitive.to_proto(self.disable_auth):
request.resource.disable_auth = Primitive.to_proto(self.disable_auth)
if Primitive.to_proto(self.enable_anonymous_user):
request.resource.enable_anonymous_user = Primitive.to_proto(
self.enable_anonymous_user
)
if TenantMfaConfig.to_proto(self.mfa_config):
request.resource.mfa_config.CopyFrom(
TenantMfaConfig.to_proto(self.mfa_config)
)
else:
request.resource.ClearField("mfa_config")
if Primitive.to_proto(self.test_phone_numbers):
request.resource.test_phone_numbers = Primitive.to_proto(
self.test_phone_numbers
)
if Primitive.to_proto(self.project):
request.resource.project = Primitive.to_proto(self.project)
request.service_account_file = self.service_account_file
response = stub.ApplyIdentitytoolkitBetaTenant(request)
self.name = Primitive.from_proto(response.name)
self.display_name = Primitive.from_proto(response.display_name)
self.allow_password_signup = Primitive.from_proto(
response.allow_password_signup
)
self.enable_email_link_signin = Primitive.from_proto(
response.enable_email_link_signin
)
self.disable_auth = Primitive.from_proto(response.disable_auth)
self.enable_anonymous_user = Primitive.from_proto(
response.enable_anonymous_user
)
self.mfa_config = TenantMfaConfig.from_proto(response.mfa_config)
self.test_phone_numbers = Primitive.from_proto(response.test_phone_numbers)
self.project = Primitive.from_proto(response.project)
def delete(self):
stub = tenant_pb2_grpc.IdentitytoolkitBetaTenantServiceStub(channel.Channel())
request = tenant_pb2.DeleteIdentitytoolkitBetaTenantRequest()
request.service_account_file = self.service_account_file
if Primitive.to_proto(self.name):
request.resource.name = Primitive.to_proto(self.name)
if Primitive.to_proto(self.display_name):
request.resource.display_name = Primitive.to_proto(self.display_name)
if Primitive.to_proto(self.allow_password_signup):
request.resource.allow_password_signup = Primitive.to_proto(
self.allow_password_signup
)
if Primitive.to_proto(self.enable_email_link_signin):
request.resource.enable_email_link_signin = Primitive.to_proto(
self.enable_email_link_signin
)
if Primitive.to_proto(self.disable_auth):
request.resource.disable_auth = Primitive.to_proto(self.disable_auth)
if Primitive.to_proto(self.enable_anonymous_user):
request.resource.enable_anonymous_user = Primitive.to_proto(
self.enable_anonymous_user
)
if TenantMfaConfig.to_proto(self.mfa_config):
request.resource.mfa_config.CopyFrom(
TenantMfaConfig.to_proto(self.mfa_config)
)
else:
request.resource.ClearField("mfa_config")
if Primitive.to_proto(self.test_phone_numbers):
request.resource.test_phone_numbers = Primitive.to_proto(
self.test_phone_numbers
)
if Primitive.to_proto(self.project):
request.resource.project = Primitive.to_proto(self.project)
response = stub.DeleteIdentitytoolkitBetaTenant(request)
@classmethod
def list(self, project, service_account_file=""):
stub = tenant_pb2_grpc.IdentitytoolkitBetaTenantServiceStub(channel.Channel())
request = tenant_pb2.ListIdentitytoolkitBetaTenantRequest()
request.service_account_file = service_account_file
request.Project = project
return stub.ListIdentitytoolkitBetaTenant(request).items
def to_proto(self):
resource = tenant_pb2.IdentitytoolkitBetaTenant()
if Primitive.to_proto(self.name):
resource.name = Primitive.to_proto(self.name)
if Primitive.to_proto(self.display_name):
resource.display_name = Primitive.to_proto(self.display_name)
if Primitive.to_proto(self.allow_password_signup):
resource.allow_password_signup = Primitive.to_proto(
self.allow_password_signup
)
if Primitive.to_proto(self.enable_email_link_signin):
resource.enable_email_link_signin = Primitive.to_proto(
self.enable_email_link_signin
)
if Primitive.to_proto(self.disable_auth):
resource.disable_auth = Primitive.to_proto(self.disable_auth)
if Primitive.to_proto(self.enable_anonymous_user):
resource.enable_anonymous_user = Primitive.to_proto(
self.enable_anonymous_user
)
if TenantMfaConfig.to_proto(self.mfa_config):
resource.mfa_config.CopyFrom(TenantMfaConfig.to_proto(self.mfa_config))
else:
resource.ClearField("mfa_config")
if Primitive.to_proto(self.test_phone_numbers):
resource.test_phone_numbers = Primitive.to_proto(self.test_phone_numbers)
if Primitive.to_proto(self.project):
resource.project = Primitive.to_proto(self.project)
return resource
class TenantMfaConfig(object):
def __init__(self, state: str = None, enabled_providers: list = None):
self.state = state
self.enabled_providers = enabled_providers
@classmethod
def to_proto(self, resource):
if not resource:
return None
res = tenant_pb2.IdentitytoolkitBetaTenantMfaConfig()
if TenantMfaConfigStateEnum.to_proto(resource.state):
res.state = TenantMfaConfigStateEnum.to_proto(resource.state)
if TenantMfaConfigEnabledProvidersEnumArray.to_proto(
resource.enabled_providers
):
res.enabled_providers.extend(
TenantMfaConfigEnabledProvidersEnumArray.to_proto(
resource.enabled_providers
)
)
return res
@classmethod
def from_proto(self, resource):
if not resource:
return None
return TenantMfaConfig(
state=TenantMfaConfigStateEnum.from_proto(resource.state),
enabled_providers=TenantMfaConfigEnabledProvidersEnumArray.from_proto(
resource.enabled_providers
),
)
class TenantMfaConfigArray(object):
@classmethod
def to_proto(self, resources):
if not resources:
return resources
return [TenantMfaConfig.to_proto(i) for i in resources]
@classmethod
def from_proto(self, resources):
return [TenantMfaConfig.from_proto(i) for i in resources]
class TenantMfaConfigStateEnum(object):
@classmethod
def to_proto(self, resource):
if not resource:
return resource
return tenant_pb2.IdentitytoolkitBetaTenantMfaConfigStateEnum.Value(
"IdentitytoolkitBetaTenantMfaConfigStateEnum%s" % resource
)
@classmethod
def from_proto(self, resource):
if not resource:
return resource
return tenant_pb2.IdentitytoolkitBetaTenantMfaConfigStateEnum.Name(resource)[
len("IdentitytoolkitBetaTenantMfaConfigStateEnum") :
]
class TenantMfaConfigEnabledProvidersEnum(object):
@classmethod
def to_proto(self, resource):
if not resource:
return resource
return tenant_pb2.IdentitytoolkitBetaTenantMfaConfigEnabledProvidersEnum.Value(
"IdentitytoolkitBetaTenantMfaConfigEnabledProvidersEnum%s" % resource
)
@classmethod
def from_proto(self, resource):
if not resource:
return resource
return tenant_pb2.IdentitytoolkitBetaTenantMfaConfigEnabledProvidersEnum.Name(
resource
)[len("IdentitytoolkitBetaTenantMfaConfigEnabledProvidersEnum") :]
class Primitive(object):
@classmethod
def to_proto(self, s):
if not s:
return ""
return s
@classmethod
def from_proto(self, s):
return s<|fim▁end|> | enable_email_link_signin: bool = None,
disable_auth: bool = None,
enable_anonymous_user: bool = None, |
<|file_name|>Devanagari-Extended-regex.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | // Regular expression that matches all symbols in the Devanagari Extended block as per Unicode v6.0.0:
/[\uA8E0-\uA8FF]/; |
<|file_name|>Leverman.java<|end_file_name|><|fim▁begin|>package de.lman;
import de.lman.engine.Colors;
import de.lman.engine.Game;
import de.lman.engine.InputState;
import de.lman.engine.Keys;
import de.lman.engine.Mouse;
import de.lman.engine.math.Mat2f;
import de.lman.engine.math.Scalar;
import de.lman.engine.math.Transform;
import de.lman.engine.math.Vec2f;
import de.lman.engine.physics.Body;
import de.lman.engine.physics.ContactStatePair;
import de.lman.engine.physics.GeometryUtils;
import de.lman.engine.physics.Physics;
import de.lman.engine.physics.contacts.Contact;
import de.lman.engine.physics.contacts.ContactListener;
import de.lman.engine.physics.shapes.BoxShape;
import de.lman.engine.physics.shapes.EdgeShape;
import de.lman.engine.physics.shapes.PhysicsMaterial;
import de.lman.engine.physics.shapes.PlaneShape;
import de.lman.engine.physics.shapes.PolygonShape;
import de.lman.engine.physics.shapes.Shape;
/*
Problems to solve:
- Fix line drawing
Tasks:
- Make keyboard input handling more robust (is pressed, was pressed)
- Load, convert and draw bitmaps
- Draw bitmaps with a transform (rotation)
- Generate bitmap fonts
- Draw bitmap fonts
- Bilinear bitmap filtering
- Debug information
- Show FPS / frame duration
- Timing measurements
- Keep them for multiple frames
- Visualization (bar, line)
- UI
- Panel
- Button
- Label
- Checkbox
- Radio button
- Sensor flag for Shape
- ECS
- Integrated level editor
- Scaling
- Proportional resizing of sides
- Moving corner points
- Deleting
- Copy / paste
- Grid snap
- Multiple shape types + selection:
- Circle
- Line segment
- Polygons
- Boxes
- Planes
- Load / save
- Serialize and deserialize bodies & shapes (JSON)
- Rotational dynamics:
- Cross product
- Make updateAABB in Body more robust
- Simplify getSupportPoints / make it more robust
- Center of mass (COM)
- Moment of inertia (Inertia, AngularVelocity)
- Contact clipping
- Asset management
- Preloading
- Simplify creation of contact scenarios -> offset()
*/
public class Leverman extends Game implements ContactListener {
public Leverman() {
super("Leverman");
}
public static void main(String[] args) {
Leverman game = new Leverman();
game.run();
}
private Physics physics;
private boolean showContacts = true;
private boolean showAABBs = false;
private boolean physicsSingleStep = false;
public final static PhysicsMaterial MAT_STATIC = new PhysicsMaterial(0f, 0.1f);
public final static PhysicsMaterial MAT_DYNAMIC = new PhysicsMaterial(1f, 0.1f);
private Body playerBody;
private boolean playerOnGround = false;
private boolean playerJumping = false;
private int playerGroundHash = 0;
private final Vec2f groundNormal = new Vec2f(0, 1);
@Override
public void physicsBeginContact(int hash, ContactStatePair pair) {
Contact contact = pair.pair.contacts[pair.contactIndex];
Vec2f normal = contact.normal;
Body player = null;
if (pair.pair.a.id == playerBody.id) {
player = pair.pair.a;
normal = new Vec2f(contact.normal).invert();
} else if (pair.pair.b.id == playerBody.id) {
player = pair.pair.b;
}
float d = normal.dot(groundNormal);
if (d > 0) {
if (player != null && (!playerOnGround)) {
playerOnGround = true;
playerGroundHash = hash;
}
}
}
@Override
public void physicsEndContact(int hash, ContactStatePair pair) {
Contact contact = pair.pair.contacts[pair.contactIndex];
Vec2f normal = contact.normal;
Body player = null;
if (pair.pair.a.id == playerBody.id) {
player = pair.pair.a;
normal = new Vec2f(contact.normal).invert();
} else if (pair.pair.b.id == playerBody.id) {
player = pair.pair.b;
}
float d = normal.dot(groundNormal);
if (d > 0) {
if (player != null && playerOnGround && (playerGroundHash == hash)) {
playerOnGround = false;
playerGroundHash = 0;
}
}
}
private void addPlatform(float x, float y, float rx, float ry) {
Body body;
physics.addBody(body = new Body().addShape(new BoxShape(new Vec2f(rx, ry)).setMaterial(MAT_STATIC)));
body.pos.set(x, y);
}
private void addBox(float x, float y, float rx, float ry) {
Body body;
physics.addBody(body = new Body().addShape(new BoxShape(new Vec2f(rx, ry)).setMaterial(MAT_DYNAMIC)));
body.pos.set(x, y);
}
protected void initGame() {
physics = new Physics(this);
physics.enableSingleStepMode(physicsSingleStep);
Body body;
physics.addBody(body = new Body().addShape(new PlaneShape(viewport.y).rotation(Scalar.PI * 0f).setMaterial(MAT_STATIC)));
body.pos.set(-halfWidth + 0.5f, 0);
physics.addBody(body = new Body().addShape(new PlaneShape(viewport.y).rotation(Scalar.PI * 1f).setMaterial(MAT_STATIC)));
body.pos.set(halfWidth - 0.5f, 0);
physics.addBody(body = new Body().addShape(new PlaneShape(viewport.x).rotation(Scalar.PI * 0.5f).setMaterial(MAT_STATIC)));
body.pos.set(0, -halfHeight + 0.5f);
physics.addBody(body = new Body().addShape(new PlaneShape(viewport.x).rotation(Scalar.PI * 1.5f).setMaterial(MAT_STATIC)));
body.pos.set(0, halfHeight - 0.5f);
addPlatform(0 - 2.9f, 0 - 2.0f, 0.6f, 0.1f);
addPlatform(0, 0 - 1.3f, 0.7f, 0.1f);
addPlatform(0 + 2.9f, 0 - 0.8f, 0.6f, 0.1f);
//addBox(0, -0.5f, 0.2f, 0.2f);
addPlatform(0 + 2.0f, -halfHeight + 0.2f + 0.5f, 0.2f, 0.2f);
addPlatform(0 + 2.4f, -halfHeight + 0.2f + 0.5f, 0.2f, 0.2f);
addPlatform(0 + 2.8f, -halfHeight + 0.2f + 0.5f, 0.2f, 0.2f);
addPlatform(0 + 3.2f, -halfHeight + 0.2f + 0.5f, 0.2f, 0.2f);
playerBody = new Body();
BoxShape playerBox = (BoxShape) new BoxShape(new Vec2f(0.2f, 0.4f)).setMaterial(MAT_DYNAMIC);
playerBody.addShape(playerBox);
playerBody.pos.set(0, -halfHeight + playerBox.radius.y + 0.5f);
physics.addBody(playerBody);
Vec2f[] polyVerts = new Vec2f[]{
new Vec2f(0, 0.5f),
new Vec2f(-0.5f, -0.5f),
new Vec2f(0.5f, -0.5f),
};
physics.addBody(body = new Body().addShape(new PolygonShape(polyVerts).setMaterial(MAT_STATIC)));
body.pos.set(0, 0);
}
<|fim▁hole|> private Vec2f dragStart = new Vec2f();
private Body dragBody = null;
private void updateGameInput(float dt, InputState inputState) {
boolean leftMousePressed = inputState.isMouseDown(Mouse.LEFT);
if (!dragging) {
if (leftMousePressed) {
dragBody = null;
for (int i = 0; i < physics.numBodies; i++) {
Body body = physics.bodies[i];
if (body.invMass > 0) {
if (GeometryUtils.isPointInAABB(inputState.mousePos.x, inputState.mousePos.y, body.aabb)) {
dragging = true;
dragStart.set(inputState.mousePos);
dragBody = body;
break;
}
}
}
}
} else {
if (leftMousePressed) {
float dx = inputState.mousePos.x - dragStart.x;
float dy = inputState.mousePos.y - dragStart.y;
dragBody.vel.x += dx * 0.1f;
dragBody.vel.y += dy * 0.1f;
dragStart.set(inputState.mousePos);
} else {
dragging = false;
}
}
// Toggle contact rendering on/off
if (inputState.isKeyDown(Keys.F2)) {
showContacts = !showContacts;
inputState.setKeyDown(Keys.F2, false);
}
// Toggle AABB rendering on/off
if (inputState.isKeyDown(Keys.F3)) {
showAABBs = !showAABBs;
inputState.setKeyDown(Keys.F3, false);
}
// Toggle single-step physics mode on/off
if (inputState.isKeyDown(Keys.F5)) {
physicsSingleStep = !physicsSingleStep;
physics.enableSingleStepMode(physicsSingleStep);
inputState.setKeyDown(Keys.F5, false);
}
if (inputState.isKeyDown(Keys.F6)) {
if (physicsSingleStep) {
physics.nextStep();
}
inputState.setKeyDown(Keys.F6, false);
}
// Move the player
if (inputState.isKeyDown(Keys.W)) {
if (!playerJumping && playerOnGround) {
playerBody.acc.y += 4f / dt;
playerJumping = true;
}
} else {
if (playerJumping && playerOnGround) {
playerJumping = false;
}
}
if (inputState.isKeyDown(Keys.A)) {
playerBody.acc.x -= 0.1f / dt;
} else if (inputState.isKeyDown(Keys.D)) {
playerBody.acc.x += 0.1f / dt;
}
}
private final Editor editor = new Editor();
private boolean editorWasShownAABB = false;
protected void updateInput(float dt, InputState input) {
// Toggle editor mode on/off
if (input.isKeyDown(Keys.F4)) {
editor.active = !editor.active;
if (editor.active) {
editorWasShownAABB = showAABBs;
showAABBs = true;
editor.init(physics);
} else {
showAABBs = editorWasShownAABB;
}
input.setKeyDown(Keys.F4, false);
}
if (editor.active) {
editor.updateInput(dt, physics, input);
} else {
updateGameInput(dt, input);
}
}
@Override
protected String getAdditionalTitle() {
return String.format(" [Frames: %d, Bodies: %d, Contacts: %d]", numFrames, physics.numBodies, physics.numContacts);
}
protected void updateGame(float dt) {
if (!editor.active) {
physics.step(dt);
}
}
private void renderEditor(float dt) {
// TODO: Move to editor class
clear(0x000000);
for (int i = 0; i < viewport.x / Editor.GRID_SIZE; i++) {
drawLine(-halfWidth + i * Editor.GRID_SIZE, -halfHeight, -halfWidth + i * Editor.GRID_SIZE, halfHeight, Colors.DarkSlateGray);
}
for (int i = 0; i < viewport.y / Editor.GRID_SIZE; i++) {
drawLine(-halfWidth, -halfHeight + i * Editor.GRID_SIZE, halfWidth, -halfHeight + i * Editor.GRID_SIZE, Colors.DarkSlateGray);
}
drawBodies(physics.numBodies, physics.bodies);
if (editor.selectedBody != null) {
Editor.DragSide[] dragSides = editor.getDragSides(editor.selectedBody);
Shape shape = editor.selectedBody.shapes[0];
if (dragSides.length > 0 && shape instanceof EdgeShape) {
EdgeShape edgeShape = (EdgeShape) shape;
Transform t = new Transform(shape.localPos, shape.localRotation).offset(editor.selectedBody.pos);
Vec2f[] localVertices = edgeShape.getLocalVertices();
for (int i = 0; i < dragSides.length; i++) {
Vec2f dragPoint = dragSides[i].center;
drawPoint(dragPoint, Editor.DRAGPOINT_RADIUS, Colors.White);
if (editor.resizeSideIndex == i) {
Vec2f v0 = new Vec2f(localVertices[dragSides[i].index0]).transform(t);
Vec2f v1 = new Vec2f(localVertices[dragSides[i].index1]).transform(t);
drawPoint(v0, Editor.DRAGPOINT_RADIUS, Colors.GoldenRod);
drawPoint(v1, Editor.DRAGPOINT_RADIUS, Colors.GoldenRod);
}
}
}
Mat2f mat = new Mat2f(shape.localRotation).transpose();
drawNormal(editor.selectedBody.pos, mat.col1, DEFAULT_ARROW_RADIUS, DEFAULT_ARROW_LENGTH, Colors.Red);
}
drawPoint(inputState.mousePos.x, inputState.mousePos.y, DEFAULT_POINT_RADIUS, 0x0000FF);
}
private void renderInternalGame(float dt) {
clear(0x000000);
drawBodies(physics.numBodies, physics.bodies);
if (showAABBs) {
drawAABBs(physics.numBodies, physics.bodies);
}
if (showContacts) {
drawContacts(dt, physics.numPairs, physics.pairs, false, true);
}
}
protected void renderGame(float dt) {
if (editor.active) {
renderEditor(dt);
} else {
renderInternalGame(dt);
}
}
}<|fim▁end|> | private boolean dragging = false; |
<|file_name|>server.rs<|end_file_name|><|fim▁begin|>use chrono::prelude::*;
use serde::{Deserialize, Serialize};
// use eyre::{
// // eyre,
// Result,
// // Context as _,
// };
use printspool_json_store::{ Record };
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Server {
pub id: crate::DbId,
pub version: i32,
pub created_at: DateTime<Utc>,
pub deleted_at: Option<DateTime<Utc>>,
// Foreign Keys
// Props
pub name: String,
/// True if the server row is the self-reference to this instance
pub is_self: bool,
}
#[async_trait::async_trait]
impl Record for Server {
const TABLE: &'static str = "servers";
fn id(&self) -> &crate::DbId {
&self.id
}
<|fim▁hole|> }
fn version_mut(&mut self) -> &mut printspool_json_store::Version {
&mut self.version
}
fn created_at(&self) -> DateTime<Utc> {
self.created_at
}
fn deleted_at(&self) -> Option<DateTime<Utc>> {
self.deleted_at
}
fn deleted_at_mut(&mut self) -> &mut Option<DateTime<Utc>> {
&mut self.deleted_at
}
}<|fim▁end|> | fn version(&self) -> printspool_json_store::Version {
self.version |
<|file_name|>test_quota_utils.py<|end_file_name|><|fim▁begin|># Copyright 2016 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from cinder import context
from cinder import exception
from cinder import quota_utils
from cinder import test
from keystoneclient import exceptions
from oslo_config import cfg
from oslo_config import fixture as config_fixture
CONF = cfg.CONF
class QuotaUtilsTest(test.TestCase):
class FakeProject(object):
def __init__(self, id='foo', parent_id=None):
self.id = id
self.parent_id = parent_id
self.subtree = None
self.parents = None
self.domain_id = 'default'
def setUp(self):
super(QuotaUtilsTest, self).setUp()
self.auth_url = 'http://localhost:5000'
self.context = context.RequestContext('fake_user', 'fake_proj_id')
self.fixture = self.useFixture(config_fixture.Config(CONF))
self.fixture.config(auth_uri=self.auth_url, group='keystone_authtoken')
@mock.patch('keystoneclient.client.Client')
@mock.patch('keystoneclient.session.Session')
def test_keystone_client_instantiation(self, ksclient_session,
ksclient_class):
quota_utils._keystone_client(self.context)
ksclient_class.assert_called_once_with(auth_url=self.auth_url,
session=ksclient_session(),
version=(3, 0))
@mock.patch('keystoneclient.client.Client')
def test_get_project_keystoneclient_v2(self, ksclient_class):
keystoneclient = ksclient_class.return_value
keystoneclient.version = 'v2.0'
expected_project = quota_utils.GenericProjectInfo(
self.context.project_id, 'v2.0')
project = quota_utils.get_project_hierarchy(
self.context, self.context.project_id)
self.assertEqual(expected_project.__dict__, project.__dict__)
@mock.patch('keystoneclient.client.Client')
def test_get_project_keystoneclient_v3(self, ksclient_class):
keystoneclient = ksclient_class.return_value
keystoneclient.version = 'v3'
returned_project = self.FakeProject(self.context.project_id, 'bar')
del returned_project.subtree
keystoneclient.projects.get.return_value = returned_project
expected_project = quota_utils.GenericProjectInfo(
self.context.project_id, 'v3', 'bar')
project = quota_utils.get_project_hierarchy(
self.context, self.context.project_id)
self.assertEqual(expected_project.__dict__, project.__dict__)
@mock.patch('keystoneclient.client.Client')
def test_get_project_keystoneclient_v3_with_subtree(self, ksclient_class):
keystoneclient = ksclient_class.return_value
keystoneclient.version = 'v3'
returned_project = self.FakeProject(self.context.project_id, 'bar')
subtree_dict = {'baz': {'quux': None}}
returned_project.subtree = subtree_dict
keystoneclient.projects.get.return_value = returned_project
expected_project = quota_utils.GenericProjectInfo(
self.context.project_id, 'v3', 'bar', subtree_dict)
project = quota_utils.get_project_hierarchy(
self.context, self.context.project_id, subtree_as_ids=True)
keystoneclient.projects.get.assert_called_once_with(
self.context.project_id, parents_as_ids=False, subtree_as_ids=True)
self.assertEqual(expected_project.__dict__, project.__dict__)
def _setup_mock_ksclient(self, mock_client, version='v3',
subtree=None, parents=None):
keystoneclient = mock_client.return_value
keystoneclient.version = version
proj = self.FakeProject(self.context.project_id)
proj.subtree = subtree
if parents:
proj.parents = parents
proj.parent_id = next(iter(parents.keys()))
keystoneclient.projects.get.return_value = proj
@mock.patch('keystoneclient.client.Client')
def test__filter_domain_id_from_parents_domain_as_parent(
self, mock_client):
# Test with a top level project (domain is direct parent)
self._setup_mock_ksclient(mock_client, parents={'default': None})
project = quota_utils.get_project_hierarchy(
self.context, self.context.project_id, parents_as_ids=True)
self.assertIsNone(project.parent_id)
self.assertIsNone(project.parents)<|fim▁hole|> def test__filter_domain_id_from_parents_domain_as_grandparent(
self, mock_client):
# Test with a child project (domain is more than a parent)
self._setup_mock_ksclient(mock_client,
parents={'bar': {'default': None}})
project = quota_utils.get_project_hierarchy(
self.context, self.context.project_id, parents_as_ids=True)
self.assertEqual('bar', project.parent_id)
self.assertEqual({'bar': None}, project.parents)
@mock.patch('keystoneclient.client.Client')
def test__filter_domain_id_from_parents_no_domain_in_parents(
self, mock_client):
        # Test that if the topmost parent is not a domain (to simulate an
        # older keystone version) nothing gets removed from the tree
parents = {'bar': {'foo': None}}
self._setup_mock_ksclient(mock_client, parents=parents)
project = quota_utils.get_project_hierarchy(
self.context, self.context.project_id, parents_as_ids=True)
self.assertEqual('bar', project.parent_id)
self.assertEqual(parents, project.parents)
@mock.patch('keystoneclient.client.Client')
def test__filter_domain_id_from_parents_no_parents(
self, mock_client):
        # Test that if no parents are present at all (to simulate an older
        # keystone version) things don't blow up
self._setup_mock_ksclient(mock_client)
project = quota_utils.get_project_hierarchy(
self.context, self.context.project_id, parents_as_ids=True)
self.assertIsNone(project.parent_id)
self.assertIsNone(project.parents)
@mock.patch('cinder.quota_utils._keystone_client')
def test_validate_nested_projects_with_keystone_v2(self, _keystone_client):
_keystone_client.side_effect = exceptions.VersionNotAvailable
self.assertRaises(exception.CinderException,
quota_utils.validate_setup_for_nested_quota_use,
self.context, [], None)
@mock.patch('cinder.quota_utils._keystone_client')
def test_validate_nested_projects_non_cloud_admin(self, _keystone_client):
        # Covers the case of not being cloud admin or using an old policy.json
_keystone_client.side_effect = exceptions.Forbidden
self.assertRaises(exception.CinderException,
quota_utils.validate_setup_for_nested_quota_use,
self.context, [], None)
def _process_reserve_over_quota(self, overs, usages, quotas,
expected_ex,
resource='volumes'):
ctxt = context.get_admin_context()
ctxt.project_id = 'fake'
size = 1
kwargs = {'overs': overs,
'usages': usages,
'quotas': quotas}
exc = exception.OverQuota(**kwargs)
self.assertRaises(expected_ex,
quota_utils.process_reserve_over_quota,
ctxt, exc,
resource=resource,
size=size)
def test_volume_size_exceed_quota(self):
overs = ['gigabytes']
usages = {'gigabytes': {'reserved': 1, 'in_use': 9}}
quotas = {'gigabytes': 10, 'snapshots': 10}
self._process_reserve_over_quota(
overs, usages, quotas,
exception.VolumeSizeExceedsAvailableQuota)
def test_snapshot_limit_exceed_quota(self):
overs = ['snapshots']
usages = {'snapshots': {'reserved': 1, 'in_use': 9}}
quotas = {'gigabytes': 10, 'snapshots': 10}
self._process_reserve_over_quota(
overs, usages, quotas,
exception.SnapshotLimitExceeded,
resource='snapshots')
def test_backup_gigabytes_exceed_quota(self):
overs = ['backup_gigabytes']
usages = {'backup_gigabytes': {'reserved': 1, 'in_use': 9}}
quotas = {'backup_gigabytes': 10}
self._process_reserve_over_quota(
overs, usages, quotas,
exception.VolumeBackupSizeExceedsAvailableQuota,
resource='backups')
def test_backup_limit_quota(self):
overs = ['backups']
usages = {'backups': {'reserved': 1, 'in_use': 9}}
quotas = {'backups': 9}
self._process_reserve_over_quota(
overs, usages, quotas,
exception.BackupLimitExceeded,
resource='backups')
def test_volumes_limit_quota(self):
overs = ['volumes']
usages = {'volumes': {'reserved': 1, 'in_use': 9}}
quotas = {'volumes': 9}
self._process_reserve_over_quota(
overs, usages, quotas,
exception.VolumeLimitExceeded)
def test_unknown_quota(self):
overs = ['unknown']
usages = {'volumes': {'reserved': 1, 'in_use': 9}}
quotas = {'volumes': 9}
self._process_reserve_over_quota(
overs, usages, quotas,
exception.UnexpectedOverQuota)
def test_unknown_quota2(self):
overs = ['volumes']
usages = {'volumes': {'reserved': 1, 'in_use': 9}}
quotas = {'volumes': 9}
self._process_reserve_over_quota(
overs, usages, quotas,
exception.UnexpectedOverQuota,
resource='snapshots')<|fim▁end|> |
@mock.patch('keystoneclient.client.Client') |
<|file_name|>test_school_house.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
<|fim▁hole|>class TestSchoolHouse(unittest.TestCase):
pass<|fim▁end|> | |
<|file_name|>dropck-eyepatch-extern-crate.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// aux-build:dropck_eyepatch_extern_crate.rs
extern crate dropck_eyepatch_extern_crate as other;
use other::{Dt,Dr,Pt,Pr,St,Sr};
fn main() {
use std::cell::RefCell;
struct CheckOnDrop(RefCell<String>, &'static str);
impl Drop for CheckOnDrop {
fn drop(&mut self) { assert_eq!(*self.0.borrow(), self.1); }
}
let c_long;
let (c, dt, dr, pt, pr, st, sr)
: (CheckOnDrop, Dt<_>, Dr<_>, Pt<_, _>, Pr<_>, St<_>, Sr<_>);
c_long = CheckOnDrop(RefCell::new("c_long".to_string()),
"c_long|pr|pt|dr|dt");
c = CheckOnDrop(RefCell::new("c".to_string()),
"c");
// No error: sufficiently long-lived state can be referenced in dtors
dt = Dt("dt", &c_long.0);
dr = Dr("dr", &c_long.0);
// No error: Drop impl asserts .1 (A and &'a _) are not accessed
pt = Pt("pt", &c.0, &c_long.0);
pr = Pr("pr", &c.0, &c_long.0);
// No error: St and Sr have no destructor.
st = St("st", &c.0);<|fim▁hole|> assert_eq!(*c_long.0.borrow(), "c_long");
assert_eq!(*c.0.borrow(), "c");
}<|fim▁end|> | sr = Sr("sr", &c.0);
println!("{:?}", (dt.0, dr.0, pt.0, pr.0, st.0, sr.0)); |
<|file_name|>locksmithctl.go<|end_file_name|><|fim▁begin|>// Copyright 2015 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"crypto/tls"
"crypto/x509"
"flag"
"fmt"
"io/ioutil"
"net"
"net/http"
"os"
"path"
"strings"
"text/tabwriter"
"time"
"github.com/coreos/locksmith/lock"
"github.com/coreos/locksmith/version"
"github.com/coreos/locksmith/Godeps/_workspace/src/github.com/coreos/etcd/client"
)
const (
cliName = "locksmithctl"
cliDescription = `Manage the cluster wide reboot lock.`
)
var (
out *tabwriter.Writer
commands []*Command
globalFlagSet *flag.FlagSet = flag.NewFlagSet("locksmithctl", flag.ExitOnError)
globalFlags = struct {
Debug bool
Endpoints endpoints
EtcdKeyFile string
EtcdCertFile string
EtcdCAFile string
EtcdUsername string
EtcdPassword string
Group string
Version bool
}{}
defaultEndpoints = []string{
"http://127.0.0.1:2379",
"http://127.0.0.1:4001",
}
)
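// endpoints implements flag.Value (via String and Set below), so the -endpoint
// flag can be repeated or given as a comma-separated list of etcd URLs, e.g.
// -endpoint http://10.0.0.1:2379,http://10.0.0.2:2379 (illustrative addresses).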
type endpoints []string
func (e *endpoints) String() string {
if len(*e) == 0 {
return strings.Join(defaultEndpoints, ",")
}
return strings.Join(*e, ",")
}
func (e *endpoints) Set(value string) error {
for _, url := range strings.Split(value, ",") {
*e = append(*e, strings.TrimSpace(url))
}
return nil
}
func init() {
out = new(tabwriter.Writer)
out.Init(os.Stdout, 0, 8, 1, '\t', 0)
globalFlagSet.BoolVar(&globalFlags.Debug, "debug", false, "Print out debug information to stderr.")
globalFlagSet.Var(&globalFlags.Endpoints, "endpoint", "etcd endpoint for locksmith. Specify multiple times to use multiple endpoints.")
globalFlagSet.StringVar(&globalFlags.EtcdKeyFile, "etcd-keyfile", "", "etcd key file authentication")
globalFlagSet.StringVar(&globalFlags.EtcdCertFile, "etcd-certfile", "", "etcd cert file authentication")
globalFlagSet.StringVar(&globalFlags.EtcdCAFile, "etcd-cafile", "", "etcd CA file authentication")
globalFlagSet.StringVar(&globalFlags.EtcdUsername, "etcd-username", "", "username for secure etcd communication")
globalFlagSet.StringVar(&globalFlags.EtcdPassword, "etcd-password", "", "password for secure etcd communication")
globalFlagSet.StringVar(&globalFlags.Group, "group", "", "locksmith group")
globalFlagSet.BoolVar(&globalFlags.Version, "version", false, "Print the version and exit.")
commands = []*Command{<|fim▁hole|> cmdHelp,
cmdLock,
cmdReboot,
cmdSendNeedReboot,
cmdSetMax,
cmdStatus,
cmdUnlock,
}
}
type Command struct {
Name string // Name of the Command and the string to use to invoke it
Summary string // One-sentence summary of what the Command does
Usage string // Usage options/arguments
Description string // Detailed description of command
Flags flag.FlagSet // Set of flags associated with this command
Run func(args []string) int // Run a command with the given arguments, return exit status
}
func getAllFlags() (flags []*flag.Flag) {
return getFlags(globalFlagSet)
}
func getFlags(flagSet *flag.FlagSet) (flags []*flag.Flag) {
flags = make([]*flag.Flag, 0)
flagSet.VisitAll(func(f *flag.Flag) {
flags = append(flags, f)
})
return
}
func main() {
globalFlagSet.Parse(os.Args[1:])
var args = globalFlagSet.Args()
if len(globalFlags.Endpoints) == 0 {
globalFlags.Endpoints = defaultEndpoints
}
progName := path.Base(os.Args[0])
if globalFlags.Version {
fmt.Printf("%s version %s\n", progName, version.Version)
os.Exit(0)
}
if progName == "locksmithd" {
flagsFromEnv("LOCKSMITHD", globalFlagSet)
os.Exit(runDaemon())
}
// no command specified - trigger help
if len(args) < 1 {
args = append(args, "help")
}
flagsFromEnv("LOCKSMITHCTL", globalFlagSet)
var cmd *Command
// determine which Command should be run
for _, c := range commands {
if c.Name == args[0] {
cmd = c
if err := c.Flags.Parse(args[1:]); err != nil {
fmt.Println(err.Error())
os.Exit(2)
}
break
}
}
if cmd == nil {
fmt.Printf("%v: unknown subcommand: %q\n", cliName, args[0])
fmt.Printf("Run '%v help' for usage.\n", cliName)
os.Exit(2)
}
os.Exit(cmd.Run(cmd.Flags.Args()))
}
// getClient returns an initialized EtcdLockClient, using an etcd
// client configured from the global etcd flags
func getClient() (*lock.EtcdLockClient, error) {
// copy of github.com/coreos/etcd/client.DefaultTransport so that
// TLSClientConfig can be overridden.
transport := &http.Transport{
Proxy: http.ProxyFromEnvironment,
Dial: (&net.Dialer{
Timeout: 30 * time.Second,
KeepAlive: 30 * time.Second,
}).Dial,
TLSHandshakeTimeout: 10 * time.Second,
}
if globalFlags.EtcdCAFile != "" || globalFlags.EtcdCertFile != "" || globalFlags.EtcdKeyFile != "" {
cert, err := tls.LoadX509KeyPair(globalFlags.EtcdCertFile, globalFlags.EtcdKeyFile)
if err != nil {
return nil, err
}
ca, err := ioutil.ReadFile(globalFlags.EtcdCAFile)
if err != nil {
return nil, err
}
capool := x509.NewCertPool()
capool.AppendCertsFromPEM(ca)
tlsconf := &tls.Config{
Certificates: []tls.Certificate{cert},
RootCAs: capool,
}
tlsconf.BuildNameToCertificate()
transport.TLSClientConfig = tlsconf
}
cfg := client.Config{
Endpoints: globalFlags.Endpoints,
Transport: transport,
Username: globalFlags.EtcdUsername,
Password: globalFlags.EtcdPassword,
}
ec, err := client.New(cfg)
if err != nil {
return nil, err
}
kapi := client.NewKeysAPI(ec)
lc, err := lock.NewEtcdLockClient(kapi, globalFlags.Group)
if err != nil {
return nil, err
}
return lc, err
}
// flagsFromEnv parses all registered flags in the given flagSet,
// and if they are not already set it attempts to set their values from
// environment variables. Environment variables take the name of the flag but
// are UPPERCASE, have the given prefix, and any dashes are replaced by
// underscores - for example: some-flag => PREFIX_SOME_FLAG
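// For this tool that means e.g. LOCKSMITHCTL_ETCD_USERNAME or LOCKSMITHD_GROUP
// (illustrative variable names) can stand in for the corresponding command-line
// flags when those flags are not set explicitly.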
func flagsFromEnv(prefix string, fs *flag.FlagSet) {
alreadySet := make(map[string]bool)
fs.Visit(func(f *flag.Flag) {
alreadySet[f.Name] = true
})
fs.VisitAll(func(f *flag.Flag) {
if !alreadySet[f.Name] {
key := strings.ToUpper(prefix + "_" + strings.Replace(f.Name, "-", "_", -1))
val := os.Getenv(key)
if val != "" {
fs.Set(f.Name, val)
}
}
})
}<|fim▁end|> | |
<|file_name|>ci.go<|end_file_name|><|fim▁begin|>// Copyright 2016 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
// +build none
/*
The ci command is called from Continuous Integration scripts.
Usage: go run ci.go <command> <command flags/arguments>
Available commands are:
install [-arch architecture] [ packages... ] -- builds packages and executables
test [ -coverage ] [ -vet ] [ packages... ] -- runs the tests
archive [-arch architecture] [ -type zip|tar ] [ -signer key-envvar ] [ -upload dest ] -- archives build artefacts
importkeys -- imports signing keys from env
debsrc [ -signer key-id ] [ -upload dest ] -- creates a debian source package
nsis -- creates a Windows NSIS installer
aar [ -sign key-id ] [-deploy repo] [ -upload dest ] -- creates an Android archive
xcode [ -sign key-id ] [-deploy repo] [ -upload dest ] -- creates an iOS XCode framework
xgo [ options ] -- cross builds according to options
For all commands, -n prevents execution of external programs (dry run mode).
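
Illustrative invocations, run from the repository root:

	go run build/ci.go install ./cmd/geth
	go run build/ci.go test -coverage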
*/
package main
import (
"bufio"
"bytes"
"encoding/base64"
"flag"
"fmt"
"go/parser"
"go/token"
"io/ioutil"
"log"
"os"
"os/exec"
"path/filepath"
"regexp"
"runtime"
"strings"
"time"
"github.com/kejace/go-ethereum/internal/build"
)
var (
// Files that end up in the geth*.zip archive.
gethArchiveFiles = []string{
"COPYING",
executablePath("geth"),
}
// Files that end up in the geth-alltools*.zip archive.
allToolsArchiveFiles = []string{
"COPYING",
executablePath("abigen"),
executablePath("evm"),
executablePath("geth"),
executablePath("rlpdump"),
}
// A debian package is created for all executables listed here.
debExecutables = []debExecutable{
{
Name: "geth",
Description: "Ethereum CLI client.",
},
{
Name: "rlpdump",
Description: "Developer utility tool that prints RLP structures.",
},
{
Name: "evm",
Description: "Developer utility version of the EVM (Ethereum Virtual Machine) that is capable of running bytecode snippets within a configurable environment and execution mode.",
},
{
Name: "abigen",
Description: "Source code generator to convert Ethereum contract definitions into easy to use, compile-time type-safe Go packages.",
},
}
// Distros for which packages are created.
// Note: vivid is unsupported because there is no golang-1.6 package for it.
debDistros = []string{"trusty", "wily", "xenial", "yakkety"}
)
var GOBIN, _ = filepath.Abs(filepath.Join("build", "bin"))
func executablePath(name string) string {
if runtime.GOOS == "windows" {
name += ".exe"
}
return filepath.Join(GOBIN, name)
}
func main() {
log.SetFlags(log.Lshortfile)
if _, err := os.Stat(filepath.Join("build", "ci.go")); os.IsNotExist(err) {
log.Fatal("this script must be run from the root of the repository")
}
if len(os.Args) < 2 {
log.Fatal("need subcommand as first argument")
}
switch os.Args[1] {
case "install":
doInstall(os.Args[2:])
case "test":
doTest(os.Args[2:])
case "archive":
doArchive(os.Args[2:])
case "debsrc":
doDebianSource(os.Args[2:])
case "nsis":
doWindowsInstaller(os.Args[2:])
case "aar":
doAndroidArchive(os.Args[2:])
case "xcode":
doXCodeFramework(os.Args[2:])
case "xgo":
doXgo(os.Args[2:])
default:
log.Fatal("unknown command ", os.Args[1])
}
}
// Compiling
func doInstall(cmdline []string) {
var (
arch = flag.String("arch", "", "Architecture to cross build for")
)
flag.CommandLine.Parse(cmdline)
env := build.Env()
// Check Go version. People regularly open issues about compilation
// failure with outdated Go. This should save them the trouble.
if runtime.Version() < "go1.4" && !strings.HasPrefix(runtime.Version(), "devel") {
log.Println("You have Go version", runtime.Version())
log.Println("go-ethereum requires at least Go version 1.4 and cannot")
log.Println("be compiled with an earlier version. Please upgrade your Go installation.")
os.Exit(1)
}
// Compile packages given as arguments, or everything if there are no arguments.
packages := []string{"./..."}
if flag.NArg() > 0 {
packages = flag.Args()
}
if *arch == "" || *arch == runtime.GOARCH {
goinstall := goTool("install", buildFlags(env)...)
goinstall.Args = append(goinstall.Args, "-v")
goinstall.Args = append(goinstall.Args, packages...)
build.MustRun(goinstall)
return
}
	// If we are cross compiling to ARMv5, ARMv6 or ARMv7, clean any previous builds
if *arch == "arm" {
os.RemoveAll(filepath.Join(runtime.GOROOT(), "pkg", runtime.GOOS+"_arm"))
for _, path := range filepath.SplitList(build.GOPATH()) {
os.RemoveAll(filepath.Join(path, "pkg", runtime.GOOS+"_arm"))
}
}
// Seems we are cross compiling, work around forbidden GOBIN
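	// (go install refuses to drop cross-compiled binaries into GOBIN, so the
	// packages are first installed in archive mode and each main package below
	// is built separately with "go build -o".)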
goinstall := goToolArch(*arch, "install", buildFlags(env)...)
goinstall.Args = append(goinstall.Args, "-v")
goinstall.Args = append(goinstall.Args, []string{"-buildmode", "archive"}...)
goinstall.Args = append(goinstall.Args, packages...)
build.MustRun(goinstall)
if cmds, err := ioutil.ReadDir("cmd"); err == nil {
for _, cmd := range cmds {
pkgs, err := parser.ParseDir(token.NewFileSet(), filepath.Join(".", "cmd", cmd.Name()), nil, parser.PackageClauseOnly)
if err != nil {
log.Fatal(err)
}
			for name := range pkgs {
if name == "main" {
gobuild := goToolArch(*arch, "build", buildFlags(env)...)
gobuild.Args = append(gobuild.Args, "-v")
gobuild.Args = append(gobuild.Args, []string{"-o", executablePath(cmd.Name())}...)
gobuild.Args = append(gobuild.Args, "."+string(filepath.Separator)+filepath.Join("cmd", cmd.Name()))
build.MustRun(gobuild)
break
}
}
}
}
}
func buildFlags(env build.Environment) (flags []string) {
if os.Getenv("GO_OPENCL") != "" {
flags = append(flags, "-tags", "opencl")
}
// Since Go 1.5, the separator char for link time assignments
// is '=' and using ' ' prints a warning. However, Go < 1.5 does
// not support using '='.
sep := " "
if runtime.Version() > "go1.5" || strings.Contains(runtime.Version(), "devel") {
sep = "="
}
// Set gitCommit constant via link-time assignment.
if env.Commit != "" {
flags = append(flags, "-ldflags", "-X main.gitCommit"+sep+env.Commit)
}
return flags
}
func goTool(subcmd string, args ...string) *exec.Cmd {
return goToolArch(runtime.GOARCH, subcmd, args...)
}
func goToolArch(arch string, subcmd string, args ...string) *exec.Cmd {
gocmd := filepath.Join(runtime.GOROOT(), "bin", "go")
cmd := exec.Command(gocmd, subcmd)
cmd.Args = append(cmd.Args, args...)
cmd.Env = []string{
"GO15VENDOREXPERIMENT=1",
"GOPATH=" + build.GOPATH(),
}
if arch == "" || arch == runtime.GOARCH {
cmd.Env = append(cmd.Env, "GOBIN="+GOBIN)
} else {
cmd.Env = append(cmd.Env, "CGO_ENABLED=1")
cmd.Env = append(cmd.Env, "GOARCH="+arch)
}
for _, e := range os.Environ() {
if strings.HasPrefix(e, "GOPATH=") || strings.HasPrefix(e, "GOBIN=") {
continue
}
cmd.Env = append(cmd.Env, e)
}
return cmd
}
// Running The Tests
//
// "tests" also includes static analysis tools such as vet.
func doTest(cmdline []string) {
var (
vet = flag.Bool("vet", false, "Whether to run go vet")
coverage = flag.Bool("coverage", false, "Whether to record code coverage")
)
flag.CommandLine.Parse(cmdline)
packages := []string{"./..."}
if len(flag.CommandLine.Args()) > 0 {
packages = flag.CommandLine.Args()
}
if len(packages) == 1 && packages[0] == "./..." {
// Resolve ./... manually since go vet will fail on vendored stuff
out, err := goTool("list", "./...").CombinedOutput()
if err != nil {
log.Fatalf("package listing failed: %v\n%s", err, string(out))
}
packages = []string{}
for _, line := range strings.Split(string(out), "\n") {
if !strings.Contains(line, "vendor") {
packages = append(packages, strings.TrimSpace(line))
}
}
}
// Run analysis tools before the tests.
if *vet {
build.MustRun(goTool("vet", packages...))
}
// Run the actual tests.
gotest := goTool("test")
// Test a single package at a time. CI builders are slow
// and some tests run into timeouts under load.
gotest.Args = append(gotest.Args, "-p", "1")
if *coverage {
gotest.Args = append(gotest.Args, "-covermode=atomic", "-cover")
}
gotest.Args = append(gotest.Args, packages...)
build.MustRun(gotest)
}
// Release Packaging
func doArchive(cmdline []string) {
var (
		arch = flag.String("arch", runtime.GOARCH, "Architecture for cross packaging")
atype = flag.String("type", "zip", "Type of archive to write (zip|tar)")
signer = flag.String("signer", "", `Environment variable holding the signing key (e.g. LINUX_SIGNING_KEY)`)
upload = flag.String("upload", "", `Destination to upload the archives (usually "gethstore/builds")`)
ext string
)
flag.CommandLine.Parse(cmdline)
switch *atype {
case "zip":
ext = ".zip"
case "tar":
ext = ".tar.gz"
default:
log.Fatal("unknown archive type: ", atype)
}
var (
env = build.Env()
base = archiveBasename(*arch, env)
geth = "geth-" + base + ext
alltools = "geth-alltools-" + base + ext
)
maybeSkipArchive(env)
if err := build.WriteArchive(geth, gethArchiveFiles); err != nil {
log.Fatal(err)
}
if err := build.WriteArchive(alltools, allToolsArchiveFiles); err != nil {
log.Fatal(err)
}
for _, archive := range []string{geth, alltools} {
if err := archiveUpload(archive, *upload, *signer); err != nil {
log.Fatal(err)
}
}
}
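// archiveBasename builds the platform/version part of an archive name, e.g.
// "linux-amd64-1.5.0-unstable-abcdef12" (illustrative version and commit).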
func archiveBasename(arch string, env build.Environment) string {
platform := runtime.GOOS + "-" + arch
if arch == "arm" {
platform += os.Getenv("GOARM")
}
if arch == "android" {
platform = "android-all"
}
if arch == "ios" {
platform = "ios-all"
}
return platform + "-" + archiveVersion(env)
}
func archiveVersion(env build.Environment) string {
version := build.VERSION()
if isUnstableBuild(env) {
version += "-unstable"
}
if env.Commit != "" {
version += "-" + env.Commit[:8]
}
return version
}
func archiveUpload(archive string, blobstore string, signer string) error {
// If signing was requested, generate the signature files
if signer != "" {
pgpkey, err := base64.StdEncoding.DecodeString(os.Getenv(signer))
if err != nil {
return fmt.Errorf("invalid base64 %s", signer)
}
if err := build.PGPSignFile(archive, archive+".asc", string(pgpkey)); err != nil {
return err
}
}
// If uploading to Azure was requested, push the archive possibly with its signature
if blobstore != "" {
auth := build.AzureBlobstoreConfig{
Account: strings.Split(blobstore, "/")[0],
Token: os.Getenv("AZURE_BLOBSTORE_TOKEN"),
Container: strings.SplitN(blobstore, "/", 2)[1],
}
if err := build.AzureBlobstoreUpload(archive, filepath.Base(archive), auth); err != nil {
return err
}
if signer != "" {
if err := build.AzureBlobstoreUpload(archive+".asc", filepath.Base(archive+".asc"), auth); err != nil {
return err
}
}
}
return nil
}
// maybeSkipArchive skips archiving for some build configurations.
func maybeSkipArchive(env build.Environment) {
if env.IsPullRequest {
log.Printf("skipping because this is a PR build")
os.Exit(0)
}
if env.Branch != "master" && !strings.HasPrefix(env.Tag, "v1.") {
log.Printf("skipping because branch %q, tag %q is not on the whitelist", env.Branch, env.Tag)
os.Exit(0)
}
}
// Debian Packaging
func doDebianSource(cmdline []string) {
var (
signer = flag.String("signer", "", `Signing key name, also used as package author`)
upload = flag.String("upload", "", `Where to upload the source package (usually "ppa:ethereum/ethereum")`)
workdir = flag.String("workdir", "", `Output directory for packages (uses temp dir if unset)`)
now = time.Now()
)
flag.CommandLine.Parse(cmdline)
*workdir = makeWorkdir(*workdir)
env := build.Env()
maybeSkipArchive(env)
// Import the signing key.
if b64key := os.Getenv("PPA_SIGNING_KEY"); b64key != "" {
key, err := base64.StdEncoding.DecodeString(b64key)
if err != nil {
log.Fatal("invalid base64 PPA_SIGNING_KEY")
}
gpg := exec.Command("gpg", "--import")
gpg.Stdin = bytes.NewReader(key)
build.MustRun(gpg)
}
// Create the packages.
for _, distro := range debDistros {
meta := newDebMetadata(distro, *signer, env, now)
pkgdir := stageDebianSource(*workdir, meta)
debuild := exec.Command("debuild", "-S", "-sa", "-us", "-uc")
debuild.Dir = pkgdir
build.MustRun(debuild)
changes := fmt.Sprintf("%s_%s_source.changes", meta.Name(), meta.VersionString())
changes = filepath.Join(*workdir, changes)
if *signer != "" {
build.MustRunCommand("debsign", changes)
}
if *upload != "" {
build.MustRunCommand("dput", *upload, changes)
}
}
}
func makeWorkdir(wdflag string) string {
var err error
if wdflag != "" {
err = os.MkdirAll(wdflag, 0744)
} else {
wdflag, err = ioutil.TempDir("", "geth-build-")
}
if err != nil {
log.Fatal(err)
}
return wdflag
}
func isUnstableBuild(env build.Environment) bool {
if env.Branch != "master" && env.Tag != "" {
return false
}
return true
}
type debMetadata struct {
Env build.Environment
// go-ethereum version being built. Note that this
// is not the debian package version. The package version
// is constructed by VersionString.
Version string
Author string // "name <email>", also selects signing key
Distro, Time string
Executables []debExecutable
}
type debExecutable struct {
Name, Description string
}
func newDebMetadata(distro, author string, env build.Environment, t time.Time) debMetadata {
if author == "" {
// No signing key, use default author.
author = "Ethereum Builds <[email protected]>"
}
return debMetadata{
Env: env,
Author: author,
Distro: distro,
Version: build.VERSION(),
Time: t.Format(time.RFC1123Z),
Executables: debExecutables,
}
}
// Name returns the name of the metapackage that depends
// on all executable packages.
func (meta debMetadata) Name() string {<|fim▁hole|> if isUnstableBuild(meta.Env) {
return "ethereum-unstable"
}
return "ethereum"
}
// VersionString returns the debian version of the packages.
func (meta debMetadata) VersionString() string {
vsn := meta.Version
if meta.Env.Buildnum != "" {
vsn += "+build" + meta.Env.Buildnum
}
if meta.Distro != "" {
vsn += "+" + meta.Distro
}
return vsn
}
// ExeList returns the list of all executable packages.
func (meta debMetadata) ExeList() string {
names := make([]string, len(meta.Executables))
for i, e := range meta.Executables {
names[i] = meta.ExeName(e)
}
return strings.Join(names, ", ")
}
// ExeName returns the package name of an executable package.
func (meta debMetadata) ExeName(exe debExecutable) string {
if isUnstableBuild(meta.Env) {
return exe.Name + "-unstable"
}
return exe.Name
}
// ExeConflicts returns the content of the Conflicts field
// for executable packages.
func (meta debMetadata) ExeConflicts(exe debExecutable) string {
if isUnstableBuild(meta.Env) {
// Set up the conflicts list so that the *-unstable packages
// cannot be installed alongside the regular version.
//
// https://www.debian.org/doc/debian-policy/ch-relationships.html
// is very explicit about Conflicts: and says that Breaks: should
// be preferred and the conflicting files should be handled via
// alternates. We might do this eventually but using a conflict is
// easier now.
return "ethereum, " + exe.Name
}
return ""
}
func stageDebianSource(tmpdir string, meta debMetadata) (pkgdir string) {
pkg := meta.Name() + "-" + meta.VersionString()
pkgdir = filepath.Join(tmpdir, pkg)
if err := os.Mkdir(pkgdir, 0755); err != nil {
log.Fatal(err)
}
// Copy the source code.
build.MustRunCommand("git", "checkout-index", "-a", "--prefix", pkgdir+string(filepath.Separator))
// Put the debian build files in place.
debian := filepath.Join(pkgdir, "debian")
build.Render("build/deb.rules", filepath.Join(debian, "rules"), 0755, meta)
build.Render("build/deb.changelog", filepath.Join(debian, "changelog"), 0644, meta)
build.Render("build/deb.control", filepath.Join(debian, "control"), 0644, meta)
build.Render("build/deb.copyright", filepath.Join(debian, "copyright"), 0644, meta)
build.RenderString("8\n", filepath.Join(debian, "compat"), 0644, meta)
build.RenderString("3.0 (native)\n", filepath.Join(debian, "source/format"), 0644, meta)
for _, exe := range meta.Executables {
install := filepath.Join(debian, meta.ExeName(exe)+".install")
docs := filepath.Join(debian, meta.ExeName(exe)+".docs")
build.Render("build/deb.install", install, 0644, exe)
build.Render("build/deb.docs", docs, 0644, exe)
}
return pkgdir
}
// Windows installer
func doWindowsInstaller(cmdline []string) {
	// Parse the flags and skip installer generation on PR builds
var (
arch = flag.String("arch", runtime.GOARCH, "Architecture for cross build packaging")
signer = flag.String("signer", "", `Environment variable holding the signing key (e.g. WINDOWS_SIGNING_KEY)`)
upload = flag.String("upload", "", `Destination to upload the archives (usually "gethstore/builds")`)
workdir = flag.String("workdir", "", `Output directory for packages (uses temp dir if unset)`)
)
flag.CommandLine.Parse(cmdline)
*workdir = makeWorkdir(*workdir)
env := build.Env()
maybeSkipArchive(env)
// Aggregate binaries that are included in the installer
var (
devTools []string
allTools []string
gethTool string
)
for _, file := range allToolsArchiveFiles {
if file == "COPYING" { // license, copied later
continue
}
allTools = append(allTools, filepath.Base(file))
if filepath.Base(file) == "geth.exe" {
gethTool = file
} else {
devTools = append(devTools, file)
}
}
	// Render NSIS scripts: the installer contains two sections; the first
	// holds the geth binary, the second holds the dev tools.
templateData := map[string]interface{}{
"License": "COPYING",
"Geth": gethTool,
"DevTools": devTools,
}
build.Render("build/nsis.geth.nsi", filepath.Join(*workdir, "geth.nsi"), 0644, nil)
build.Render("build/nsis.install.nsh", filepath.Join(*workdir, "install.nsh"), 0644, templateData)
build.Render("build/nsis.uninstall.nsh", filepath.Join(*workdir, "uninstall.nsh"), 0644, allTools)
build.Render("build/nsis.envvarupdate.nsh", filepath.Join(*workdir, "EnvVarUpdate.nsh"), 0644, nil)
build.CopyFile(filepath.Join(*workdir, "SimpleFC.dll"), "build/nsis.simplefc.dll", 0755)
build.CopyFile(filepath.Join(*workdir, "COPYING"), "COPYING", 0755)
// Build the installer. This assumes that all the needed files have been previously
// built (don't mix building and packaging to keep cross compilation complexity to a
// minimum).
version := strings.Split(build.VERSION(), ".")
if env.Commit != "" {
version[2] += "-" + env.Commit[:8]
}
installer, _ := filepath.Abs("geth-" + archiveBasename(*arch, env) + ".exe")
build.MustRunCommand("makensis.exe",
"/DOUTPUTFILE="+installer,
"/DMAJORVERSION="+version[0],
"/DMINORVERSION="+version[1],
"/DBUILDVERSION="+version[2],
"/DARCH="+*arch,
filepath.Join(*workdir, "geth.nsi"),
)
// Sign and publish installer.
if err := archiveUpload(installer, *upload, *signer); err != nil {
log.Fatal(err)
}
}
// Android archives
func doAndroidArchive(cmdline []string) {
var (
signer = flag.String("signer", "", `Environment variable holding the signing key (e.g. ANDROID_SIGNING_KEY)`)
deploy = flag.String("deploy", "", `Destination to deploy the archive (usually "https://oss.sonatype.org")`)
upload = flag.String("upload", "", `Destination to upload the archive (usually "gethstore/builds")`)
)
flag.CommandLine.Parse(cmdline)
env := build.Env()
// Build the Android archive and Maven resources
build.MustRun(goTool("get", "golang.org/x/mobile/cmd/gomobile"))
build.MustRun(gomobileTool("init"))
build.MustRun(gomobileTool("bind", "--target", "android", "--javapkg", "org.ethereum", "-v", "github.com/kejace/go-ethereum/mobile"))
meta := newMavenMetadata(env)
build.Render("build/mvn.pom", meta.Package+".pom", 0755, meta)
// Skip Maven deploy and Azure upload for PR builds
maybeSkipArchive(env)
// Sign and upload the archive to Azure
archive := "geth-" + archiveBasename("android", env) + ".aar"
os.Rename("geth.aar", archive)
if err := archiveUpload(archive, *upload, *signer); err != nil {
log.Fatal(err)
}
// Sign and upload all the artifacts to Maven Central
os.Rename(archive, meta.Package+".aar")
if *signer != "" && *deploy != "" {
// Import the signing key into the local GPG instance
if b64key := os.Getenv(*signer); b64key != "" {
key, err := base64.StdEncoding.DecodeString(b64key)
if err != nil {
log.Fatalf("invalid base64 %s", *signer)
}
gpg := exec.Command("gpg", "--import")
gpg.Stdin = bytes.NewReader(key)
build.MustRun(gpg)
}
// Upload the artifacts to Sonatype and/or Maven Central
repo := *deploy + "/service/local/staging/deploy/maven2"
if meta.Develop {
repo = *deploy + "/content/repositories/snapshots"
}
build.MustRunCommand("mvn", "gpg:sign-and-deploy-file",
"-settings=build/mvn.settings", "-Durl="+repo, "-DrepositoryId=ossrh",
"-DpomFile="+meta.Package+".pom", "-Dfile="+meta.Package+".aar")
}
}
func gomobileTool(subcmd string, args ...string) *exec.Cmd {
cmd := exec.Command(filepath.Join(GOBIN, "gomobile"), subcmd)
cmd.Args = append(cmd.Args, args...)
cmd.Env = []string{
"GOPATH=" + build.GOPATH(),
}
for _, e := range os.Environ() {
if strings.HasPrefix(e, "GOPATH=") {
continue
}
cmd.Env = append(cmd.Env, e)
}
return cmd
}
type mavenMetadata struct {
Version string
Package string
Develop bool
Contributors []mavenContributor
}
type mavenContributor struct {
Name string
Email string
}
func newMavenMetadata(env build.Environment) mavenMetadata {
// Collect the list of authors from the repo root
contribs := []mavenContributor{}
if authors, err := os.Open("AUTHORS"); err == nil {
defer authors.Close()
scanner := bufio.NewScanner(authors)
for scanner.Scan() {
// Skip any whitespace from the authors list
line := strings.TrimSpace(scanner.Text())
if line == "" || line[0] == '#' {
continue
}
// Split the author and insert as a contributor
			re := regexp.MustCompile("([^<]+) <(.+)>")
parts := re.FindStringSubmatch(line)
if len(parts) == 3 {
contribs = append(contribs, mavenContributor{Name: parts[1], Email: parts[2]})
}
}
}
// Render the version and package strings
version := build.VERSION()
if isUnstableBuild(env) {
version += "-SNAPSHOT"
}
return mavenMetadata{
Version: version,
Package: "geth-" + version,
Develop: isUnstableBuild(env),
Contributors: contribs,
}
}
// XCode frameworks
func doXCodeFramework(cmdline []string) {
var (
signer = flag.String("signer", "", `Environment variable holding the signing key (e.g. IOS_SIGNING_KEY)`)
deploy = flag.String("deploy", "", `Destination to deploy the archive (usually "trunk")`)
upload = flag.String("upload", "", `Destination to upload the archives (usually "gethstore/builds")`)
)
flag.CommandLine.Parse(cmdline)
env := build.Env()
// Build the iOS XCode framework
build.MustRun(goTool("get", "golang.org/x/mobile/cmd/gomobile"))
build.MustRun(gomobileTool("init"))
archive := "geth-" + archiveBasename("ios", env)
if err := os.Mkdir(archive, os.ModePerm); err != nil {
log.Fatal(err)
}
bind := gomobileTool("bind", "--target", "ios", "--tags", "ios", "--prefix", "GE", "-v", "github.com/kejace/go-ethereum/mobile")
bind.Dir, _ = filepath.Abs(archive)
build.MustRun(bind)
build.MustRunCommand("tar", "-zcvf", archive+".tar.gz", archive)
// Skip CocoaPods deploy and Azure upload for PR builds
maybeSkipArchive(env)
// Sign and upload the framework to Azure
if err := archiveUpload(archive+".tar.gz", *upload, *signer); err != nil {
log.Fatal(err)
}
// Prepare and upload a PodSpec to CocoaPods
if *deploy != "" {
meta := newPodMetadata(env)
build.Render("build/pod.podspec", meta.Name+".podspec", 0755, meta)
build.MustRunCommand("pod", *deploy, "push", meta.Name+".podspec", "--allow-warnings")
}
}
type podMetadata struct {
Name string
Version string
Commit string
Contributors []podContributor
}
type podContributor struct {
Name string
Email string
}
func newPodMetadata(env build.Environment) podMetadata {
// Collect the list of authors from the repo root
contribs := []podContributor{}
if authors, err := os.Open("AUTHORS"); err == nil {
defer authors.Close()
scanner := bufio.NewScanner(authors)
for scanner.Scan() {
// Skip any whitespace from the authors list
line := strings.TrimSpace(scanner.Text())
if line == "" || line[0] == '#' {
continue
}
// Split the author and insert as a contributor
			re := regexp.MustCompile("([^<]+) <(.+)>")
parts := re.FindStringSubmatch(line)
if len(parts) == 3 {
contribs = append(contribs, podContributor{Name: parts[1], Email: parts[2]})
}
}
}
name := "Geth"
if isUnstableBuild(env) {
name += "Develop"
}
return podMetadata{
Name: name,
Version: archiveVersion(env),
Commit: env.Commit,
Contributors: contribs,
}
}
// Cross compilation
func doXgo(cmdline []string) {
flag.CommandLine.Parse(cmdline)
env := build.Env()
// Make sure xgo is available for cross compilation
gogetxgo := goTool("get", "github.com/karalabe/xgo")
build.MustRun(gogetxgo)
// Execute the actual cross compilation
xgo := xgoTool(append(buildFlags(env), flag.Args()...))
build.MustRun(xgo)
}
func xgoTool(args []string) *exec.Cmd {
cmd := exec.Command(filepath.Join(GOBIN, "xgo"), args...)
cmd.Env = []string{
"GOPATH=" + build.GOPATH(),
"GOBIN=" + GOBIN,
}
for _, e := range os.Environ() {
if strings.HasPrefix(e, "GOPATH=") || strings.HasPrefix(e, "GOBIN=") {
continue
}
cmd.Env = append(cmd.Env, e)
}
return cmd
}<|fim▁end|> | |
<|file_name|>constraint_shape_based.py<|end_file_name|><|fim▁begin|># Copyright (C) 2010-2019 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import unittest as ut
import unittest_decorators as utx
import numpy as np
import math
import espressomd
import espressomd.interactions
import espressomd.shapes
import tests_common
@utx.skipIfMissingFeatures(["LENNARD_JONES_GENERIC"])
class ShapeBasedConstraintTest(ut.TestCase):
box_l = 30.
system = espressomd.System(box_l=3 * [box_l])
def tearDown(self):
self.system.part.clear()
self.system.constraints.clear()
def pos_on_surface(self, theta, v, semiaxis0, semiaxis1,
semiaxis2, center=np.array([15, 15, 15])):
"""Return position on ellipsoid surface."""
pos = np.array([semiaxis0 * np.sqrt(1. - v**2) * np.cos(theta),
semiaxis1 * np.sqrt(1. - v**2) * np.sin(theta),
semiaxis2 * v])
return pos + center
def test_hollow_conical_frustum(self):
"""
Test implementation of conical frustum shape.
"""
R1 = 5.0
R2 = 10.0
LENGTH = 15.0
D = 2.4
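        # Geometry sketch: z(y) maps a radial coordinate y onto the axial
        # position of the frustum's slanted wall, i.e. the linear interpolation
        # between z = l/2 at y = r1 and z = -l/2 at y = r2.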
        def z(y, r1, r2, l):
            return l / (r1 - r2) * y + l / 2. - l * r1 / (r1 - r2)
shape = espressomd.shapes.HollowConicalFrustum(center=[0.0, 0.0, 0.0], axis=[
0, 0, 1], r1=R1, r2=R2, thickness=0.0, length=LENGTH)
y_vals = np.linspace(R1, R2, 100)
for y in y_vals:
dist = shape.calc_distance(position=[0.0, y, z(y, R1, R2, LENGTH)])
self.assertAlmostEqual(dist[0], 0.0)
shape = espressomd.shapes.HollowConicalFrustum(center=[0.0, 0.0, 0.0], axis=[
0, 0, 1], r1=R1, r2=R2, thickness=D, length=LENGTH, direction=-1)
for y in y_vals:
dist = shape.calc_distance(position=[0.0, y, z(y, R1, R2, LENGTH)])
self.assertAlmostEqual(dist[0], 0.5 * D)
np.testing.assert_almost_equal(np.copy(shape.center), [0.0, 0.0, 0.0])
np.testing.assert_almost_equal(np.copy(shape.axis), [0, 0, 1])
self.assertEqual(shape.r1, R1)
self.assertEqual(shape.r2, R2)
self.assertEqual(shape.thickness, D)
self.assertEqual(shape.length, LENGTH)
self.assertEqual(shape.direction, -1)
shape = espressomd.shapes.HollowConicalFrustum(center=[0.0, 0.0, 0.0], axis=[
0, 0, 1], r1=R1, r2=R2, thickness=D, length=LENGTH)
for y in y_vals:
dist = shape.calc_distance(position=[0.0, y, z(y, R1, R2, LENGTH)])
self.assertAlmostEqual(dist[0], -0.5 * D)
# check sign of dist
shape = espressomd.shapes.HollowConicalFrustum(center=[0.0, 0.0, 0.0], axis=[
0, 0, 1], r1=R1, r2=R1, thickness=D, length=LENGTH)
self.assertLess(shape.calc_distance(
position=[0.0, R1, 0.25 * LENGTH])[0], 0.0)
self.assertLess(shape.calc_distance(
position=[0.0, R1 + (0.5 - sys.float_info.epsilon) * D, 0.25 * LENGTH])[0], 0.0)
self.assertGreater(shape.calc_distance(
position=[0.0, R1 + (0.5 + sys.float_info.epsilon) * D, 0.25 * LENGTH])[0], 0.0)
self.assertGreater(shape.calc_distance(
position=[0.0, R1 - (0.5 + sys.float_info.epsilon) * D, 0.25 * LENGTH])[0], 0.0)
shape = espressomd.shapes.HollowConicalFrustum(center=[0.0, 0.0, 0.0], axis=[
0, 0, 1], r1=R1, r2=R1, thickness=D, length=LENGTH, direction=-1)
self.assertGreater(shape.calc_distance(
position=[0.0, R1, 0.25 * LENGTH])[0], 0.0)
self.assertGreater(shape.calc_distance(
position=[0.0, R1 + (0.5 - sys.float_info.epsilon) * D, 0.25 * LENGTH])[0], 0.0)
self.assertLess(shape.calc_distance(
position=[0.0, R1 + (0.5 + sys.float_info.epsilon) * D, 0.25 * LENGTH])[0], 0.0)
self.assertLess(shape.calc_distance(
position=[0.0, R1 - (0.5 + sys.float_info.epsilon) * D, 0.25 * LENGTH])[0], 0.0)
def test_sphere(self):
"""Checks geometry of an inverted sphere
"""
rad = self.box_l / 2.0
sphere_shape = espressomd.shapes.Sphere(
center=3 * [rad],
radius=rad,
direction=-1)
phi_steps = 11
theta_steps = 11
for distance in {-1.2, 2.6}:
for phi in range(phi_steps):
phi_angle = phi / phi_steps * 2.0 * math.pi
for theta in range(theta_steps):
theta_angle = theta / theta_steps * math.pi
pos = np.array(
[math.cos(phi_angle) * math.sin(theta_angle)
* (rad + distance),
math.sin(phi_angle) * math.sin(theta_angle)
* (rad + distance),
math.cos(theta_angle) * (rad + distance)]) + rad
shape_dist, _ = sphere_shape.calc_distance(
position=pos.tolist())
self.assertAlmostEqual(shape_dist, -distance)
def test_ellipsoid(self):
"""Checks that distance of particles on the ellipsoid constraint's surface is zero.
For the case of a spherical ellipsoid, also several non-zero distances are tested.
"""
system = self.system
system.time_step = 0.01
system.cell_system.skin = 0.4
system.part.add(pos=[0., 0., 0.], type=0)
# abuse generic LJ to measure distance via the potential V(r) = r
system.non_bonded_inter[0, 1].generic_lennard_jones.set_params(
epsilon=1., sigma=1., cutoff=7., shift=0., offset=0., e1=-1, e2=0, b1=1., b2=0.)
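        # With e1=-1, b1=1 and b2=0 the generic LJ potential (assumed here to
        # have the form V(r) = eps * (b1 * (sig/r)**e1 - b2 * (sig/r)**e2))
        # reduces to V(r) = r, so the measured energy equals the distance from
        # the constraint surface.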
N = 10
# check oblate ellipsoid
semiaxes = [2.18, 5.45]
e = espressomd.shapes.Ellipsoid(
a=semiaxes[0],
b=semiaxes[1],
center=3 * [self.box_l / 2.],
direction=+1)
constraint_e = espressomd.constraints.ShapeBasedConstraint(
shape=e, particle_type=1, penetrable=True)
const1 = system.constraints.add(constraint_e)
for i in range(N):
for j in range(N):
theta = 2. * i / float(N) * np.pi
v = j / float(N - 1) * 2. - 1
pos = self.pos_on_surface(
theta, v, semiaxes[0], semiaxes[1], semiaxes[1])
system.part[0].pos = pos
system.integrator.run(recalc_forces=True, steps=0)
energy = system.analysis.energy()
self.assertAlmostEqual(energy["total"], 0., places=6)
system.constraints.remove(const1)
# check prolate ellipsoid
semiaxes = [3.61, 2.23]
e = espressomd.shapes.Ellipsoid(
a=semiaxes[0],
b=semiaxes[1],
center=3 * [self.box_l / 2.],
direction=+1)
constraint_e = espressomd.constraints.ShapeBasedConstraint(
shape=e, particle_type=1, penetrable=True)
const1 = system.constraints.add(constraint_e)
for i in range(N):
for j in range(N):
theta = 2. * i / float(N) * np.pi
v = j / float(N - 1) * 2. - 1
pos = self.pos_on_surface(
theta, v, semiaxes[0], semiaxes[1], semiaxes[1])
system.part[0].pos = pos
system.integrator.run(recalc_forces=True, steps=0)
energy = system.analysis.energy()
self.assertAlmostEqual(energy["total"], 0., places=6)
# check sphere (multiple distances from surface)
# change ellipsoid parameters instead of creating a new constraint
e.a = 1.
e.b = 1.
radii = np.linspace(1., 6.5, 7)
for i in range(N):
for j in range(N):
theta = 2. * i / float(N) * np.pi
v = j / float(N - 1) * 2. - 1
for r in radii:
pos = self.pos_on_surface(theta, v, r, r, r)
system.part[0].pos = pos
system.integrator.run(recalc_forces=True, steps=0)
energy = system.analysis.energy()
self.assertAlmostEqual(energy["total"], r - 1.)
# Reset the interaction to zero
system.non_bonded_inter[0, 1].generic_lennard_jones.set_params(
epsilon=0., sigma=0., cutoff=0., shift=0., offset=0., e1=0, e2=0, b1=0., b2=0.)
def test_cylinder(self):
"""Tests if shape based constraints can be added to a system both by
(1) defining a constraint object which is then added
(2) and via keyword arguments.
Checks that cylinder constraints with LJ interactions exert forces
on a test particle (that is, the constraints do what they should).
"""
system = self.system
system.time_step = 0.01
system.cell_system.skin = 0.4
rad = self.box_l / 2.0
length = self.box_l / 2.0
system.part.add(id=0, pos=[rad, 1.02, rad], type=0)
# check force calculation of a cylinder without top and bottom
interaction_dir = -1 # constraint is directed inwards
cylinder_shape = espressomd.shapes.Cylinder(
center=3 * [rad],
axis=[0, 0, 1],
direction=interaction_dir,
radius=rad,
length=self.box_l + 5) # +5 in order to have no top or bottom
penetrability = False # impenetrable
outer_cylinder_constraint = espressomd.constraints.ShapeBasedConstraint(
shape=cylinder_shape, particle_type=1, penetrable=penetrability)
outer_cylinder_wall = system.constraints.add(outer_cylinder_constraint)
system.non_bonded_inter[0, 1].lennard_jones.set_params(
epsilon=1.0, sigma=1.0, cutoff=2.0, shift=0)
system.integrator.run(0) # update forces
self.assertAlmostEqual(outer_cylinder_constraint.min_dist(), 1.02)
# test summed forces on cylinder wall
self.assertAlmostEqual(
-1.0 * outer_cylinder_wall.total_force()[1],
tests_common.lj_force(
espressomd,
cutoff=2.0,
offset=0.,
eps=1.0,
sig=1.0,
r=1.02),
places=10) # minus for Newton's third law
# check whether total_summed_outer_normal_force is correct
y_part2 = self.box_l - 1.02
system.part.add(id=1, pos=[rad, y_part2, rad], type=0)
system.integrator.run(0)
dist_part2 = self.box_l - y_part2
self.assertAlmostEqual(outer_cylinder_wall.total_force()[2], 0.0)
self.assertAlmostEqual(
outer_cylinder_wall.total_normal_force(),
2 *
tests_common.lj_force(
espressomd,
cutoff=2.0,
offset=0.,
eps=1.0,
sig=1.0,
r=dist_part2))
# Test the geometry of a cylinder with top and bottom
cylinder_shape_finite = espressomd.shapes.Cylinder(
center=3 * [rad],
axis=[0, 0, 1],
direction=1,
radius=rad,
length=length)
phi_steps = 11
for distance in {-3.6, 2.8}:
for z in range(int(self.box_l)):
center = np.array([rad, rad, z])
start_point = np.array([rad, 2 * rad - distance, z])
for phi in range(phi_steps):
# Rotation around the axis of the cylinder
phi_angle = phi / phi_steps * 2.0 * math.pi
phi_rot_matrix = np.array(
[[math.cos(phi_angle), -math.sin(phi_angle), 0.0],
[math.sin(phi_angle), math.cos(phi_angle), 0.0],
[0.0, 0.0, 1.0]])
phi_rot_point = np.dot(
phi_rot_matrix, start_point - center) + center
shape_dist, _ = cylinder_shape_finite.calc_distance(
position=phi_rot_point.tolist())
dist = -distance
if distance > 0.0:
if z < (self.box_l - length) / 2.0 + distance:
dist = (self.box_l - length) / 2.0 - z
elif z > (self.box_l + length) / 2.0 - distance:
dist = z - (self.box_l + length) / 2.0
else:
dist = -distance
else:
if z < (self.box_l - length) / 2.0:
z_dist = (self.box_l - length) / 2.0 - z
dist = math.sqrt(z_dist**2 + distance**2)
elif z > (self.box_l + length) / 2.0:
z_dist = z - (self.box_l + length) / 2.0
dist = math.sqrt(z_dist**2 + distance**2)
else:
dist = -distance
self.assertAlmostEqual(shape_dist, dist)
# Reset
system.non_bonded_inter[0, 1].lennard_jones.set_params(
epsilon=0.0, sigma=0.0, cutoff=0.0, shift=0)
def test_spherocylinder(self):
"""Checks that spherocylinder constraints with LJ interactions exert
forces on a test particle (that is, the constraints do what they should)
using geometrical parameters of (1) an infinite cylinder and (2) a
finite spherocylinder.
<|fim▁hole|> system.cell_system.skin = 0.4
system.part.add(
id=0, pos=[self.box_l / 2.0, 1.02, self.box_l / 2.0], type=0)
# check force calculation of spherocylinder constraint
# (1) infinite cylinder
interaction_dir = -1 # constraint is directed inwards
spherocylinder_shape = espressomd.shapes.SpheroCylinder(
center=3 * [self.box_l / 2.0],
axis=[0, 0, 1],
direction=interaction_dir,
radius=self.box_l / 2.0,
length=self.box_l + 5) # +5 in order to have no top or bottom
penetrability = False # impenetrable
outer_cylinder_constraint = espressomd.constraints.ShapeBasedConstraint(
shape=spherocylinder_shape, particle_type=1, penetrable=penetrability)
system.constraints.add(outer_cylinder_constraint)
system.non_bonded_inter[0, 1].lennard_jones.set_params(
epsilon=1.0, sigma=1.0, cutoff=2.0, shift=0)
system.integrator.run(0) # update forces
self.assertAlmostEqual(outer_cylinder_constraint.min_dist(), 1.02)
# test summed forces on cylinder wall
self.assertAlmostEqual(
-1.0 * outer_cylinder_constraint.total_force()[1],
tests_common.lj_force(
espressomd,
cutoff=2.0,
offset=0.,
eps=1.0,
sig=1.0,
r=1.02),
places=10) # minus for Newton's third law
# check whether total_summed_outer_normal_force is correct
y_part2 = self.box_l - 1.02
system.part.add(
id=1, pos=[self.box_l / 2.0, y_part2, self.box_l / 2.0], type=0)
system.integrator.run(0)
dist_part2 = self.box_l - y_part2
self.assertAlmostEqual(outer_cylinder_constraint.total_force()[2], 0.0)
self.assertAlmostEqual(outer_cylinder_constraint.total_normal_force(),
2 * tests_common.lj_force(
espressomd, cutoff=2.0, offset=0.,
eps=1.0, sig=1.0, r=dist_part2))
# Reset
system.part.clear()
system.constraints.clear()
system.non_bonded_inter[0, 1].lennard_jones.set_params(
epsilon=0.0, sigma=0.0, cutoff=0.0, shift=0)
# (2) finite spherocylinder
system.part.clear()
interaction_dir = -1 # constraint is directed inwards
spherocylinder_shape = espressomd.shapes.SpheroCylinder(
center=3 * [self.box_l / 2.0],
axis=[0, 1, 0],
direction=interaction_dir,
radius=10.0,
length=6.0)
penetrability = True # penetrable
inner_cylinder_constraint = espressomd.constraints.ShapeBasedConstraint(
shape=spherocylinder_shape, particle_type=1, penetrable=penetrability)
system.constraints.add(inner_cylinder_constraint)
# V(r) = r
system.non_bonded_inter[0, 1].generic_lennard_jones.set_params(
epsilon=1., sigma=1., cutoff=10., shift=0., offset=0., e1=-1, e2=0, b1=1., b2=0.)
# check hemispherical caps (multiple distances from surface)
N = 10
radii = np.linspace(1., 10., 10)
system.part.add(pos=[0., 0., 0.], type=0)
for i in range(6):
for j in range(N):
theta = 2. * i / float(N) * np.pi
v = j / float(N - 1) * 2. - 1
for r in radii:
pos = self.pos_on_surface(theta, v, r, r, r) + [0, 3, 0]
system.part[0].pos = pos
system.integrator.run(recalc_forces=True, steps=0)
energy = system.analysis.energy()
self.assertAlmostEqual(energy["total"], 10. - r)
# Reset
system.non_bonded_inter[0, 1].generic_lennard_jones.set_params(
epsilon=0., sigma=0., cutoff=0., shift=0., offset=0., e1=0, e2=0, b1=0., b2=0.)
def test_wall_forces(self):
"""Tests if shape based constraints can be added to a system both by
(1) defining a constraint object which is then added
(2) and via keyword arguments.
Checks that wall constraints with LJ interactions exert forces
on a test particle (that is, the constraints do what they should).
"""
system = self.system
system.time_step = 0.01
system.part.add(id=0, pos=[5., 1.21, 0.83], type=0)
# Check forces are initialized to zero
f_part = system.part[0].f
self.assertEqual(f_part[0], 0.)
self.assertEqual(f_part[1], 0.)
self.assertEqual(f_part[2], 0.)
system.non_bonded_inter[0, 1].lennard_jones.set_params(
epsilon=1.0, sigma=1.0, cutoff=2.0, shift=0)
system.non_bonded_inter[0, 2].lennard_jones.set_params(
epsilon=1.5, sigma=1.0, cutoff=2.0, shift=0)
shape_xz = espressomd.shapes.Wall(normal=[0., 1., 0.], dist=0.)
shape_xy = espressomd.shapes.Wall(normal=[0., 0., 1.], dist=0.)
# (1)
constraint_xz = espressomd.constraints.ShapeBasedConstraint(
shape=shape_xz, particle_type=1)
wall_xz = system.constraints.add(constraint_xz)
# (2)
wall_xy = system.constraints.add(shape=shape_xy, particle_type=2)
system.integrator.run(0) # update forces
f_part = system.part[0].f
self.assertEqual(f_part[0], 0.)
self.assertAlmostEqual(
f_part[1],
tests_common.lj_force(
espressomd,
cutoff=2.0,
offset=0.,
eps=1.0,
sig=1.0,
r=1.21),
places=10)
self.assertAlmostEqual(
f_part[2],
tests_common.lj_force(
espressomd,
cutoff=2.0,
offset=0.,
eps=1.5,
sig=1.0,
r=0.83),
places=10)
# test summed forces on walls
self.assertAlmostEqual(
-1.0 * wall_xz.total_force()[1],
tests_common.lj_force(
espressomd,
cutoff=2.0,
offset=0.,
eps=1.0,
sig=1.0,
r=1.21),
places=10) # minus for Newton's third law
self.assertAlmostEqual(
-1.0 * wall_xy.total_force()[2],
tests_common.lj_force(
espressomd,
cutoff=2.0,
offset=0.,
eps=1.5,
sig=1.0,
r=0.83),
places=10)
# check whether total_normal_force is correct
self.assertAlmostEqual(
wall_xy.total_normal_force(),
tests_common.lj_force(
espressomd,
cutoff=2.0,
offset=0.,
eps=1.5,
sig=1.0,
r=0.83),
places=10)
# this one is closer and should get the mindist()
system.part.add(pos=[5., 1.20, 0.82], type=0)
self.assertAlmostEqual(constraint_xz.min_dist(), system.part[1].pos[1])
self.assertAlmostEqual(wall_xz.min_dist(), system.part[1].pos[1])
self.assertAlmostEqual(wall_xy.min_dist(), system.part[1].pos[2])
# Reset
system.non_bonded_inter[0, 1].lennard_jones.set_params(
epsilon=0.0, sigma=0.0, cutoff=0.0, shift=0)
system.non_bonded_inter[0, 2].lennard_jones.set_params(
epsilon=0.0, sigma=0.0, cutoff=0.0, shift=0)
def test_slitpore(self):
"""Checks that slitpore constraints with LJ interactions exert forces
on a test particle (that is, the constraints do what they should).
"""
system = self.system
system.time_step = 0.01
system.cell_system.skin = 0.4
# check force calculation of slitpore constraint
slitpore_shape = espressomd.shapes.Slitpore(
channel_width=5,
lower_smoothing_radius=2,
upper_smoothing_radius=3,
pore_length=15,
pore_mouth=20,
pore_width=10,
dividing_plane=self.box_l / 2)
slitpore_constraint = espressomd.constraints.ShapeBasedConstraint(
shape=slitpore_shape, particle_type=1, penetrable=True)
system.constraints.add(slitpore_constraint)
# V(r) = r
system.non_bonded_inter[0, 1].generic_lennard_jones.set_params(
epsilon=1., sigma=1., cutoff=10., shift=0., offset=0., e1=-1, e2=0, b1=1., b2=0.)
system.part.add(pos=[0., 0., 0.], type=0)
x = self.box_l / 2.0
d = 1 - np.sqrt(2) / 2
parameters = [
([x, x, 1.], -4., [0., 0., -1.]), # outside channel
([x, x, 15.], 5., [-1., 0., 0.]), # inside channel
([x, x, 5.], 0., [0., 0., 0.]), # on channel bottom surface
([x - 5., x, 15.], 0., [0., 0., 0.]), # on channel side surface
([x + 5., x, 15.], 0., [0., 0., 0.]), # on channel side surface
([x - 5. + 2 * d, x, 5. + 2 * d], 0., [0., 0., 0.]), # lower circle
([x + 5. - 2 * d, x, 5. + 2 * d], 0., [0., 0., 0.]), # lower circle
([x - 5. - 3 * d, x, 20. - 3 * d], 0., [0., 0., 0.]), # upper circle
([x + 5. + 3 * d, x, 20. - 3 * d], 0., [0., 0., 0.]), # upper circle
([1., x, 20.], 0., [0., 0., 0.]), # on inner wall surface
([x, x, 25.], 0., [0., 0., 0.]), # on outer wall surface
([x, x, 27.], -2., [0., 0., 1.]), # outside wall
]
for pos, ref_mindist, ref_force in parameters:
system.part[0].pos = pos
system.integrator.run(recalc_forces=True, steps=0)
obs_mindist = slitpore_constraint.min_dist()
self.assertAlmostEqual(obs_mindist, ref_mindist, places=10)
if (ref_mindist == 0. and obs_mindist != 0.):
# force direction on a circle is not well-defined due to
# numerical instability
continue
np.testing.assert_almost_equal(
np.copy(slitpore_constraint.total_force()), ref_force, 10)
# Reset
system.non_bonded_inter[0, 1].generic_lennard_jones.set_params(
epsilon=0., sigma=0., cutoff=0., shift=0., offset=0., e1=0, e2=0, b1=0., b2=0.)
def test_rhomboid(self):
"""Checks that rhomboid constraints with LJ interactions exert forces
on a test particle (that is, the constraints do what they should)
using the geometrical parameters of (1) a cuboid and (2) a rhomboid.
"""
system = self.system
system.time_step = 0.01
system.cell_system.skin = 0.4
# check force calculation of rhomboid constraint
# (1) using a cuboid
interaction_dir = +1 # constraint is directed outwards
length = np.array([-5.0, 6.0, 7.0]) # dimension of the cuboid
corner = np.array(3 * [self.box_l / 2.0])
rhomboid_shape = espressomd.shapes.Rhomboid(
corner=corner,
a=[length[0], 0.0, 0.0], # cube
b=[0.0, length[1], 0.0],
c=[0.0, 0.0, length[2]],
direction=interaction_dir
)
penetrability = False # impenetrable
rhomboid_constraint = espressomd.constraints.ShapeBasedConstraint(
shape=rhomboid_shape, particle_type=1, penetrable=penetrability)
rhomboid_constraint = system.constraints.add(rhomboid_constraint)
system.non_bonded_inter[0, 1].lennard_jones.set_params(
epsilon=1.0, sigma=1.0, cutoff=2.0, shift=0)
system.part.add(id=0, pos=[self.box_l / 2.0 + length[0] / 2.0,
self.box_l / 2.0 + length[1] / 2.0,
self.box_l / 2.0 - 1], type=0)
system.integrator.run(0) # update forces
f_part = system.part[0].f
self.assertEqual(rhomboid_constraint.min_dist(), 1.)
self.assertEqual(f_part[0], 0.)
self.assertEqual(f_part[1], 0.)
self.assertAlmostEqual(
-f_part[2],
tests_common.lj_force(
espressomd,
cutoff=2.,
offset=0.,
eps=1.,
sig=1.,
r=1.),
places=10)
self.assertAlmostEqual(
rhomboid_constraint.total_normal_force(),
tests_common.lj_force(
espressomd,
cutoff=2.,
offset=0.,
eps=1.,
sig=1.,
r=1.),
places=10)
x_range = 12
y_range = 12
z_range = 12
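        # scan a 12x12x12 grid of points around the cuboid and compare the shape's
        # calc_distance() result against an independently computed reference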
for x in range(x_range):
for y in range(y_range):
for z in range(z_range):
pos = np.array(
[x + (self.box_l + length[0] - x_range) / 2.0,
y + (self.box_l + length[1] - y_range) / 2.0,
z + (self.box_l + length[2] - z_range) / 2.0])
shape_dist, shape_dist_vec = rhomboid_shape.calc_distance(
position=pos.tolist())
outside = False
edge_case = False
dist_vec = np.array([0.0, 0.0, 0.0])
# check if outside or inside
if(pos[0] < (self.box_l + length[0] - abs(length[0])) / 2.0 or
pos[0] > (self.box_l + length[0] + abs(length[0])) / 2.0 or
pos[1] < (self.box_l + length[1] - abs(length[1])) / 2.0 or
pos[1] > (self.box_l + length[1] + abs(length[1])) / 2.0 or
pos[2] < (self.box_l + length[2] - abs(length[2])) / 2.0 or
pos[2] > (self.box_l + length[2] + abs(length[2])) / 2.0):
outside = True
if outside:
for i in range(3):
if pos[i] < (self.box_l + length[i] -
abs(length[i])) / 2.0:
dist_vec[i] = pos[i] - (
self.box_l + length[i] - abs(length[i])) / 2.0
elif pos[i] > (self.box_l + length[i] + abs(length[i])) / 2.0:
dist_vec[i] = pos[i] - (
self.box_l + length[i] + abs(length[i])) / 2.0
else:
dist_vec[i] = 0.0
dist = np.linalg.norm(dist_vec)
else:
dist = self.box_l
c1 = pos - corner
c2 = corner + length - pos
abs_c1c2 = np.abs(np.concatenate((c1, c2)))
dist = np.amin(abs_c1c2)
where = np.argwhere(dist == abs_c1c2)
if len(where) > 1:
edge_case = True
for which in where:
if which < 3:
dist_vec[which] = dist * np.sign(c1[which])
else:
dist_vec[which - 3] = -dist * \
np.sign(c2[which - 3])
dist *= -interaction_dir
if edge_case:
for i in range(3):
if shape_dist_vec[i] != 0.0:
self.assertAlmostEqual(
abs(shape_dist_vec[i]), abs(dist_vec[i]))
else:
self.assertAlmostEqual(shape_dist_vec[0], dist_vec[0])
self.assertAlmostEqual(shape_dist_vec[1], dist_vec[1])
self.assertAlmostEqual(shape_dist_vec[2], dist_vec[2])
self.assertAlmostEqual(shape_dist, dist)
# (2) using a rhomboid
rhomboid_shape.a = [5., 5., 0.] # rhomboid
rhomboid_shape.b = [0., 0., 5.]
rhomboid_shape.c = [0., 5., 0.]
system.part[0].pos = [self.box_l / 2.0 + 2.5,
self.box_l / 2.0 + 2.5,
self.box_l / 2.0 - 1]
system.integrator.run(0) # update forces
self.assertEqual(rhomboid_constraint.min_dist(), 1.)
self.assertAlmostEqual(
rhomboid_constraint.total_normal_force(),
tests_common.lj_force(
espressomd,
cutoff=2.,
offset=0.,
eps=1.,
sig=1.,
r=1.),
places=10)
system.part[0].pos = system.part[0].pos - [0., 1., 0.]
system.integrator.run(0) # update forces
self.assertAlmostEqual(
rhomboid_constraint.min_dist(), 1.2247448714, 10)
self.assertAlmostEqual(
rhomboid_constraint.total_normal_force(),
tests_common.lj_force(
espressomd,
cutoff=2.,
offset=0.,
eps=1.,
sig=1.,
r=1.2247448714),
places=10)
# Reset
system.non_bonded_inter[0, 1].lennard_jones.set_params(
epsilon=0.0, sigma=0.0, cutoff=0.0, shift=0)
def test_torus(self):
"""Checks that torus constraints with LJ interactions exert forces
on a test particle (that is, the constraints do what they should).
"""
system = self.system
system.time_step = 0.01
system.cell_system.skin = 0.4
interaction_dir = 1 # constraint is directed inwards
radius = self.box_l / 4.0
tube_radius = self.box_l / 6.0
part_offset = 1.2
system.part.add(
id=0, pos=[self.box_l / 2.0, self.box_l / 2.0 + part_offset, self.box_l / 2.0], type=0)
        # check force calculation of torus constraint
torus_shape = espressomd.shapes.Torus(
center=3 * [self.box_l / 2.0],
normal=[0, 0, 1],
direction=interaction_dir,
radius=radius,
tube_radius=tube_radius)
penetrability = False # impenetrable
torus_constraint = espressomd.constraints.ShapeBasedConstraint(
shape=torus_shape, particle_type=1, penetrable=penetrability)
torus_wall = system.constraints.add(torus_constraint)
system.non_bonded_inter[0, 1].lennard_jones.set_params(
epsilon=1.0, sigma=1.0, cutoff=2.0, shift=0)
system.integrator.run(0) # update forces
self.assertAlmostEqual(torus_constraint.min_dist(),
radius - tube_radius - part_offset)
# test summed forces on torus wall
self.assertAlmostEqual(
torus_wall.total_force()[1],
tests_common.lj_force(
espressomd,
cutoff=2.0,
offset=0.,
eps=1.0,
sig=1.0,
r=torus_constraint.min_dist()),
places=10)
        # check that total_normal_force() sums both contributions: a second particle is
        # placed on the far side of the tube at the same separation, so the y-components
        # of the two wall forces cancel while their normal components add up
y_part2 = self.box_l / 2.0 + 2.0 * radius - part_offset
system.part.add(
id=1, pos=[self.box_l / 2.0, y_part2, self.box_l / 2.0], type=0)
system.integrator.run(0)
self.assertAlmostEqual(torus_wall.total_force()[1], 0.0)
self.assertAlmostEqual(torus_wall.total_normal_force(), 2 * tests_common.lj_force(
espressomd, cutoff=2.0, offset=0., eps=1.0, sig=1.0,
r=radius - tube_radius - part_offset))
# Test the geometry of the shape directly
phi_steps = 11
theta_steps = 11
center = np.array([self.box_l / 2.0,
self.box_l / 2.0,
self.box_l / 2.0])
tube_center = np.array([self.box_l / 2.0,
self.box_l / 2.0 + radius,
self.box_l / 2.0])
for distance in {1.02, -0.7}:
start_point = np.array([self.box_l / 2.0,
self.box_l / 2.0 + radius -
tube_radius - distance,
self.box_l / 2.0])
for phi in range(phi_steps):
for theta in range(theta_steps):
# Rotation around the tube
theta_angle = theta / theta_steps * 2.0 * math.pi
theta_rot_matrix = np.array(
[[1.0, 0.0, 0.0],
[0.0, math.cos(theta_angle), -math.sin(theta_angle)],
[0.0, math.sin(theta_angle), math.cos(theta_angle)]])
theta_rot_point = np.dot(
theta_rot_matrix,
start_point - tube_center)
theta_rot_point += tube_center
# Rotation around the center of the torus
phi_angle = phi / phi_steps * 2.0 * math.pi
phi_rot_matrix = np.array(
[[math.cos(phi_angle), -math.sin(phi_angle), 0.0],
[math.sin(phi_angle), math.cos(phi_angle), 0.0],
[0.0, 0.0, 1.0]])
phi_rot_point = np.dot(
phi_rot_matrix,
theta_rot_point - center) + center
shape_dist, _ = torus_shape.calc_distance(
position=phi_rot_point.tolist())
self.assertAlmostEqual(shape_dist, distance)
# Reset
system.non_bonded_inter[0, 1].lennard_jones.set_params(
epsilon=0.0, sigma=0.0, cutoff=0.0, shift=0)
if __name__ == "__main__":
ut.main()<|fim▁end|> | """
system = self.system
system.time_step = 0.01 |
<|file_name|>Animal2.java<|end_file_name|><|fim▁begin|>package cn.bjsxt.oop.inherit;
/**
 * Tests composition (has-a relationship).
* @author dell
*
*/
public class Animal2 {
String eye;
<|fim▁hole|> public void run(){
System.out.println("跑跑!");
}
public void eat(){
System.out.println("吃吃!");
}
public void sleep(){
System.out.println("zzzzz");
}
public Animal2(){
super();
System.out.println("创建一个动物!");
}
public static void main(String[] args) {
Bird2 b = new Bird2();
b.run();
b.animal2.eat();
}
}
class Mammal2 {
	Animal2 animal2 = new Animal2();   // composition: hold the "parent" Animal2 as a field instead of inheriting from it
public void taisheng(){
System.out.println("我是胎生");
}
}
class Bird2 {
	Animal2 animal2 = new Animal2();   // composition: hold the "parent" Animal2 as a field instead of inheriting from it
public void run(){
animal2.run();
System.out.println("我是一个小小小小鸟,飞呀飞不高");
}
public void eggSheng(){
System.out.println("卵生");
}
public Bird2(){
super();
System.out.println("建一个鸟对象");
}
}<|fim▁end|> | |
<|file_name|>test_LEGACY_post_update.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
from insights.client.phase.v1 import post_update
from mock.mock import patch
from pytest import raises
def patch_insights_config(old_function):
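    # Decorator: patches InsightsConfig so load_all() returns a config object with
    # every flag disabled; individual tests then override single attributes as needed.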
patcher = patch("insights.client.phase.v1.InsightsConfig",
**{"return_value.load_all.return_value.status": False,
"return_value.load_all.return_value.unregister": False,
"return_value.load_all.return_value.offline": False,
"return_value.load_all.return_value.enable_schedule": False,
"return_value.load_all.return_value.disable_schedule": False,
"return_value.load_all.return_value.analyze_container": False,
"return_value.load_all.return_value.display_name": False,
"return_value.load_all.return_value.register": False,
"return_value.load_all.return_value.diagnosis": None,
"return_value.load_all.return_value.list_specs": False,
"return_value.load_all.return_value.show_results": False,
"return_value.load_all.return_value.check_results": False,
"return_value.load_all.return_value.no_upload": False,
"return_value.load_all.return_value.core_collect": False})
return patcher(old_function)
@patch("insights.client.phase.v1.InsightsClient")
@patch_insights_config
def test_post_update_legacy_upload_off(insights_config, insights_client):
"""
    Registration is not called when the platform (non-legacy) upload path is used
"""
insights_config.return_value.load_all.return_value.legacy_upload = False
try:
post_update()
except SystemExit:
pass
insights_client.return_value.register.assert_not_called()
insights_client.return_value.get_machine_id.assert_called_once()
@patch("insights.client.phase.v1.InsightsClient")
@patch_insights_config
def test_post_update_legacy_upload_on(insights_config, insights_client):
"""
    Registration is processed when legacy_upload=True
"""
insights_config.return_value.load_all.return_value.legacy_upload = True
try:
post_update()
except SystemExit:
pass
insights_client.return_value.register.assert_called_once()
insights_client.return_value.get_machine_id.assert_called_once()<|fim▁hole|>@patch("insights.client.phase.v1.InsightsClient")
@patch_insights_config
# @patch("insights.client.phase.v1.InsightsClient")
def test_exit_ok(insights_config, insights_client):
"""
    The phase exits with status code 0 when the run succeeds.
"""
with raises(SystemExit) as exc_info:
post_update()
assert exc_info.value.code == 0
@patch("insights.client.phase.v1.InsightsClient")
@patch_insights_config
def test_post_update_no_upload(insights_config, insights_client):
"""
    No-upload short-circuits this phase
"""
insights_config.return_value.load_all.return_value.no_upload = True
try:
post_update()
except SystemExit:
pass
insights_client.return_value.register.assert_not_called()
insights_client.return_value.get_machine_id.assert_called_once()<|fim▁end|> | |
<|file_name|>chat.js<|end_file_name|><|fim▁begin|>var Chat = function(socket) {
this.socket = socket;
};
// function to send chat messages<|fim▁hole|> text: text
};
this.socket.emit('message', message);
};
// function to change rooms
Chat.prototype.changeRoom = function(room) {
this.socket.emit('join', {
newRoom: room
});
};
// processing chat commands
Chat.prototype.processCommand = function(command) {
var words = command.split(' ');
var command = words[0]
.substring(1, words[0].length)
.toLowerCase(); // parse command from first word
var message = false;
switch(command) {
case 'join':
words.shift();
var room = words.join(' ');
this.changeRoom(room); // handle room changing/creating
break;
case 'nick':
words.shift();
var name = words.join(' ');
this.socket.emit('nameAttempt', name); // handle name-change attempts
break;
default:
      message = 'Unrecognized command.'; // return an error message if the command isn't recognized
break;
}
return message;
};<|fim▁end|> | Chat.prototype.sendMessage = function(room, text) {
var message = {
room: room, |
<|file_name|>test_section_topics.py<|end_file_name|><|fim▁begin|># Copyright 2016 - 2018 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
from pkg_resources import resource_filename
<|fim▁hole|>import marv_node.testing
from marv_node.testing import make_dataset, run_nodes, temporary_directory
from marv_robotics.detail import connections_section as node
from marv_store import Store
class TestCase(marv_node.testing.TestCase):
# TODO: Generate bags instead, but with connection info!
BAGS = [
resource_filename('marv_node.testing._robotics_tests', 'data/test_0.bag'),
resource_filename('marv_node.testing._robotics_tests', 'data/test_1.bag'),
]
async def test_node(self):
with temporary_directory() as storedir:
store = Store(storedir, {})
dataset = make_dataset(self.BAGS)
store.add_dataset(dataset)
streams = await run_nodes(dataset, [node], store)
self.assertNodeOutput(streams[0], node)
# TODO: test also header<|fim▁end|> | |
<|file_name|>_showticklabels.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
class ShowticklabelsValidator(_plotly_utils.basevalidators.BooleanValidator):
def __init__(
self, plotly_name="showticklabels", parent_name="heatmap.colorbar", **kwargs
):
super(ShowticklabelsValidator, self).__init__(<|fim▁hole|> **kwargs
)<|fim▁end|> | plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"), |
<|file_name|>404.component.ts<|end_file_name|><|fim▁begin|>import {Component} from '@angular/core';
@Component({
selector: 'not-found',
templateUrl: 'app/404.component/404.component.html',<|fim▁hole|> styleUrls: ['app/404.component/404.component.css'],
})
export class NotFoundComponent {}<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import React, { PropTypes } from 'react';
class Link extends React.Component {
render() {
return <article key={this.props.item.id} className="List-Item">
<header className="List-Item-Header">
<cite className="List-Item-Title"><a href={this.props.item.href}>{this.props.item.title}</a></cite>
</header>
<p className="List-Item-Description List-Item-Description--Short">{this.props.item.short_description}</p>
</article>
}
}
<|fim▁hole|><|fim▁end|> | export default Link; |
<|file_name|>summarize_tests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# summarize_tests.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
# Invoke this script as
#
# python3 parse_test_output.py <path to test output>.xml<|fim▁hole|>
import junitparser as jp
import glob
import os
import sys
import xml
assert int(jp.version.split('.')[0]) >= 2, 'junitparser version must be >= 2'
def parse_result_file(fname):
results = jp.JUnitXml.fromfile(fname)
if isinstance(results, jp.junitparser.JUnitXml):
# special case for pytest, which wraps all once more
suites = list(results)
assert len(suites) == 1, "JUnit XML files may only contain results from a single testsuite."
results = suites[0]
assert all(len(case.result) == 1 for case in results if case.result), 'Case result has unexpected length > 1'
failed_tests = ['.'.join((case.classname, case.name)) for case in results
if case.result and not isinstance(case.result[0], jp.junitparser.Skipped)]
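    # a case with a non-empty result entry that is not Skipped was either a failure or an error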
return {'Tests': results.tests,
'Skipped': results.skipped,
'Failures': results.failures,
'Errors': results.errors,
'Time': results.time,
'Failed tests': failed_tests}
if __name__ == '__main__':
assert len(sys.argv) == 2, 'summarize_tests must be called with TEST_OUTDIR.'
test_outdir = sys.argv[1]
results = {}
totals = {'Tests': 0, 'Skipped': 0,
'Failures': 0, 'Errors': 0,
'Time': 0, 'Failed tests': []}
for pfile in sorted(glob.glob(os.path.join(test_outdir, '*.xml'))):
ph_name = os.path.splitext(os.path.split(pfile)[1])[0].replace('_', ' ')
ph_res = parse_result_file(pfile)
results[ph_name] = ph_res
for k, v in ph_res.items():
totals[k] += v
cols = ['Tests', 'Skipped', 'Failures', 'Errors', 'Time']
tline = '-' * (len(cols) * 10 + 20)
print()
print()
print(tline)
print('NEST Testsuite Results')
print(tline)
print('{:<20s}'.format('Phase'), end='')
for c in cols:
print('{:>10s}'.format(c), end='')
print()
print(tline)
for pn, pr in results.items():
print('{:<20s}'.format(pn), end='')
for c in cols:
fstr = '{:10.1f}' if c == 'Time' else '{:10d}'
print(fstr.format(pr[c]), end='')
print()
print(tline)
print('{:<20s}'.format('Total'), end='')
for c in cols:
fstr = '{:10.1f}' if c == 'Time' else '{:10d}'
print(fstr.format(totals[c]), end='')
print()
print(tline)
print()
if totals['Failures'] + totals['Errors'] > 0:
print('THE NEST TESTSUITE DISCOVERED PROBLEMS')
print(' The following tests failed')
for t in totals['Failed tests']:
print(f' | {t}') # | marks line for parsing
print()
print(' Please report test failures by creating an issue at')
print(' https://github.com/nest/nest_simulator/issues')
print()
print(tline)
print()
sys.exit(1)
else:
print('The NEST Testsuite passed successfully.')
print()
print(tline)
print()<|fim▁end|> | #
# It will print on a single line
#
# <No of tests run> <No of skipped tests> <No of failed tests> <No of errored tests> <List of unsuccessful tests> |
<|file_name|>test_io.py<|end_file_name|><|fim▁begin|>from __future__ import division, absolute_import, print_function
import sys
import gzip
import os
import threading
from tempfile import mkstemp, NamedTemporaryFile
import time
import warnings
import gc
from io import BytesIO
from datetime import datetime
import numpy as np
import numpy.ma as ma
from numpy.lib._iotools import (ConverterError, ConverterLockError,
ConversionWarning)
from numpy.compat import asbytes, asbytes_nested, bytes, asstr
from nose import SkipTest
from numpy.ma.testutils import (
TestCase, assert_equal, assert_array_equal,
assert_raises, assert_raises_regex, run_module_suite
)
from numpy.testing import assert_warns, assert_, build_err_msg
from numpy.testing.utils import tempdir
class TextIO(BytesIO):
"""Helper IO class.
Writes encode strings to bytes if needed, reads return bytes.
This makes it easier to emulate files opened in binary mode
without needing to explicitly convert strings to bytes in
setting up the test data.
"""
def __init__(self, s=""):
BytesIO.__init__(self, asbytes(s))
def write(self, s):
BytesIO.write(self, asbytes(s))
def writelines(self, lines):
BytesIO.writelines(self, [asbytes(s) for s in lines])
MAJVER, MINVER = sys.version_info[:2]
IS_64BIT = sys.maxsize > 2**32
def strptime(s, fmt=None):
"""This function is available in the datetime module only
from Python >= 2.5.
"""
if sys.version_info[0] >= 3:
return datetime(*time.strptime(s.decode('latin1'), fmt)[:3])
else:
return datetime(*time.strptime(s, fmt)[:3])
class RoundtripTest(object):
def roundtrip(self, save_func, *args, **kwargs):
"""
save_func : callable
Function used to save arrays to file.
file_on_disk : bool
If true, store the file on disk, instead of in a
string buffer.
save_kwds : dict
Parameters passed to `save_func`.
load_kwds : dict
Parameters passed to `numpy.load`.
args : tuple of arrays
Arrays stored to file.
"""
save_kwds = kwargs.get('save_kwds', {})
load_kwds = kwargs.get('load_kwds', {})
file_on_disk = kwargs.get('file_on_disk', False)
if file_on_disk:
target_file = NamedTemporaryFile(delete=False)
load_file = target_file.name
else:
target_file = BytesIO()
load_file = target_file
try:
arr = args
save_func(target_file, *arr, **save_kwds)
target_file.flush()
target_file.seek(0)
if sys.platform == 'win32' and not isinstance(target_file, BytesIO):
target_file.close()
arr_reloaded = np.load(load_file, **load_kwds)
self.arr = arr
self.arr_reloaded = arr_reloaded
finally:
if not isinstance(target_file, BytesIO):
target_file.close()
# holds an open file descriptor so it can't be deleted on win
if not isinstance(arr_reloaded, np.lib.npyio.NpzFile):
os.remove(target_file.name)
def check_roundtrips(self, a):
self.roundtrip(a)
self.roundtrip(a, file_on_disk=True)
self.roundtrip(np.asfortranarray(a))
self.roundtrip(np.asfortranarray(a), file_on_disk=True)
if a.shape[0] > 1:
# neither C nor Fortran contiguous for 2D arrays or more
self.roundtrip(np.asfortranarray(a)[1:])
self.roundtrip(np.asfortranarray(a)[1:], file_on_disk=True)
def test_array(self):
a = np.array([], float)
self.check_roundtrips(a)
a = np.array([[1, 2], [3, 4]], float)
self.check_roundtrips(a)
a = np.array([[1, 2], [3, 4]], int)
self.check_roundtrips(a)
a = np.array([[1 + 5j, 2 + 6j], [3 + 7j, 4 + 8j]], dtype=np.csingle)
self.check_roundtrips(a)
a = np.array([[1 + 5j, 2 + 6j], [3 + 7j, 4 + 8j]], dtype=np.cdouble)
self.check_roundtrips(a)
def test_array_object(self):
if sys.version_info[:2] >= (2, 7):
a = np.array([], object)
self.check_roundtrips(a)
a = np.array([[1, 2], [3, 4]], object)
self.check_roundtrips(a)
# Fails with UnpicklingError: could not find MARK on Python 2.6
def test_1D(self):
a = np.array([1, 2, 3, 4], int)
self.roundtrip(a)
@np.testing.dec.knownfailureif(sys.platform == 'win32', "Fail on Win32")
def test_mmap(self):
a = np.array([[1, 2.5], [4, 7.3]])
self.roundtrip(a, file_on_disk=True, load_kwds={'mmap_mode': 'r'})
a = np.asfortranarray([[1, 2.5], [4, 7.3]])
self.roundtrip(a, file_on_disk=True, load_kwds={'mmap_mode': 'r'})
def test_record(self):
a = np.array([(1, 2), (3, 4)], dtype=[('x', 'i4'), ('y', 'i4')])
self.check_roundtrips(a)
def test_format_2_0(self):
dt = [(("%d" % i) * 100, float) for i in range(500)]
a = np.ones(1000, dtype=dt)
with warnings.catch_warnings(record=True):
warnings.filterwarnings('always', '', UserWarning)
self.check_roundtrips(a)
class TestSaveLoad(RoundtripTest, TestCase):
def roundtrip(self, *args, **kwargs):
RoundtripTest.roundtrip(self, np.save, *args, **kwargs)
assert_equal(self.arr[0], self.arr_reloaded)
assert_equal(self.arr[0].dtype, self.arr_reloaded.dtype)
assert_equal(self.arr[0].flags.fnc, self.arr_reloaded.flags.fnc)
class TestSavezLoad(RoundtripTest, TestCase):
def roundtrip(self, *args, **kwargs):
RoundtripTest.roundtrip(self, np.savez, *args, **kwargs)
try:
for n, arr in enumerate(self.arr):
reloaded = self.arr_reloaded['arr_%d' % n]
assert_equal(arr, reloaded)
assert_equal(arr.dtype, reloaded.dtype)
assert_equal(arr.flags.fnc, reloaded.flags.fnc)
finally:
# delete tempfile, must be done here on windows
if self.arr_reloaded.fid:
self.arr_reloaded.fid.close()
os.remove(self.arr_reloaded.fid.name)
@np.testing.dec.skipif(not IS_64BIT, "Works only with 64bit systems")
@np.testing.dec.slow
def test_big_arrays(self):
L = (1 << 31) + 100000
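        # an array of more than 2**31 uint8 elements (> 2 GiB) checks that
        # savez/load handle archives this large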
a = np.empty(L, dtype=np.uint8)
with tempdir(prefix="numpy_test_big_arrays_") as tmpdir:
tmp = os.path.join(tmpdir, "file.npz")
np.savez(tmp, a=a)
del a
npfile = np.load(tmp)
a = npfile['a']
npfile.close()
def test_multiple_arrays(self):
a = np.array([[1, 2], [3, 4]], float)
b = np.array([[1 + 2j, 2 + 7j], [3 - 6j, 4 + 12j]], complex)
self.roundtrip(a, b)
def test_named_arrays(self):
a = np.array([[1, 2], [3, 4]], float)
b = np.array([[1 + 2j, 2 + 7j], [3 - 6j, 4 + 12j]], complex)
c = BytesIO()
np.savez(c, file_a=a, file_b=b)
c.seek(0)
l = np.load(c)
assert_equal(a, l['file_a'])
assert_equal(b, l['file_b'])
def test_BagObj(self):
a = np.array([[1, 2], [3, 4]], float)
b = np.array([[1 + 2j, 2 + 7j], [3 - 6j, 4 + 12j]], complex)
c = BytesIO()
np.savez(c, file_a=a, file_b=b)
c.seek(0)
l = np.load(c)
assert_equal(sorted(dir(l.f)), ['file_a','file_b'])
assert_equal(a, l.f.file_a)
assert_equal(b, l.f.file_b)
def test_savez_filename_clashes(self):
# Test that issue #852 is fixed
        # and that np.savez works when called concurrently from multiple threads
def writer(error_list):
fd, tmp = mkstemp(suffix='.npz')
os.close(fd)
try:
arr = np.random.randn(500, 500)
try:
np.savez(tmp, arr=arr)
except OSError as err:
error_list.append(err)
finally:
os.remove(tmp)
errors = []
threads = [threading.Thread(target=writer, args=(errors,))
for j in range(3)]
for t in threads:
t.start()
for t in threads:
t.join()
if errors:
raise AssertionError(errors)
def test_not_closing_opened_fid(self):
# Test that issue #2178 is fixed:
        # verify that we can still seek in the file object after np.load
fd, tmp = mkstemp(suffix='.npz')
os.close(fd)
try:
fp = open(tmp, 'wb')
np.savez(fp, data='LOVELY LOAD')
fp.close()
fp = open(tmp, 'rb', 10000)
fp.seek(0)
assert_(not fp.closed)
_ = np.load(fp)['data']
assert_(not fp.closed)
# must not get closed by .load(opened fp)
fp.seek(0)
assert_(not fp.closed)
finally:
fp.close()
os.remove(tmp)
def test_closing_fid(self):
        # Test that issue #1517 (too many open files) stays fixed
# It might be a "weak" test since failed to get triggered on
# e.g. Debian sid of 2012 Jul 05 but was reported to
# trigger the failure on Ubuntu 10.04:
# http://projects.scipy.org/numpy/ticket/1517#comment:2
fd, tmp = mkstemp(suffix='.npz')
os.close(fd)
try:
fp = open(tmp, 'wb')
np.savez(fp, data='LOVELY LOAD')
fp.close()
# We need to check if the garbage collector can properly close
# numpy npz file returned by np.load when their reference count
# goes to zero. Python 3 running in debug mode raises a
# ResourceWarning when file closing is left to the garbage
# collector, so we catch the warnings. Because ResourceWarning
# is unknown in Python < 3.x, we take the easy way out and
# catch all warnings.
with warnings.catch_warnings():
warnings.simplefilter("ignore")
for i in range(1, 1025):
try:
np.load(tmp)["data"]
except Exception as e:
msg = "Failed to load data from a file: %s" % e
raise AssertionError(msg)
finally:
os.remove(tmp)
def test_closing_zipfile_after_load(self):
# Check that zipfile owns file and can close it.
# This needs to pass a file name to load for the
# test.
with tempdir(prefix="numpy_test_closing_zipfile_after_load_") as tmpdir:
fd, tmp = mkstemp(suffix='.npz', dir=tmpdir)
os.close(fd)
np.savez(tmp, lab='place holder')
data = np.load(tmp)
fp = data.zip.fp
data.close()
assert_(fp.closed)
class TestSaveTxt(TestCase):
def test_array(self):
a = np.array([[1, 2], [3, 4]], float)
fmt = "%.18e"
c = BytesIO()
np.savetxt(c, a, fmt=fmt)
c.seek(0)
assert_equal(c.readlines(),
[asbytes((fmt + ' ' + fmt + '\n') % (1, 2)),
asbytes((fmt + ' ' + fmt + '\n') % (3, 4))])
a = np.array([[1, 2], [3, 4]], int)
c = BytesIO()
np.savetxt(c, a, fmt='%d')
c.seek(0)
assert_equal(c.readlines(), [b'1 2\n', b'3 4\n'])
def test_1D(self):
a = np.array([1, 2, 3, 4], int)
c = BytesIO()
np.savetxt(c, a, fmt='%d')
c.seek(0)
lines = c.readlines()
assert_equal(lines, [b'1\n', b'2\n', b'3\n', b'4\n'])
def test_record(self):
a = np.array([(1, 2), (3, 4)], dtype=[('x', 'i4'), ('y', 'i4')])
c = BytesIO()
np.savetxt(c, a, fmt='%d')
c.seek(0)
assert_equal(c.readlines(), [b'1 2\n', b'3 4\n'])
def test_delimiter(self):
a = np.array([[1., 2.], [3., 4.]])
c = BytesIO()
np.savetxt(c, a, delimiter=',', fmt='%d')
c.seek(0)
assert_equal(c.readlines(), [b'1,2\n', b'3,4\n'])
def test_format(self):
a = np.array([(1, 2), (3, 4)])
c = BytesIO()
# Sequence of formats
np.savetxt(c, a, fmt=['%02d', '%3.1f'])
c.seek(0)
assert_equal(c.readlines(), [b'01 2.0\n', b'03 4.0\n'])
# A single multiformat string
c = BytesIO()
np.savetxt(c, a, fmt='%02d : %3.1f')
c.seek(0)
lines = c.readlines()
assert_equal(lines, [b'01 : 2.0\n', b'03 : 4.0\n'])
        # Specify delimiter, which should be overridden by the multi-format string
c = BytesIO()
np.savetxt(c, a, fmt='%02d : %3.1f', delimiter=',')
c.seek(0)
lines = c.readlines()
assert_equal(lines, [b'01 : 2.0\n', b'03 : 4.0\n'])
# Bad fmt, should raise a ValueError
c = BytesIO()
assert_raises(ValueError, np.savetxt, c, a, fmt=99)
def test_header_footer(self):
"""
Test the functionality of the header and footer keyword argument.
"""
c = BytesIO()
a = np.array([(1, 2), (3, 4)], dtype=np.int)
test_header_footer = 'Test header / footer'
# Test the header keyword argument
np.savetxt(c, a, fmt='%1d', header=test_header_footer)
c.seek(0)
assert_equal(c.read(),
asbytes('# ' + test_header_footer + '\n1 2\n3 4\n'))
# Test the footer keyword argument
c = BytesIO()
np.savetxt(c, a, fmt='%1d', footer=test_header_footer)
c.seek(0)
assert_equal(c.read(),
asbytes('1 2\n3 4\n# ' + test_header_footer + '\n'))
# Test the commentstr keyword argument used on the header
c = BytesIO()
commentstr = '% '
np.savetxt(c, a, fmt='%1d',
header=test_header_footer, comments=commentstr)
c.seek(0)
assert_equal(c.read(),
asbytes(commentstr + test_header_footer + '\n' + '1 2\n3 4\n'))
# Test the commentstr keyword argument used on the footer
c = BytesIO()
commentstr = '% '
np.savetxt(c, a, fmt='%1d',
footer=test_header_footer, comments=commentstr)
c.seek(0)
assert_equal(c.read(),
asbytes('1 2\n3 4\n' + commentstr + test_header_footer + '\n'))
def test_file_roundtrip(self):
f, name = mkstemp()
os.close(f)
try:
a = np.array([(1, 2), (3, 4)])
np.savetxt(name, a)
b = np.loadtxt(name)
assert_array_equal(a, b)
finally:
os.unlink(name)
def test_complex_arrays(self):
ncols = 2
nrows = 2
a = np.zeros((ncols, nrows), dtype=np.complex128)
re = np.pi
im = np.e
a[:] = re + 1.0j * im
# One format only
c = BytesIO()
np.savetxt(c, a, fmt=' %+.3e')
c.seek(0)
lines = c.readlines()
assert_equal(
lines,
[b' ( +3.142e+00+ +2.718e+00j) ( +3.142e+00+ +2.718e+00j)\n',
b' ( +3.142e+00+ +2.718e+00j) ( +3.142e+00+ +2.718e+00j)\n'])
# One format for each real and imaginary part
c = BytesIO()
np.savetxt(c, a, fmt=' %+.3e' * 2 * ncols)
c.seek(0)
lines = c.readlines()
assert_equal(
lines,
[b' +3.142e+00 +2.718e+00 +3.142e+00 +2.718e+00\n',
b' +3.142e+00 +2.718e+00 +3.142e+00 +2.718e+00\n'])
# One format for each complex number
c = BytesIO()
np.savetxt(c, a, fmt=['(%.3e%+.3ej)'] * ncols)
c.seek(0)
lines = c.readlines()
assert_equal(
lines,
[b'(3.142e+00+2.718e+00j) (3.142e+00+2.718e+00j)\n',
b'(3.142e+00+2.718e+00j) (3.142e+00+2.718e+00j)\n'])
def test_custom_writer(self):
class CustomWriter(list):
def write(self, text):
self.extend(text.split(b'\n'))
w = CustomWriter()
a = np.array([(1, 2), (3, 4)])
np.savetxt(w, a)
b = np.loadtxt(w)
assert_array_equal(a, b)
class TestLoadTxt(TestCase):
def test_record(self):
c = TextIO()
c.write('1 2\n3 4')
c.seek(0)
x = np.loadtxt(c, dtype=[('x', np.int32), ('y', np.int32)])
a = np.array([(1, 2), (3, 4)], dtype=[('x', 'i4'), ('y', 'i4')])
assert_array_equal(x, a)
d = TextIO()
d.write('M 64.0 75.0\nF 25.0 60.0')
d.seek(0)
mydescriptor = {'names': ('gender', 'age', 'weight'),
'formats': ('S1', 'i4', 'f4')}
b = np.array([('M', 64.0, 75.0),
('F', 25.0, 60.0)], dtype=mydescriptor)
y = np.loadtxt(d, dtype=mydescriptor)
assert_array_equal(y, b)
def test_array(self):
c = TextIO()
c.write('1 2\n3 4')
c.seek(0)
x = np.loadtxt(c, dtype=np.int)
a = np.array([[1, 2], [3, 4]], int)
assert_array_equal(x, a)
c.seek(0)
x = np.loadtxt(c, dtype=float)
a = np.array([[1, 2], [3, 4]], float)
assert_array_equal(x, a)
def test_1D(self):
c = TextIO()
c.write('1\n2\n3\n4\n')
c.seek(0)
x = np.loadtxt(c, dtype=int)
a = np.array([1, 2, 3, 4], int)
assert_array_equal(x, a)
c = TextIO()
c.write('1,2,3,4\n')
c.seek(0)
x = np.loadtxt(c, dtype=int, delimiter=',')
a = np.array([1, 2, 3, 4], int)
assert_array_equal(x, a)
def test_missing(self):
c = TextIO()
c.write('1,2,3,,5\n')
c.seek(0)
x = np.loadtxt(c, dtype=int, delimiter=',',
converters={3: lambda s: int(s or - 999)})
a = np.array([1, 2, 3, -999, 5], int)
assert_array_equal(x, a)
def test_converters_with_usecols(self):
c = TextIO()
c.write('1,2,3,,5\n6,7,8,9,10\n')
c.seek(0)
x = np.loadtxt(c, dtype=int, delimiter=',',
converters={3: lambda s: int(s or - 999)},
usecols=(1, 3,))
a = np.array([[2, -999], [7, 9]], int)
assert_array_equal(x, a)
def test_comments(self):
c = TextIO()
c.write('# comment\n1,2,3,5\n')
c.seek(0)
x = np.loadtxt(c, dtype=int, delimiter=',',
comments='#')
a = np.array([1, 2, 3, 5], int)
assert_array_equal(x, a)
def test_skiprows(self):
c = TextIO()
c.write('comment\n1,2,3,5\n')
c.seek(0)
x = np.loadtxt(c, dtype=int, delimiter=',',
skiprows=1)
a = np.array([1, 2, 3, 5], int)
assert_array_equal(x, a)
c = TextIO()
c.write('# comment\n1,2,3,5\n')
c.seek(0)
x = np.loadtxt(c, dtype=int, delimiter=',',
skiprows=1)
a = np.array([1, 2, 3, 5], int)
assert_array_equal(x, a)
def test_usecols(self):
a = np.array([[1, 2], [3, 4]], float)
c = BytesIO()
np.savetxt(c, a)
c.seek(0)
x = np.loadtxt(c, dtype=float, usecols=(1,))
assert_array_equal(x, a[:, 1])
a = np.array([[1, 2, 3], [3, 4, 5]], float)
c = BytesIO()
np.savetxt(c, a)
c.seek(0)
x = np.loadtxt(c, dtype=float, usecols=(1, 2))
assert_array_equal(x, a[:, 1:])
# Testing with arrays instead of tuples.
c.seek(0)
x = np.loadtxt(c, dtype=float, usecols=np.array([1, 2]))
assert_array_equal(x, a[:, 1:])
# Checking with dtypes defined converters.
data = '''JOE 70.1 25.3
BOB 60.5 27.9
'''
c = TextIO(data)
names = ['stid', 'temp']
dtypes = ['S4', 'f8']
arr = np.loadtxt(c, usecols=(0, 2), dtype=list(zip(names, dtypes)))
assert_equal(arr['stid'], [b"JOE", b"BOB"])
assert_equal(arr['temp'], [25.3, 27.9])
def test_fancy_dtype(self):
c = TextIO()
c.write('1,2,3.0\n4,5,6.0\n')
c.seek(0)
dt = np.dtype([('x', int), ('y', [('t', int), ('s', float)])])
x = np.loadtxt(c, dtype=dt, delimiter=',')
a = np.array([(1, (2, 3.0)), (4, (5, 6.0))], dt)
assert_array_equal(x, a)
def test_shaped_dtype(self):
c = TextIO("aaaa 1.0 8.0 1 2 3 4 5 6")
dt = np.dtype([('name', 'S4'), ('x', float), ('y', float),
('block', int, (2, 3))])
x = np.loadtxt(c, dtype=dt)
a = np.array([('aaaa', 1.0, 8.0, [[1, 2, 3], [4, 5, 6]])],
dtype=dt)
assert_array_equal(x, a)
def test_3d_shaped_dtype(self):
c = TextIO("aaaa 1.0 8.0 1 2 3 4 5 6 7 8 9 10 11 12")
dt = np.dtype([('name', 'S4'), ('x', float), ('y', float),
('block', int, (2, 2, 3))])
x = np.loadtxt(c, dtype=dt)
a = np.array([('aaaa', 1.0, 8.0,
[[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]])],
dtype=dt)
assert_array_equal(x, a)
def test_empty_file(self):
with warnings.catch_warnings():
warnings.filterwarnings("ignore",
message="loadtxt: Empty input file:")
c = TextIO()
x = np.loadtxt(c)
assert_equal(x.shape, (0,))
x = np.loadtxt(c, dtype=np.int64)
assert_equal(x.shape, (0,))
assert_(x.dtype == np.int64)
def test_unused_converter(self):
c = TextIO()
c.writelines(['1 21\n', '3 42\n'])
c.seek(0)
data = np.loadtxt(c, usecols=(1,),
converters={0: lambda s: int(s, 16)})
assert_array_equal(data, [21, 42])
c.seek(0)
data = np.loadtxt(c, usecols=(1,),
converters={1: lambda s: int(s, 16)})
assert_array_equal(data, [33, 66])
def test_dtype_with_object(self):
"Test using an explicit dtype with an object"
from datetime import date
import time
data = """ 1; 2001-01-01
2; 2002-01-31 """
ndtype = [('idx', int), ('code', np.object)]
func = lambda s: strptime(s.strip(), "%Y-%m-%d")
converters = {1: func}
test = np.loadtxt(TextIO(data), delimiter=";", dtype=ndtype,
converters=converters)
control = np.array(
[(1, datetime(2001, 1, 1)), (2, datetime(2002, 1, 31))],
dtype=ndtype)
assert_equal(test, control)
def test_uint64_type(self):
tgt = (9223372043271415339, 9223372043271415853)
c = TextIO()
c.write("%s %s" % tgt)
c.seek(0)
res = np.loadtxt(c, dtype=np.uint64)
assert_equal(res, tgt)
def test_int64_type(self):
tgt = (-9223372036854775807, 9223372036854775807)
c = TextIO()
c.write("%s %s" % tgt)
c.seek(0)
res = np.loadtxt(c, dtype=np.int64)
assert_equal(res, tgt)
def test_universal_newline(self):
f, name = mkstemp()
os.write(f, b'1 21\r3 42\r')
os.close(f)
try:
data = np.loadtxt(name)
assert_array_equal(data, [[1, 21], [3, 42]])
finally:
os.unlink(name)
def test_empty_field_after_tab(self):
c = TextIO()
c.write('1 \t2 \t3\tstart \n4\t5\t6\t \n7\t8\t9.5\t')
c.seek(0)
dt = {'names': ('x', 'y', 'z', 'comment'),
'formats': ('<i4', '<i4', '<f4', '|S8')}
x = np.loadtxt(c, dtype=dt, delimiter='\t')
a = np.array([b'start ', b' ', b''])
assert_array_equal(x['comment'], a)
def test_structure_unpack(self):
txt = TextIO("M 21 72\nF 35 58")
dt = {'names': ('a', 'b', 'c'), 'formats': ('|S1', '<i4', '<f4')}
a, b, c = np.loadtxt(txt, dtype=dt, unpack=True)
assert_(a.dtype.str == '|S1')
assert_(b.dtype.str == '<i4')
assert_(c.dtype.str == '<f4')
assert_array_equal(a, np.array([b'M', b'F']))
assert_array_equal(b, np.array([21, 35]))
assert_array_equal(c, np.array([72., 58.]))
def test_ndmin_keyword(self):
c = TextIO()
c.write('1,2,3\n4,5,6')
c.seek(0)
assert_raises(ValueError, np.loadtxt, c, ndmin=3)
c.seek(0)
assert_raises(ValueError, np.loadtxt, c, ndmin=1.5)
c.seek(0)
x = np.loadtxt(c, dtype=int, delimiter=',', ndmin=1)
a = np.array([[1, 2, 3], [4, 5, 6]])
assert_array_equal(x, a)
d = TextIO()
d.write('0,1,2')
d.seek(0)
x = np.loadtxt(d, dtype=int, delimiter=',', ndmin=2)
assert_(x.shape == (1, 3))
d.seek(0)
x = np.loadtxt(d, dtype=int, delimiter=',', ndmin=1)
assert_(x.shape == (3,))
d.seek(0)
x = np.loadtxt(d, dtype=int, delimiter=',', ndmin=0)
assert_(x.shape == (3,))
e = TextIO()
e.write('0\n1\n2')
e.seek(0)
x = np.loadtxt(e, dtype=int, delimiter=',', ndmin=2)
assert_(x.shape == (3, 1))
e.seek(0)
x = np.loadtxt(e, dtype=int, delimiter=',', ndmin=1)
assert_(x.shape == (3,))
e.seek(0)
x = np.loadtxt(e, dtype=int, delimiter=',', ndmin=0)
assert_(x.shape == (3,))
# Test ndmin kw with empty file.
with warnings.catch_warnings():
warnings.filterwarnings("ignore",
message="loadtxt: Empty input file:")
f = TextIO()
assert_(np.loadtxt(f, ndmin=2).shape == (0, 1,))
assert_(np.loadtxt(f, ndmin=1).shape == (0,))
def test_generator_source(self):
def count():
for i in range(10):
yield "%d" % i
res = np.loadtxt(count())
assert_array_equal(res, np.arange(10))
def test_bad_line(self):
c = TextIO()
c.write('1 2 3\n4 5 6\n2 3')
c.seek(0)
# Check for exception and that exception contains line number
assert_raises_regex(ValueError, "3", np.loadtxt, c)
class Testfromregex(TestCase):
# np.fromregex expects files opened in binary mode.
def test_record(self):
c = TextIO()
c.write('1.312 foo\n1.534 bar\n4.444 qux')
c.seek(0)
dt = [('num', np.float64), ('val', 'S3')]
x = np.fromregex(c, r"([0-9.]+)\s+(...)", dt)
a = np.array([(1.312, 'foo'), (1.534, 'bar'), (4.444, 'qux')],
dtype=dt)
assert_array_equal(x, a)
def test_record_2(self):
c = TextIO()
c.write('1312 foo\n1534 bar\n4444 qux')
c.seek(0)
dt = [('num', np.int32), ('val', 'S3')]
x = np.fromregex(c, r"(\d+)\s+(...)", dt)
a = np.array([(1312, 'foo'), (1534, 'bar'), (4444, 'qux')],
dtype=dt)
assert_array_equal(x, a)
def test_record_3(self):
c = TextIO()
c.write('1312 foo\n1534 bar\n4444 qux')
c.seek(0)
dt = [('num', np.float64)]
x = np.fromregex(c, r"(\d+)\s+...", dt)
a = np.array([(1312,), (1534,), (4444,)], dtype=dt)
assert_array_equal(x, a)
#####--------------------------------------------------------------------------
class TestFromTxt(TestCase):
#
def test_record(self):
"Test w/ explicit dtype"
data = TextIO('1 2\n3 4')
# data.seek(0)
test = np.ndfromtxt(data, dtype=[('x', np.int32), ('y', np.int32)])
control = np.array([(1, 2), (3, 4)], dtype=[('x', 'i4'), ('y', 'i4')])
assert_equal(test, control)
#
data = TextIO('M 64.0 75.0\nF 25.0 60.0')
# data.seek(0)
descriptor = {'names': ('gender', 'age', 'weight'),
'formats': ('S1', 'i4', 'f4')}
control = np.array([('M', 64.0, 75.0), ('F', 25.0, 60.0)],
dtype=descriptor)
test = np.ndfromtxt(data, dtype=descriptor)
assert_equal(test, control)
def test_array(self):
"Test outputing a standard ndarray"
data = TextIO('1 2\n3 4')
control = np.array([[1, 2], [3, 4]], dtype=int)
test = np.ndfromtxt(data, dtype=int)
assert_array_equal(test, control)
#
data.seek(0)
control = np.array([[1, 2], [3, 4]], dtype=float)
test = np.loadtxt(data, dtype=float)
assert_array_equal(test, control)
def test_1D(self):
"Test squeezing to 1D"
control = np.array([1, 2, 3, 4], int)
#
data = TextIO('1\n2\n3\n4\n')
test = np.ndfromtxt(data, dtype=int)
assert_array_equal(test, control)
#
data = TextIO('1,2,3,4\n')
test = np.ndfromtxt(data, dtype=int, delimiter=',')
assert_array_equal(test, control)
def test_comments(self):
"Test the stripping of comments"
control = np.array([1, 2, 3, 5], int)
# Comment on its own line
data = TextIO('# comment\n1,2,3,5\n')
test = np.ndfromtxt(data, dtype=int, delimiter=',', comments='#')
assert_equal(test, control)
# Comment at the end of a line
data = TextIO('1,2,3,5# comment\n')
test = np.ndfromtxt(data, dtype=int, delimiter=',', comments='#')
assert_equal(test, control)
def test_skiprows(self):
"Test row skipping"
control = np.array([1, 2, 3, 5], int)
kwargs = dict(dtype=int, delimiter=',')
#
data = TextIO('comment\n1,2,3,5\n')
test = np.ndfromtxt(data, skip_header=1, **kwargs)
assert_equal(test, control)
#
data = TextIO('# comment\n1,2,3,5\n')
test = np.loadtxt(data, skiprows=1, **kwargs)
assert_equal(test, control)
def test_skip_footer(self):
data = ["# %i" % i for i in range(1, 6)]
data.append("A, B, C")
data.extend(["%i,%3.1f,%03s" % (i, i, i) for i in range(51)])
data[-1] = "99,99"
kwargs = dict(delimiter=",", names=True, skip_header=5, skip_footer=10)
test = np.genfromtxt(TextIO("\n".join(data)), **kwargs)
ctrl = np.array([("%f" % i, "%f" % i, "%f" % i) for i in range(41)],
dtype=[(_, float) for _ in "ABC"])
assert_equal(test, ctrl)
def test_skip_footer_with_invalid(self):
with warnings.catch_warnings():
warnings.filterwarnings("ignore")
basestr = '1 1\n2 2\n3 3\n4 4\n5 \n6 \n7 \n'
# Footer too small to get rid of all invalid values
assert_raises(ValueError, np.genfromtxt,
TextIO(basestr), skip_footer=1)
# except ValueError:
# pass
a = np.genfromtxt(
TextIO(basestr), skip_footer=1, invalid_raise=False)
assert_equal(a, np.array([[1., 1.], [2., 2.], [3., 3.], [4., 4.]]))
#
a = np.genfromtxt(TextIO(basestr), skip_footer=3)
assert_equal(a, np.array([[1., 1.], [2., 2.], [3., 3.], [4., 4.]]))
#
basestr = '1 1\n2 \n3 3\n4 4\n5 \n6 6\n7 7\n'
a = np.genfromtxt(
TextIO(basestr), skip_footer=1, invalid_raise=False)
assert_equal(a, np.array([[1., 1.], [3., 3.], [4., 4.], [6., 6.]]))
a = np.genfromtxt(
TextIO(basestr), skip_footer=3, invalid_raise=False)
assert_equal(a, np.array([[1., 1.], [3., 3.], [4., 4.]]))
def test_header(self):
"Test retrieving a header"
data = TextIO('gender age weight\nM 64.0 75.0\nF 25.0 60.0')
test = np.ndfromtxt(data, dtype=None, names=True)
control = {'gender': np.array([b'M', b'F']),
'age': np.array([64.0, 25.0]),
'weight': np.array([75.0, 60.0])}
assert_equal(test['gender'], control['gender'])
assert_equal(test['age'], control['age'])
assert_equal(test['weight'], control['weight'])
def test_auto_dtype(self):
"Test the automatic definition of the output dtype"
data = TextIO('A 64 75.0 3+4j True\nBCD 25 60.0 5+6j False')
test = np.ndfromtxt(data, dtype=None)
control = [np.array([b'A', b'BCD']),
np.array([64, 25]),
np.array([75.0, 60.0]),
np.array([3 + 4j, 5 + 6j]),
np.array([True, False]), ]
assert_equal(test.dtype.names, ['f0', 'f1', 'f2', 'f3', 'f4'])
for (i, ctrl) in enumerate(control):
assert_equal(test['f%i' % i], ctrl)
def test_auto_dtype_uniform(self):
"Tests whether the output dtype can be uniformized"
data = TextIO('1 2 3 4\n5 6 7 8\n')
test = np.ndfromtxt(data, dtype=None)
control = np.array([[1, 2, 3, 4], [5, 6, 7, 8]])
assert_equal(test, control)
def test_fancy_dtype(self):
"Check that a nested dtype isn't MIA"
data = TextIO('1,2,3.0\n4,5,6.0\n')
fancydtype = np.dtype([('x', int), ('y', [('t', int), ('s', float)])])
test = np.ndfromtxt(data, dtype=fancydtype, delimiter=',')
control = np.array([(1, (2, 3.0)), (4, (5, 6.0))], dtype=fancydtype)
assert_equal(test, control)
def test_names_overwrite(self):
"Test overwriting the names of the dtype"
descriptor = {'names': ('g', 'a', 'w'),
'formats': ('S1', 'i4', 'f4')}
data = TextIO(b'M 64.0 75.0\nF 25.0 60.0')
names = ('gender', 'age', 'weight')
test = np.ndfromtxt(data, dtype=descriptor, names=names)
descriptor['names'] = names
control = np.array([('M', 64.0, 75.0),
('F', 25.0, 60.0)], dtype=descriptor)
assert_equal(test, control)
def test_commented_header(self):
"Check that names can be retrieved even if the line is commented out."
data = TextIO("""
#gender age weight
M 21 72.100000
F 35 58.330000
M 33 21.99
""")
# The # is part of the first name and should be deleted automatically.
test = np.genfromtxt(data, names=True, dtype=None)
ctrl = np.array([('M', 21, 72.1), ('F', 35, 58.33), ('M', 33, 21.99)],
dtype=[('gender', '|S1'), ('age', int), ('weight', float)])
assert_equal(test, ctrl)
# Ditto, but we should get rid of the first element
data = TextIO(b"""
# gender age weight
M 21 72.100000
F 35 58.330000
M 33 21.99<|fim▁hole|> assert_equal(test, ctrl)
def test_autonames_and_usecols(self):
"Tests names and usecols"
data = TextIO('A B C D\n aaaa 121 45 9.1')
test = np.ndfromtxt(data, usecols=('A', 'C', 'D'),
names=True, dtype=None)
control = np.array(('aaaa', 45, 9.1),
dtype=[('A', '|S4'), ('C', int), ('D', float)])
assert_equal(test, control)
def test_converters_with_usecols(self):
"Test the combination user-defined converters and usecol"
data = TextIO('1,2,3,,5\n6,7,8,9,10\n')
test = np.ndfromtxt(data, dtype=int, delimiter=',',
converters={3: lambda s: int(s or - 999)},
usecols=(1, 3,))
control = np.array([[2, -999], [7, 9]], int)
assert_equal(test, control)
def test_converters_with_usecols_and_names(self):
"Tests names and usecols"
data = TextIO('A B C D\n aaaa 121 45 9.1')
test = np.ndfromtxt(data, usecols=('A', 'C', 'D'), names=True,
dtype=None, converters={'C': lambda s: 2 * int(s)})
control = np.array(('aaaa', 90, 9.1),
dtype=[('A', '|S4'), ('C', int), ('D', float)])
assert_equal(test, control)
def test_converters_cornercases(self):
"Test the conversion to datetime."
converter = {
'date': lambda s: strptime(s, '%Y-%m-%d %H:%M:%SZ')}
data = TextIO('2009-02-03 12:00:00Z, 72214.0')
test = np.ndfromtxt(data, delimiter=',', dtype=None,
names=['date', 'stid'], converters=converter)
control = np.array((datetime(2009, 2, 3), 72214.),
dtype=[('date', np.object_), ('stid', float)])
assert_equal(test, control)
def test_converters_cornercases2(self):
"Test the conversion to datetime64."
converter = {
'date': lambda s: np.datetime64(strptime(s, '%Y-%m-%d %H:%M:%SZ'))}
data = TextIO('2009-02-03 12:00:00Z, 72214.0')
test = np.ndfromtxt(data, delimiter=',', dtype=None,
names=['date', 'stid'], converters=converter)
control = np.array((datetime(2009, 2, 3), 72214.),
dtype=[('date', 'datetime64[us]'), ('stid', float)])
assert_equal(test, control)
def test_unused_converter(self):
"Test whether unused converters are forgotten"
data = TextIO("1 21\n 3 42\n")
test = np.ndfromtxt(data, usecols=(1,),
converters={0: lambda s: int(s, 16)})
assert_equal(test, [21, 42])
#
data.seek(0)
test = np.ndfromtxt(data, usecols=(1,),
converters={1: lambda s: int(s, 16)})
assert_equal(test, [33, 66])
def test_invalid_converter(self):
strip_rand = lambda x: float((b'r' in x.lower() and x.split()[-1]) or
(b'r' not in x.lower() and x.strip() or 0.0))
strip_per = lambda x: float((b'%' in x.lower() and x.split()[0]) or
(b'%' not in x.lower() and x.strip() or 0.0))
s = TextIO("D01N01,10/1/2003 ,1 %,R 75,400,600\r\n"
"L24U05,12/5/2003, 2 %,1,300, 150.5\r\n"
"D02N03,10/10/2004,R 1,,7,145.55")
kwargs = dict(
converters={2: strip_per, 3: strip_rand}, delimiter=",",
dtype=None)
assert_raises(ConverterError, np.genfromtxt, s, **kwargs)
def test_tricky_converter_bug1666(self):
"Test some corner case"
s = TextIO('q1,2\nq3,4')
cnv = lambda s: float(s[1:])
test = np.genfromtxt(s, delimiter=',', converters={0: cnv})
control = np.array([[1., 2.], [3., 4.]])
assert_equal(test, control)
def test_dtype_with_converters(self):
dstr = "2009; 23; 46"
test = np.ndfromtxt(TextIO(dstr,),
delimiter=";", dtype=float, converters={0: bytes})
control = np.array([('2009', 23., 46)],
dtype=[('f0', '|S4'), ('f1', float), ('f2', float)])
assert_equal(test, control)
test = np.ndfromtxt(TextIO(dstr,),
delimiter=";", dtype=float, converters={0: float})
control = np.array([2009., 23., 46],)
assert_equal(test, control)
def test_dtype_with_converters_and_usecols(self):
dstr = "1,5,-1,1:1\n2,8,-1,1:n\n3,3,-2,m:n\n"
dmap = {'1:1':0, '1:n':1, 'm:1':2, 'm:n':3}
dtyp = [('E1','i4'),('E2','i4'),('E3','i2'),('N', 'i1')]
conv = {0: int, 1: int, 2: int, 3: lambda r: dmap[r.decode()]}
test = np.recfromcsv(TextIO(dstr,), dtype=dtyp, delimiter=',',
names=None, converters=conv)
control = np.rec.array([[1,5,-1,0], [2,8,-1,1], [3,3,-2,3]], dtype=dtyp)
assert_equal(test, control)
dtyp = [('E1','i4'),('E2','i4'),('N', 'i1')]
test = np.recfromcsv(TextIO(dstr,), dtype=dtyp, delimiter=',',
usecols=(0,1,3), names=None, converters=conv)
control = np.rec.array([[1,5,0], [2,8,1], [3,3,3]], dtype=dtyp)
assert_equal(test, control)
def test_dtype_with_object(self):
"Test using an explicit dtype with an object"
from datetime import date
import time
data = """ 1; 2001-01-01
2; 2002-01-31 """
ndtype = [('idx', int), ('code', np.object)]
func = lambda s: strptime(s.strip(), "%Y-%m-%d")
converters = {1: func}
test = np.genfromtxt(TextIO(data), delimiter=";", dtype=ndtype,
converters=converters)
control = np.array(
[(1, datetime(2001, 1, 1)), (2, datetime(2002, 1, 31))],
dtype=ndtype)
assert_equal(test, control)
#
ndtype = [('nest', [('idx', int), ('code', np.object)])]
try:
test = np.genfromtxt(TextIO(data), delimiter=";",
dtype=ndtype, converters=converters)
except NotImplementedError:
pass
else:
errmsg = "Nested dtype involving objects should be supported."
raise AssertionError(errmsg)
def test_userconverters_with_explicit_dtype(self):
"Test user_converters w/ explicit (standard) dtype"
data = TextIO('skip,skip,2001-01-01,1.0,skip')
test = np.genfromtxt(data, delimiter=",", names=None, dtype=float,
usecols=(2, 3), converters={2: bytes})
control = np.array([('2001-01-01', 1.)],
dtype=[('', '|S10'), ('', float)])
assert_equal(test, control)
def test_spacedelimiter(self):
"Test space delimiter"
data = TextIO("1 2 3 4 5\n6 7 8 9 10")
test = np.ndfromtxt(data)
control = np.array([[1., 2., 3., 4., 5.],
[6., 7., 8., 9., 10.]])
assert_equal(test, control)
def test_integer_delimiter(self):
"Test using an integer for delimiter"
data = " 1 2 3\n 4 5 67\n890123 4"
test = np.genfromtxt(TextIO(data), delimiter=3)
control = np.array([[1, 2, 3], [4, 5, 67], [890, 123, 4]])
assert_equal(test, control)
def test_missing(self):
data = TextIO('1,2,3,,5\n')
test = np.ndfromtxt(data, dtype=int, delimiter=',',
converters={3: lambda s: int(s or - 999)})
control = np.array([1, 2, 3, -999, 5], int)
assert_equal(test, control)
def test_missing_with_tabs(self):
"Test w/ a delimiter tab"
txt = "1\t2\t3\n\t2\t\n1\t\t3"
test = np.genfromtxt(TextIO(txt), delimiter="\t",
usemask=True,)
ctrl_d = np.array([(1, 2, 3), (np.nan, 2, np.nan), (1, np.nan, 3)],)
ctrl_m = np.array([(0, 0, 0), (1, 0, 1), (0, 1, 0)], dtype=bool)
assert_equal(test.data, ctrl_d)
assert_equal(test.mask, ctrl_m)
def test_usecols(self):
"Test the selection of columns"
# Select 1 column
control = np.array([[1, 2], [3, 4]], float)
data = TextIO()
np.savetxt(data, control)
data.seek(0)
test = np.ndfromtxt(data, dtype=float, usecols=(1,))
assert_equal(test, control[:, 1])
#
control = np.array([[1, 2, 3], [3, 4, 5]], float)
data = TextIO()
np.savetxt(data, control)
data.seek(0)
test = np.ndfromtxt(data, dtype=float, usecols=(1, 2))
assert_equal(test, control[:, 1:])
# Testing with arrays instead of tuples.
data.seek(0)
test = np.ndfromtxt(data, dtype=float, usecols=np.array([1, 2]))
assert_equal(test, control[:, 1:])
def test_usecols_as_css(self):
"Test giving usecols with a comma-separated string"
data = "1 2 3\n4 5 6"
test = np.genfromtxt(TextIO(data),
names="a, b, c", usecols="a, c")
ctrl = np.array([(1, 3), (4, 6)], dtype=[(_, float) for _ in "ac"])
assert_equal(test, ctrl)
def test_usecols_with_structured_dtype(self):
"Test usecols with an explicit structured dtype"
data = TextIO("JOE 70.1 25.3\nBOB 60.5 27.9")
names = ['stid', 'temp']
dtypes = ['S4', 'f8']
test = np.ndfromtxt(
data, usecols=(0, 2), dtype=list(zip(names, dtypes)))
assert_equal(test['stid'], [b"JOE", b"BOB"])
assert_equal(test['temp'], [25.3, 27.9])
def test_usecols_with_integer(self):
"Test usecols with an integer"
test = np.genfromtxt(TextIO(b"1 2 3\n4 5 6"), usecols=0)
assert_equal(test, np.array([1., 4.]))
def test_usecols_with_named_columns(self):
"Test usecols with named columns"
ctrl = np.array([(1, 3), (4, 6)], dtype=[('a', float), ('c', float)])
data = "1 2 3\n4 5 6"
kwargs = dict(names="a, b, c")
test = np.genfromtxt(TextIO(data), usecols=(0, -1), **kwargs)
assert_equal(test, ctrl)
test = np.genfromtxt(TextIO(data),
usecols=('a', 'c'), **kwargs)
assert_equal(test, ctrl)
def test_empty_file(self):
"Test that an empty file raises the proper warning."
with warnings.catch_warnings():
warnings.filterwarnings("ignore",
message="genfromtxt: Empty input file:")
data = TextIO()
test = np.genfromtxt(data)
assert_equal(test, np.array([]))
def test_fancy_dtype_alt(self):
"Check that a nested dtype isn't MIA"
data = TextIO('1,2,3.0\n4,5,6.0\n')
fancydtype = np.dtype([('x', int), ('y', [('t', int), ('s', float)])])
test = np.mafromtxt(data, dtype=fancydtype, delimiter=',')
control = ma.array([(1, (2, 3.0)), (4, (5, 6.0))], dtype=fancydtype)
assert_equal(test, control)
def test_shaped_dtype(self):
c = TextIO("aaaa 1.0 8.0 1 2 3 4 5 6")
dt = np.dtype([('name', 'S4'), ('x', float), ('y', float),
('block', int, (2, 3))])
x = np.ndfromtxt(c, dtype=dt)
a = np.array([('aaaa', 1.0, 8.0, [[1, 2, 3], [4, 5, 6]])],
dtype=dt)
assert_array_equal(x, a)
def test_withmissing(self):
data = TextIO('A,B\n0,1\n2,N/A')
kwargs = dict(delimiter=",", missing_values="N/A", names=True)
test = np.mafromtxt(data, dtype=None, **kwargs)
control = ma.array([(0, 1), (2, -1)],
mask=[(False, False), (False, True)],
dtype=[('A', np.int), ('B', np.int)])
assert_equal(test, control)
assert_equal(test.mask, control.mask)
#
data.seek(0)
test = np.mafromtxt(data, **kwargs)
control = ma.array([(0, 1), (2, -1)],
mask=[(False, False), (False, True)],
dtype=[('A', np.float), ('B', np.float)])
assert_equal(test, control)
assert_equal(test.mask, control.mask)
def test_user_missing_values(self):
data = "A, B, C\n0, 0., 0j\n1, N/A, 1j\n-9, 2.2, N/A\n3, -99, 3j"
basekwargs = dict(dtype=None, delimiter=",", names=True,)
mdtype = [('A', int), ('B', float), ('C', complex)]
#
test = np.mafromtxt(TextIO(data), missing_values="N/A",
**basekwargs)
control = ma.array([(0, 0.0, 0j), (1, -999, 1j),
(-9, 2.2, -999j), (3, -99, 3j)],
mask=[(0, 0, 0), (0, 1, 0), (0, 0, 1), (0, 0, 0)],
dtype=mdtype)
assert_equal(test, control)
#
basekwargs['dtype'] = mdtype
test = np.mafromtxt(TextIO(data),
missing_values={0: -9, 1: -99, 2: -999j}, **basekwargs)
control = ma.array([(0, 0.0, 0j), (1, -999, 1j),
(-9, 2.2, -999j), (3, -99, 3j)],
mask=[(0, 0, 0), (0, 1, 0), (1, 0, 1), (0, 1, 0)],
dtype=mdtype)
assert_equal(test, control)
#
test = np.mafromtxt(TextIO(data),
missing_values={0: -9, 'B': -99, 'C': -999j},
**basekwargs)
control = ma.array([(0, 0.0, 0j), (1, -999, 1j),
(-9, 2.2, -999j), (3, -99, 3j)],
mask=[(0, 0, 0), (0, 1, 0), (1, 0, 1), (0, 1, 0)],
dtype=mdtype)
assert_equal(test, control)
def test_user_filling_values(self):
"Test with missing and filling values"
ctrl = np.array([(0, 3), (4, -999)], dtype=[('a', int), ('b', int)])
data = "N/A, 2, 3\n4, ,???"
kwargs = dict(delimiter=",",
dtype=int,
names="a,b,c",
missing_values={0: "N/A", 'b': " ", 2: "???"},
filling_values={0: 0, 'b': 0, 2: -999})
test = np.genfromtxt(TextIO(data), **kwargs)
ctrl = np.array([(0, 2, 3), (4, 0, -999)],
dtype=[(_, int) for _ in "abc"])
assert_equal(test, ctrl)
#
test = np.genfromtxt(TextIO(data), usecols=(0, -1), **kwargs)
ctrl = np.array([(0, 3), (4, -999)], dtype=[(_, int) for _ in "ac"])
assert_equal(test, ctrl)
data2 = "1,2,*,4\n5,*,7,8\n"
test = np.genfromtxt(TextIO(data2), delimiter=',', dtype=int,
missing_values="*", filling_values=0)
ctrl = np.array([[1, 2, 0, 4], [5, 0, 7, 8]])
assert_equal(test, ctrl)
test = np.genfromtxt(TextIO(data2), delimiter=',', dtype=int,
missing_values="*", filling_values=-1)
ctrl = np.array([[1, 2, -1, 4], [5, -1, 7, 8]])
assert_equal(test, ctrl)
def test_withmissing_float(self):
data = TextIO('A,B\n0,1.5\n2,-999.00')
test = np.mafromtxt(data, dtype=None, delimiter=',',
missing_values='-999.0', names=True,)
control = ma.array([(0, 1.5), (2, -1.)],
mask=[(False, False), (False, True)],
dtype=[('A', np.int), ('B', np.float)])
assert_equal(test, control)
assert_equal(test.mask, control.mask)
def test_with_masked_column_uniform(self):
"Test masked column"
data = TextIO('1 2 3\n4 5 6\n')
test = np.genfromtxt(data, dtype=None,
missing_values='2,5', usemask=True)
control = ma.array([[1, 2, 3], [4, 5, 6]], mask=[[0, 1, 0], [0, 1, 0]])
assert_equal(test, control)
def test_with_masked_column_various(self):
"Test masked column"
data = TextIO('True 2 3\nFalse 5 6\n')
test = np.genfromtxt(data, dtype=None,
missing_values='2,5', usemask=True)
control = ma.array([(1, 2, 3), (0, 5, 6)],
mask=[(0, 1, 0), (0, 1, 0)],
dtype=[('f0', bool), ('f1', bool), ('f2', int)])
assert_equal(test, control)
def test_invalid_raise(self):
"Test invalid raise"
data = ["1, 1, 1, 1, 1"] * 50
for i in range(5):
data[10 * i] = "2, 2, 2, 2 2"
data.insert(0, "a, b, c, d, e")
mdata = TextIO("\n".join(data))
#
kwargs = dict(delimiter=",", dtype=None, names=True)
# XXX: is there a better way to get the return value of the callable in
# assert_warns ?
ret = {}
def f(_ret={}):
_ret['mtest'] = np.ndfromtxt(mdata, invalid_raise=False, **kwargs)
assert_warns(ConversionWarning, f, _ret=ret)
mtest = ret['mtest']
assert_equal(len(mtest), 45)
assert_equal(mtest, np.ones(45, dtype=[(_, int) for _ in 'abcde']))
#
mdata.seek(0)
assert_raises(ValueError, np.ndfromtxt, mdata,
delimiter=",", names=True)
def test_invalid_raise_with_usecols(self):
"Test invalid_raise with usecols"
data = ["1, 1, 1, 1, 1"] * 50
for i in range(5):
data[10 * i] = "2, 2, 2, 2 2"
data.insert(0, "a, b, c, d, e")
mdata = TextIO("\n".join(data))
kwargs = dict(delimiter=",", dtype=None, names=True,
invalid_raise=False)
# XXX: is there a better way to get the return value of the callable in
# assert_warns ?
ret = {}
def f(_ret={}):
_ret['mtest'] = np.ndfromtxt(mdata, usecols=(0, 4), **kwargs)
assert_warns(ConversionWarning, f, _ret=ret)
mtest = ret['mtest']
assert_equal(len(mtest), 45)
assert_equal(mtest, np.ones(45, dtype=[(_, int) for _ in 'ae']))
#
mdata.seek(0)
mtest = np.ndfromtxt(mdata, usecols=(0, 1), **kwargs)
assert_equal(len(mtest), 50)
control = np.ones(50, dtype=[(_, int) for _ in 'ab'])
control[[10 * _ for _ in range(5)]] = (2, 2)
assert_equal(mtest, control)
def test_inconsistent_dtype(self):
"Test inconsistent dtype"
data = ["1, 1, 1, 1, -1.1"] * 50
mdata = TextIO("\n".join(data))
converters = {4: lambda x: "(%s)" % x}
kwargs = dict(delimiter=",", converters=converters,
dtype=[(_, int) for _ in 'abcde'],)
assert_raises(ValueError, np.genfromtxt, mdata, **kwargs)
def test_default_field_format(self):
"Test default format"
data = "0, 1, 2.3\n4, 5, 6.7"
mtest = np.ndfromtxt(TextIO(data),
delimiter=",", dtype=None, defaultfmt="f%02i")
ctrl = np.array([(0, 1, 2.3), (4, 5, 6.7)],
dtype=[("f00", int), ("f01", int), ("f02", float)])
assert_equal(mtest, ctrl)
def test_single_dtype_wo_names(self):
"Test single dtype w/o names"
data = "0, 1, 2.3\n4, 5, 6.7"
mtest = np.ndfromtxt(TextIO(data),
delimiter=",", dtype=float, defaultfmt="f%02i")
ctrl = np.array([[0., 1., 2.3], [4., 5., 6.7]], dtype=float)
assert_equal(mtest, ctrl)
def test_single_dtype_w_explicit_names(self):
"Test single dtype w explicit names"
data = "0, 1, 2.3\n4, 5, 6.7"
mtest = np.ndfromtxt(TextIO(data),
delimiter=",", dtype=float, names="a, b, c")
ctrl = np.array([(0., 1., 2.3), (4., 5., 6.7)],
dtype=[(_, float) for _ in "abc"])
assert_equal(mtest, ctrl)
def test_single_dtype_w_implicit_names(self):
"Test single dtype w implicit names"
data = "a, b, c\n0, 1, 2.3\n4, 5, 6.7"
mtest = np.ndfromtxt(TextIO(data),
delimiter=",", dtype=float, names=True)
ctrl = np.array([(0., 1., 2.3), (4., 5., 6.7)],
dtype=[(_, float) for _ in "abc"])
assert_equal(mtest, ctrl)
def test_easy_structured_dtype(self):
"Test easy structured dtype"
data = "0, 1, 2.3\n4, 5, 6.7"
mtest = np.ndfromtxt(TextIO(data), delimiter=",",
dtype=(int, float, float), defaultfmt="f_%02i")
ctrl = np.array([(0, 1., 2.3), (4, 5., 6.7)],
dtype=[("f_00", int), ("f_01", float), ("f_02", float)])
assert_equal(mtest, ctrl)
def test_autostrip(self):
"Test autostrip"
data = "01/01/2003 , 1.3, abcde"
kwargs = dict(delimiter=",", dtype=None)
mtest = np.ndfromtxt(TextIO(data), **kwargs)
ctrl = np.array([('01/01/2003 ', 1.3, ' abcde')],
dtype=[('f0', '|S12'), ('f1', float), ('f2', '|S8')])
assert_equal(mtest, ctrl)
mtest = np.ndfromtxt(TextIO(data), autostrip=True, **kwargs)
ctrl = np.array([('01/01/2003', 1.3, 'abcde')],
dtype=[('f0', '|S10'), ('f1', float), ('f2', '|S5')])
assert_equal(mtest, ctrl)
def test_replace_space(self):
"Test the 'replace_space' option"
txt = "A.A, B (B), C:C\n1, 2, 3.14"
# Test default: replace ' ' by '_' and delete non-alphanum chars
test = np.genfromtxt(TextIO(txt),
delimiter=",", names=True, dtype=None)
ctrl_dtype = [("AA", int), ("B_B", int), ("CC", float)]
ctrl = np.array((1, 2, 3.14), dtype=ctrl_dtype)
assert_equal(test, ctrl)
# Test: no replace, no delete
test = np.genfromtxt(TextIO(txt),
delimiter=",", names=True, dtype=None,
replace_space='', deletechars='')
ctrl_dtype = [("A.A", int), ("B (B)", int), ("C:C", float)]
ctrl = np.array((1, 2, 3.14), dtype=ctrl_dtype)
assert_equal(test, ctrl)
# Test: no delete (spaces are replaced by _)
test = np.genfromtxt(TextIO(txt),
delimiter=",", names=True, dtype=None,
deletechars='')
ctrl_dtype = [("A.A", int), ("B_(B)", int), ("C:C", float)]
ctrl = np.array((1, 2, 3.14), dtype=ctrl_dtype)
assert_equal(test, ctrl)
def test_incomplete_names(self):
"Test w/ incomplete names"
data = "A,,C\n0,1,2\n3,4,5"
kwargs = dict(delimiter=",", names=True)
# w/ dtype=None
ctrl = np.array([(0, 1, 2), (3, 4, 5)],
dtype=[(_, int) for _ in ('A', 'f0', 'C')])
test = np.ndfromtxt(TextIO(data), dtype=None, **kwargs)
assert_equal(test, ctrl)
# w/ default dtype
ctrl = np.array([(0, 1, 2), (3, 4, 5)],
dtype=[(_, float) for _ in ('A', 'f0', 'C')])
test = np.ndfromtxt(TextIO(data), **kwargs)
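        assert_equal(test, ctrl)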
def test_names_auto_completion(self):
"Make sure that names are properly completed"
data = "1 2 3\n 4 5 6"
test = np.genfromtxt(TextIO(data),
dtype=(int, float, int), names="a")
ctrl = np.array([(1, 2, 3), (4, 5, 6)],
dtype=[('a', int), ('f0', float), ('f1', int)])
assert_equal(test, ctrl)
def test_names_with_usecols_bug1636(self):
"Make sure we pick up the right names w/ usecols"
data = "A,B,C,D,E\n0,1,2,3,4\n0,1,2,3,4\n0,1,2,3,4"
ctrl_names = ("A", "C", "E")
test = np.genfromtxt(TextIO(data),
dtype=(int, int, int), delimiter=",",
usecols=(0, 2, 4), names=True)
assert_equal(test.dtype.names, ctrl_names)
#
test = np.genfromtxt(TextIO(data),
dtype=(int, int, int), delimiter=",",
usecols=("A", "C", "E"), names=True)
assert_equal(test.dtype.names, ctrl_names)
#
test = np.genfromtxt(TextIO(data),
dtype=int, delimiter=",",
usecols=("A", "C", "E"), names=True)
assert_equal(test.dtype.names, ctrl_names)
def test_fixed_width_names(self):
"Test fix-width w/ names"
data = " A B C\n 0 1 2.3\n 45 67 9."
kwargs = dict(delimiter=(5, 5, 4), names=True, dtype=None)
ctrl = np.array([(0, 1, 2.3), (45, 67, 9.)],
dtype=[('A', int), ('B', int), ('C', float)])
test = np.ndfromtxt(TextIO(data), **kwargs)
assert_equal(test, ctrl)
#
kwargs = dict(delimiter=5, names=True, dtype=None)
ctrl = np.array([(0, 1, 2.3), (45, 67, 9.)],
dtype=[('A', int), ('B', int), ('C', float)])
test = np.ndfromtxt(TextIO(data), **kwargs)
assert_equal(test, ctrl)
def test_filling_values(self):
"Test missing values"
data = b"1, 2, 3\n1, , 5\n0, 6, \n"
kwargs = dict(delimiter=",", dtype=None, filling_values=-999)
ctrl = np.array([[1, 2, 3], [1, -999, 5], [0, 6, -999]], dtype=int)
test = np.ndfromtxt(TextIO(data), **kwargs)
assert_equal(test, ctrl)
def test_comments_is_none(self):
# Github issue 329 (None was previously being converted to 'None').
test = np.genfromtxt(TextIO("test1,testNonetherestofthedata"),
dtype=None, comments=None, delimiter=',')
assert_equal(test[1], b'testNonetherestofthedata')
test = np.genfromtxt(TextIO("test1, testNonetherestofthedata"),
dtype=None, comments=None, delimiter=',')
assert_equal(test[1], b' testNonetherestofthedata')
def test_recfromtxt(self):
#
data = TextIO('A,B\n0,1\n2,3')
kwargs = dict(delimiter=",", missing_values="N/A", names=True)
test = np.recfromtxt(data, **kwargs)
control = np.array([(0, 1), (2, 3)],
dtype=[('A', np.int), ('B', np.int)])
self.assertTrue(isinstance(test, np.recarray))
assert_equal(test, control)
#
data = TextIO('A,B\n0,1\n2,N/A')
test = np.recfromtxt(data, dtype=None, usemask=True, **kwargs)
control = ma.array([(0, 1), (2, -1)],
mask=[(False, False), (False, True)],
dtype=[('A', np.int), ('B', np.int)])
assert_equal(test, control)
assert_equal(test.mask, control.mask)
assert_equal(test.A, [0, 2])
def test_recfromcsv(self):
#
data = TextIO('A,B\n0,1\n2,3')
kwargs = dict(missing_values="N/A", names=True, case_sensitive=True)
test = np.recfromcsv(data, dtype=None, **kwargs)
control = np.array([(0, 1), (2, 3)],
dtype=[('A', np.int), ('B', np.int)])
self.assertTrue(isinstance(test, np.recarray))
assert_equal(test, control)
#
data = TextIO('A,B\n0,1\n2,N/A')
test = np.recfromcsv(data, dtype=None, usemask=True, **kwargs)
control = ma.array([(0, 1), (2, -1)],
mask=[(False, False), (False, True)],
dtype=[('A', np.int), ('B', np.int)])
assert_equal(test, control)
assert_equal(test.mask, control.mask)
assert_equal(test.A, [0, 2])
#
data = TextIO('A,B\n0,1\n2,3')
test = np.recfromcsv(data, missing_values='N/A',)
control = np.array([(0, 1), (2, 3)],
dtype=[('a', np.int), ('b', np.int)])
self.assertTrue(isinstance(test, np.recarray))
assert_equal(test, control)
#
data = TextIO('A,B\n0,1\n2,3')
dtype = [('a', np.int), ('b', np.float)]
test = np.recfromcsv(data, missing_values='N/A', dtype=dtype)
control = np.array([(0, 1), (2, 3)],
dtype=dtype)
self.assertTrue(isinstance(test, np.recarray))
assert_equal(test, control)
def test_gft_using_filename(self):
# Test that we can load data from a filename as well as a file object
wanted = np.arange(6).reshape((2, 3))
if sys.version_info[0] >= 3:
# python 3k is known to fail for '\r'
linesep = ('\n', '\r\n')
else:
linesep = ('\n', '\r\n', '\r')
for sep in linesep:
data = '0 1 2' + sep + '3 4 5'
f, name = mkstemp()
# We can't use NamedTemporaryFile on windows, because we cannot
# reopen the file.
try:
os.write(f, asbytes(data))
assert_array_equal(np.genfromtxt(name), wanted)
finally:
os.close(f)
os.unlink(name)
def test_gft_using_generator(self):
# gft doesn't work with unicode.
def count():
for i in range(10):
yield asbytes("%d" % i)
res = np.genfromtxt(count())
assert_array_equal(res, np.arange(10))
def test_gzip_load():
a = np.random.random((5, 5))
s = BytesIO()
f = gzip.GzipFile(fileobj=s, mode="w")
np.save(f, a)
f.close()
s.seek(0)
f = gzip.GzipFile(fileobj=s, mode="r")
assert_array_equal(np.load(f), a)
def test_gzip_loadtxt():
    # Thanks to another Windows brokenness, we can't use
# NamedTemporaryFile: a file created from this function cannot be
# reopened by another open call. So we first put the gzipped string
# of the test reference array, write it to a securely opened file,
# which is then read from by the loadtxt function
s = BytesIO()
g = gzip.GzipFile(fileobj=s, mode='w')
g.write(b'1 2 3\n')
g.close()
s.seek(0)
f, name = mkstemp(suffix='.gz')
try:
os.write(f, s.read())
s.close()
assert_array_equal(np.loadtxt(name), [1, 2, 3])
finally:
os.close(f)
os.unlink(name)
def test_gzip_loadtxt_from_string():
s = BytesIO()
f = gzip.GzipFile(fileobj=s, mode="w")
f.write(b'1 2 3\n')
f.close()
s.seek(0)
f = gzip.GzipFile(fileobj=s, mode="r")
assert_array_equal(np.loadtxt(f), [1, 2, 3])
def test_npzfile_dict():
s = BytesIO()
x = np.zeros((3, 3))
y = np.zeros((3, 3))
np.savez(s, x=x, y=y)
s.seek(0)
z = np.load(s)
assert_('x' in z)
assert_('y' in z)
assert_('x' in z.keys())
assert_('y' in z.keys())
for f, a in z.items():
assert_(f in ['x', 'y'])
assert_equal(a.shape, (3, 3))
assert_(len(z.items()) == 2)
for f in z:
assert_(f in ['x', 'y'])
assert_('x' in z.keys())
def test_load_refcount():
# Check that objects returned by np.load are directly freed based on
# their refcount, rather than needing the gc to collect them.
f = BytesIO()
np.savez(f, [1, 2, 3])
f.seek(0)
gc.collect()
n_before = len(gc.get_objects())
np.load(f)
n_after = len(gc.get_objects())
assert_equal(n_before, n_after)
if __name__ == "__main__":
run_module_suite()<|fim▁end|> | """)
test = np.genfromtxt(data, names=True, dtype=None) |
<|file_name|>nullvec_ops_primitive.rs<|end_file_name|><|fim▁begin|>use std::ops::{Add, Sub, Mul, Div, Rem, BitAnd, BitOr, BitXor};
use algos::vec_ops::Elemwise;
use nullvec::NullVec;
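// Generates the four owned/borrowed impl combinations of `NullVec<T> <op> primitive`:
// the scalar operand is broadcast across the data while the null mask is preserved.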
macro_rules! add_primitive_broadcast_op {
($t:ident, $tr:ident, $op:ident, $sym:tt) => {
// Nullvec + Primitive
impl $tr<$t> for NullVec<$t> {
type Output = NullVec<$t>;
fn $op(self, other: $t) -> NullVec<$t> {
let mask = self.mask.clone();
let new_values = Elemwise::broadcast_oo(self.data, other, |x, y| x $sym y);
NullVec::with_mask(new_values, mask)
}
}
// Nullvec + &Primitive
impl<'a> $tr<&'a $t> for NullVec<$t> {
type Output = NullVec<$t>;
fn $op(self, other: &$t) -> NullVec<$t> {
let mask = self.mask.clone();
let new_values = Elemwise::broadcast_or(self.data, other, |x, y| x $sym y);
NullVec::with_mask(new_values, mask)
}
}
// &Nullvec + Primitive
impl<'b> $tr<$t> for &'b NullVec<$t> {
type Output = NullVec<$t>;
fn $op(self, other: $t) -> NullVec<$t> {
let mask = self.mask.clone();
let new_values = Elemwise::broadcast_ro(&self.data, other, |x, y| x $sym y);
NullVec::with_mask(new_values, mask)
}
}
// &Nullvec + &Primitive
impl<'a, 'b> $tr<&'a $t> for &'b NullVec<$t> {
type Output = NullVec<$t>;
fn $op(self, other: &$t) -> NullVec<$t> {
let mask = self.mask.clone();
let new_values = Elemwise::broadcast_rr(&self.data, other, |x, y| x $sym y);
NullVec::with_mask(new_values, mask)
}
}
}
}
macro_rules! add_primitive_broadcast_arithmetic_op_patterns {
($t:ident) => {
add_primitive_broadcast_op!($t, Add, add, +);
add_primitive_broadcast_op!($t, Sub, sub, -);
add_primitive_broadcast_op!($t, Mul, mul, *);
add_primitive_broadcast_op!($t, Div, div, /);
add_primitive_broadcast_op!($t, Rem, rem, %);
}
}
macro_dispatch!(
add_primitive_broadcast_arithmetic_op_patterns,
i64,
i32,
i16,
i8,
isize,
u64,
u32,
u16,
u8,
usize,
f64,
f32
);
macro_rules! add_primitive_broadcast_bitwise_op_patterns {
($t:ident) => {
add_primitive_broadcast_op!($t, BitAnd, bitand, &);
add_primitive_broadcast_op!($t, BitOr, bitor, |);
add_primitive_broadcast_op!($t, BitXor, bitxor, ^);
}
}
macro_dispatch!(
add_primitive_broadcast_bitwise_op_patterns,
i64,
i32,
i16,
i8,
isize,
u64,
u32,
u16,
u8,
usize,
bool
);
#[cfg(test)]
mod tests {
use std::f64;
use nullvec::NullVec;
#[test]
fn test_int() {
let values: Vec<usize> = vec![1, 2, 3];
let nvec = NullVec::new(values);
let res = nvec + 2;
assert_eq!(res.data, vec![3, 4, 5]);
assert_eq!(res.mask, None);
}
#[test]
fn test_int_ref_rhs() {
let values: Vec<usize> = vec![1, 2, 3];
let nvec = NullVec::new(values);
let res = nvec + &2;
assert_eq!(res.data, vec![3, 4, 5]);
assert_eq!(res.mask, None);
}
#[test]
fn test_int_ref_lhs() {
let values: Vec<usize> = vec![1, 2, 3];
let nvec = NullVec::new(values);
let res = &nvec + 2;
assert_eq!(res.data, vec![3, 4, 5]);
assert_eq!(res.mask, None);
}
#[test]
fn test_int_ref_both() {
let values: Vec<usize> = vec![1, 2, 3];
let nvec = NullVec::new(values);
let res = &nvec + &2;
assert_eq!(res.data, vec![3, 4, 5]);
assert_eq!(res.mask, None);
}
#[test]
fn test_float() {
let values: Vec<f64> = vec![1.1, 1.2, 1.3];
let nvec = NullVec::new(values);
let res = nvec + 2.;
assert_eq!(res.data, vec![3.1, 3.2, 3.3]);
assert_eq!(res.mask, None);
}
#[test]
fn test_float_nan() {
let values: Vec<f64> = vec![1.1, f64::NAN, 1.3];<|fim▁hole|> assert_eq!(res.data, vec![3.1, 2., 3.3]);
assert_eq!(res.mask, Some(vec![false, true, false]));
}
#[test]
fn test_bool() {
let values: Vec<bool> = vec![true, false, true];
let nvec = NullVec::new(values);
let res = nvec & true;
assert_eq!(res.data, vec![true, false, true]);
assert_eq!(res.mask, None);
}
}<|fim▁end|> | let nvec = NullVec::new(values);
let res = nvec + 2.; |
<|file_name|>accordion.js<|end_file_name|><|fim▁begin|>KB.onClick('.accordion-toggle', function (e) {<|fim▁hole|> }
});<|fim▁end|> | var sectionElement = KB.dom(e.target).parent('.accordion-section');
if (sectionElement) {
KB.dom(sectionElement).toggleClass('accordion-collapsed'); |
<|file_name|>TestServer.py<|end_file_name|><|fim▁begin|># "$Name: $";
# "$Header: $";
# ============================================================================
#
# file : TestServer.py
#
# description : Python source for the TestServer and its commands.
# The class is derived from Device. It represents the
# CORBA servant object which will be accessed from the
# network. All commands which can be executed on the
# TestServer are implemented in this file.
#
# project : TANGO Device Server
#
# $Author: $
#
# $Revision: $
#
# $Log: $
#
# copyleft : European Synchrotron Radiation Facility
# BP 220, Grenoble 38043
# FRANCE
#
# ============================================================================
# This file is generated by POGO
# (Program Obviously used to Generate tango Object)
#
# (c) - Software Engineering Group - ESRF
# ============================================================================
#
import PyTango
import sys
import numpy
import struct
import pickle
if sys.version_info > (3,):
long = int
# unicode = str
else:
bytes = str
# =================================================================
# TestServer Class Description:
#
# My Simple Server
#
# =================================================================
# Device States Description:
#
# DevState.ON : Server On
# =================================================================
class TestServer(PyTango.Device_4Impl):
    # -------- Add your global variables here --------------------------
# -----------------------------------------------------------------
# Device constructor
# -----------------------------------------------------------------
def __init__(self, cl, name):
PyTango.Device_4Impl.__init__(self, cl, name)
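        # Map of attribute type name -> [default value, data format, Tango type],
        # used by ChangeValueType() to build the dynamic "Value" attribute.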
self.defaults = {}
self.defaults["ScalarBoolean"] = [
True, PyTango.SCALAR, PyTango.DevBoolean]
self.defaults["ScalarUChar"] = [
12, PyTango.SCALAR, PyTango.DevUChar]
self.defaults["ScalarShort"] = [
12, PyTango.SCALAR, PyTango.DevShort]
self.defaults["ScalarUShort"] = [
12, PyTango.SCALAR, PyTango.DevUShort]
self.defaults["ScalarLong"] = [
123, PyTango.SCALAR, PyTango.DevLong]
self.defaults["ScalarULong"] = [
123, PyTango.SCALAR, PyTango.DevULong]
self.defaults["ScalarLong64"] = [
123, PyTango.SCALAR, PyTango.DevLong64]
self.defaults["ScalarULong64"] = [
123, PyTango.SCALAR, PyTango.DevULong64]
self.defaults["ScalarFloat"] = [
-1.23, PyTango.SCALAR, PyTango.DevFloat]
self.defaults["ScalarDouble"] = [
123.45, PyTango.SCALAR, PyTango.DevDouble]
self.defaults["ScalarString"] = [
"Hello!", PyTango.SCALAR, PyTango.DevString]
self.defaults["ScalarEncoded"] = [
("UTF8", b"Hello UTF8! Pr\xc3\xb3ba \xe6\xb5\x8b"),
PyTango.SCALAR, PyTango.DevEncoded]
self.dtype = None
self.attr_ScalarBoolean = True
self.attr_ScalarUChar = 12
self.attr_ScalarShort = 12
self.attr_ScalarUShort = 12
self.attr_ScalarLong = 123
self.attr_ScalarULong = 123
self.attr_ScalarLong64 = 123
self.attr_ScalarULong64 = 123
self.attr_ScalarFloat = -1.23
self.attr_ScalarDouble = 1.233
self.attr_ScalarString = "Hello!"
self.attr_ScalarEncoded = \
"UTF8", b"Hello UTF8! Pr\xc3\xb3ba \xe6\xb5\x8b"
self.attr_SpectrumBoolean = [True, False]
self.attr_SpectrumUChar = [1, 2]
self.attr_SpectrumShort = [1, -3, 4]
self.attr_SpectrumUShort = [1, 4, 5, 6]
self.attr_SpectrumULong = numpy.array(
[1234, 5678, 45, 345], dtype='uint32')
self.attr_SpectrumLong = [1123, -435, 35, -6345]
self.attr_SpectrumLong64 = [1123, -435, 35, -6345]
self.attr_SpectrumULong64 = [1123, 23435, 35, 3345]
self.attr_SpectrumFloat = [11.23, -4.35, 3.5, -634.5]
self.attr_SpectrumDouble = [1.123, 23.435, 3.5, 3.345]
self.attr_SpectrumString = ["Hello", "Word", "!", "!!"]
self.attr_SpectrumEncoded = [
"INT32", b"\x00\x01\x03\x04\x20\x31\x43\x54\x10\x11\x13\x14"]
self.attr_SpectrumEncoded = self.encodeSpectrum()
self.attr_ImageBoolean = numpy.array([[True]], dtype='int16')
self.attr_ImageUChar = numpy.array([[2, 5], [3, 4]], dtype='uint8')
self.attr_ImageShort = numpy.array([[2, 5], [3, 4]], dtype='int16')
self.attr_ImageUShort = numpy.array([[2, 5], [3, 4]], dtype='uint16')
self.attr_ImageLong = numpy.array([[2, 5], [3, 4]], dtype='int32')
self.attr_ImageULong = numpy.array([[2, 5], [3, 4]], dtype='uint32')
self.attr_ImageLong64 = numpy.array([[2, 5], [3, 4]], dtype='int64')
self.attr_ImageULong64 = numpy.array([[2, 5], [3, 4]], dtype='uint64')
self.attr_ImageFloat = numpy.array([[2., 5.], [3., 4.]],
dtype='float32')
self.attr_ImageDouble = numpy.array([[2.4, 5.45], [3.4, 4.45]],
dtype='double')
self.attr_ImageString = [['True']]
self.attr_ImageEncoded = self.encodeImage()
self.attr_value = ""
TestServer.init_device(self)
def encodeSpectrum(self):
format = 'INT32'
        # struct format codes by data mode: uint8 -> 'B' (mode 0),
        # uint16 -> 'H' (mode 1), uint32 -> 'I' (mode 2)
fspectrum = numpy.array(self.attr_SpectrumULong, dtype='int32')
ibuffer = bytes(struct.pack('i' * fspectrum.size, *fspectrum))
return [format, ibuffer]
def encodeImage(self):
format = 'VIDEO_IMAGE'
        # mode 0: 8-bit pixels packed with struct code 'B';
        # mode 1 would be 16-bit pixels packed with 'H'
        mode = 0
width, height = self.attr_ImageUChar.shape
version = 1
endian = sys.byteorder == u'big'
# endian = ord(str(struct.pack('=H', 1)[-1]))
hsize = struct.calcsize('!IHHqiiHHHH')
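        # header fields for the '!IHHqiiHHHH' layout: magic, version, mode,
        # frame number, width, height, endianness flag, header size, padding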
header = struct.pack(
'!IHHqiiHHHH', 0x5644454f, version, mode, -1,
width, height, endian, hsize, 0, 0)
fimage = self.attr_ImageUChar.flatten()
ibuffer = struct.pack('B' * fimage.size, *fimage)
return [format, bytes(header + ibuffer)]
# -----------------------------------------------------------------
# Device destructor
# -----------------------------------------------------------------
def delete_device(self):
""" """
# -----------------------------------------------------------------
# Device initialization
# -----------------------------------------------------------------
def init_device(self):
self.set_state(PyTango.DevState.ON)
self.get_device_properties(self.get_device_class())
env = {'new': {'ActiveMntGrp': 'nxsmntgrp',
'DataCompressionRank': 0,
'NeXusSelectorDevice': u'p09/nxsrecselector/1',
'ScanDir': u'/tmp/',
'ScanFile': [u'sar4r.nxs'],
'ScanID': 192,
'_ViewOptions': {'ShowDial': True}}}
self.attr_Environment = ("pickle", pickle.dumps(env, protocol=2))
self.ChangeValueType("ScalarDouble")
self.attr_DoorList = ['test/door/1', 'test/door/2']
# -----------------------------------------------------------------
    # Always executed hook method
# -----------------------------------------------------------------
def always_executed_hook(self):
pass
# print "In ", self.get_name(), "::always_excuted_hook()"
#
# =================================================================
#
# TestServer read/write attribute methods
#
# =================================================================
#
# -----------------------------------------------------------------
# Read DoorList attribute
# -----------------------------------------------------------------
def read_DoorList(self, attr):
# Add your own code here
attr.set_value(self.attr_DoorList)
# -----------------------------------------------------------------
# Write DoorList attribute
# -----------------------------------------------------------------
def write_DoorList(self, attr):
# Add your own code here
self.attr_DoorList = attr.get_write_value()
# -----------------------------------------------------------------
# Read Environment attribute
# -----------------------------------------------------------------
def read_Environment(self, attr):
# Add your own code here
attr.set_value(self.attr_Environment[0], self.attr_Environment[1])
# -----------------------------------------------------------------
# Write Environment attribute
# -----------------------------------------------------------------
def write_Environment(self, attr):
# Add your own code here
self.attr_Environment = attr.get_write_value()
# -----------------------------------------------------------------
# Read Value attribute
# -----------------------------------------------------------------
def read_Value(self, attr):
# Add your own code here
attr.set_value(self.defaults[self.dtype][0])
# -----------------------------------------------------------------
# Write Value attribute
# -----------------------------------------------------------------
def write_Value(self, attr):
# Add your own code here
self.defaults[self.dtype][0] = attr.get_write_value()
# =================================================================
#
# TestServer command methods
#
# =================================================================
#
# -----------------------------------------------------------------
# SetState command:
#
# Description: Set state of tango device
#
# argin: DevString tango state
# -----------------------------------------------------------------
def SetState(self, state):
if state == "RUNNING":
self.set_state(PyTango.DevState.RUNNING)
elif state == "FAULT":
self.set_state(PyTango.DevState.FAULT)
elif state == "ALARM":
self.set_state(PyTango.DevState.ALARM)
else:
self.set_state(PyTango.DevState.ON)
# -----------------------------------------------------------------
# ChangeValueType command:
#
# Description: Set state of tango device
#
# argin: DevString tango state
# -----------------------------------------------------------------
def ChangeValueType(self, dtype):
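        # Re-create the dynamic "Value" attribute with the requested Tango type;
        # a previously created "Value" attribute is removed first.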
if dtype in self.defaults.keys():
if self.dtype is not None:
self.remove_attribute("Value")
self.dtype = dtype
dev_class = self.get_device_class()
attr_data = PyTango.AttrData(
"Value", dev_class.get_name(),
[
[
self.defaults[self.dtype][2],
self.defaults[self.dtype][1],
PyTango.READ_WRITE
],
{
'description': "dynamic attribute",
}
]
)
self.add_attribute(attr_data,
r_meth=self.read_Value,
w_meth=self.write_Value)
# -----------------------------------------------------------------
# Read ScalarLong attribute
# -----------------------------------------------------------------
def read_ScalarLong(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarLong)
# -----------------------------------------------------------------
# Write ScalarLong attribute
# -----------------------------------------------------------------
def write_ScalarLong(self, attr):
# Add your own code here
self.attr_ScalarLong = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarBoolean attribute
# -----------------------------------------------------------------
def read_ScalarBoolean(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarBoolean)
# -----------------------------------------------------------------
# Write ScalarBoolean attribute
# -----------------------------------------------------------------
def write_ScalarBoolean(self, attr):
# Add your own code here
self.attr_ScalarBoolean = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarShort attribute
# -----------------------------------------------------------------
def read_ScalarShort(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarShort)
# -----------------------------------------------------------------
# Write ScalarShort attribute
# -----------------------------------------------------------------
def write_ScalarShort(self, attr):
# Add your own code here
self.attr_ScalarShort = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarUShort attribute
# -----------------------------------------------------------------
def read_ScalarUShort(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarUShort)
# -----------------------------------------------------------------
# Write ScalarUShort attribute
# -----------------------------------------------------------------
def write_ScalarUShort(self, attr):
# Add your own code here
self.attr_ScalarUShort = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarULong attribute
# -----------------------------------------------------------------
def read_ScalarULong(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarULong)
# -----------------------------------------------------------------
# Write ScalarULong attribute
# -----------------------------------------------------------------
def write_ScalarULong(self, attr):
# Add your own code here
self.attr_ScalarULong = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarLong64 attribute
# -----------------------------------------------------------------
def read_ScalarLong64(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarLong64)
# -----------------------------------------------------------------
# Write ScalarLong64 attribute
# -----------------------------------------------------------------
def write_ScalarLong64(self, attr):
# Add your own code here
self.attr_ScalarLong64 = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarULong64 attribute
# -----------------------------------------------------------------
def read_ScalarULong64(self, attr):
# Add your own code here
attr.set_value(long(self.attr_ScalarULong64))
# Do not work as well
# -----------------------------------------------------------------
# Write ScalarULong64 attribute
# -----------------------------------------------------------------
def write_ScalarULong64(self, attr):
# Add your own code here
self.attr_ScalarULong64 = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarFloat attribute
# -----------------------------------------------------------------
def read_ScalarFloat(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarFloat)
# -----------------------------------------------------------------
# Write ScalarFloat attribute
# -----------------------------------------------------------------
def write_ScalarFloat(self, attr):
# Add your own code here
self.attr_ScalarFloat = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarDouble attribute
# -----------------------------------------------------------------
def read_ScalarDouble(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarDouble)
# -----------------------------------------------------------------
# Write ScalarDouble attribute
# -----------------------------------------------------------------
def write_ScalarDouble(self, attr):
# Add your own code here
self.attr_ScalarDouble = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarString attribute
# -----------------------------------------------------------------
def read_ScalarString(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarString)
# -----------------------------------------------------------------
# Write ScalarString attribute
# -----------------------------------------------------------------
def write_ScalarString(self, attr):
# Add your own code here
self.attr_ScalarString = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarEncoded attribute
# -----------------------------------------------------------------
def read_ScalarEncoded(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarEncoded[0], self.attr_ScalarEncoded[1])
# -----------------------------------------------------------------
# Write ScalarEncoded attribute
# -----------------------------------------------------------------
def write_ScalarEncoded(self, attr):
# Add your own code here
self.attr_ScalarEncoded = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarUChar attribute
# -----------------------------------------------------------------
def read_ScalarUChar(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarUChar)
# -----------------------------------------------------------------
# Write ScalarUChar attribute
# -----------------------------------------------------------------
def write_ScalarUChar(self, attr):
# Add your own code here
self.attr_ScalarUChar = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumEncoded attribute
# -----------------------------------------------------------------
def read_SpectrumEncoded(self, attr):
# Add your own code here
self.attr_SpectrumEncoded = self.encodeSpectrum()
attr.set_value(self.attr_SpectrumEncoded[0],
self.attr_SpectrumEncoded[1])
# -----------------------------------------------------------------
# Write SpectrumEncoded attribute
# -----------------------------------------------------------------
def write_SpectrumEncoded(self, attr):
# Add your own code here
self.attr_SpectrumEncoded = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageEncoded attribute
# -----------------------------------------------------------------
def read_ImageEncoded(self, attr):
# Add your own code here
self.attr_ImageEncoded = self.encodeImage()
attr.set_value(self.attr_ImageEncoded[0], self.attr_ImageEncoded[1])
# -----------------------------------------------------------------
# Write ImageEncoded attribute
# -----------------------------------------------------------------
def write_ImageEncoded(self, attr):
# Add your own code here
self.attr_ImageEncoded = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumBoolean attribute
# -----------------------------------------------------------------
def read_SpectrumBoolean(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumBoolean)
# -----------------------------------------------------------------
# Write SpectrumBoolean attribute
# -----------------------------------------------------------------
def write_SpectrumBoolean(self, attr):
# Add your own code here
self.attr_SpectrumBoolean = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumUChar attribute
# -----------------------------------------------------------------
def read_SpectrumUChar(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumUChar)
# -----------------------------------------------------------------
# Write SpectrumUChar attribute
# -----------------------------------------------------------------
def write_SpectrumUChar(self, attr):
# Add your own code here
self.attr_SpectrumUChar = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumShort attribute
# -----------------------------------------------------------------
def read_SpectrumShort(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumShort)
# -----------------------------------------------------------------
# Write SpectrumShort attribute
# -----------------------------------------------------------------
def write_SpectrumShort(self, attr):
# Add your own code here
self.attr_SpectrumShort = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumUShort attribute
# -----------------------------------------------------------------
def read_SpectrumUShort(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumUShort)
# -----------------------------------------------------------------
# Write SpectrumUShort attribute
# -----------------------------------------------------------------
def write_SpectrumUShort(self, attr):
# Add your own code here
self.attr_SpectrumUShort = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumLong attribute
# -----------------------------------------------------------------
def read_SpectrumLong(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumLong)
# -----------------------------------------------------------------
# Write SpectrumLong attribute
# -----------------------------------------------------------------
def write_SpectrumLong(self, attr):
# Add your own code here
self.attr_SpectrumLong = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumULong attribute
# -----------------------------------------------------------------
def read_SpectrumULong(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumULong)
# -----------------------------------------------------------------
# Write SpectrumULong attribute
# -----------------------------------------------------------------
def write_SpectrumULong(self, attr):
# Add your own code here
self.attr_SpectrumULong = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumLong64 attribute
# -----------------------------------------------------------------
def read_SpectrumLong64(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumLong64)
# -----------------------------------------------------------------
# Write SpectrumLong64 attribute
# -----------------------------------------------------------------
def write_SpectrumLong64(self, attr):
# Add your own code here
self.attr_SpectrumLong64 = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumULong64 attribute
# -----------------------------------------------------------------
def read_SpectrumULong64(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumULong64)
# -----------------------------------------------------------------
# Write SpectrumULong64 attribute
# -----------------------------------------------------------------
def write_SpectrumULong64(self, attr):
# Add your own code here
self.attr_SpectrumULong64 = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumFloat attribute
# -----------------------------------------------------------------
def read_SpectrumFloat(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumFloat)
# -----------------------------------------------------------------
# Write SpectrumFloat attribute
# -----------------------------------------------------------------
def write_SpectrumFloat(self, attr):
# Add your own code here
self.attr_SpectrumFloat = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumDouble attribute
# -----------------------------------------------------------------
def read_SpectrumDouble(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumDouble)
# -----------------------------------------------------------------
# Write SpectrumDouble attribute
# -----------------------------------------------------------------
def write_SpectrumDouble(self, attr):
# Add your own code here
self.attr_SpectrumDouble = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumString attribute
# -----------------------------------------------------------------
def read_SpectrumString(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumString)
# -----------------------------------------------------------------
# Write SpectrumString attribute
# -----------------------------------------------------------------
def write_SpectrumString(self, attr):<|fim▁hole|> # -----------------------------------------------------------------
# Read ImageBoolean attribute
# -----------------------------------------------------------------
def read_ImageBoolean(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageBoolean)
# -----------------------------------------------------------------
# Write ImageBoolean attribute
# -----------------------------------------------------------------
def write_ImageBoolean(self, attr):
# Add your own code here
self.attr_ImageBoolean = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageUChar attribute
# -----------------------------------------------------------------
def read_ImageUChar(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageUChar)
# -----------------------------------------------------------------
# Write ImageUChar attribute
# -----------------------------------------------------------------
def write_ImageUChar(self, attr):
self.attr_ImageUChar = attr.get_write_value()
# Add your own code here
# -----------------------------------------------------------------
# Read ImageShort attribute
# -----------------------------------------------------------------
def read_ImageShort(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageShort)
# -----------------------------------------------------------------
# Write ImageShort attribute
# -----------------------------------------------------------------
def write_ImageShort(self, attr):
# Add your own code here
self.attr_ImageShort = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageUShort attribute
# -----------------------------------------------------------------
def read_ImageUShort(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageUShort)
# -----------------------------------------------------------------
# Write ImageUShort attribute
# -----------------------------------------------------------------
def write_ImageUShort(self, attr):
# Add your own code here
self.attr_ImageUShort = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageLong attribute
# -----------------------------------------------------------------
def read_ImageLong(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageLong)
# -----------------------------------------------------------------
# Write ImageLong attribute
# -----------------------------------------------------------------
def write_ImageLong(self, attr):
# Add your own code here
self.attr_ImageLong = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageULong attribute
# -----------------------------------------------------------------
def read_ImageULong(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageULong)
# -----------------------------------------------------------------
# Write ImageULong attribute
# -----------------------------------------------------------------
def write_ImageULong(self, attr):
# Add your own code here
self.attr_ImageULong = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageLong64 attribute
# -----------------------------------------------------------------
def read_ImageLong64(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageLong64)
# -----------------------------------------------------------------
# Write ImageLong64 attribute
# -----------------------------------------------------------------
def write_ImageLong64(self, attr):
# Add your own code here
self.attr_ImageLong64 = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageULong64 attribute
# -----------------------------------------------------------------
def read_ImageULong64(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageULong64)
# -----------------------------------------------------------------
# Write ImageULong64 attribute
# -----------------------------------------------------------------
def write_ImageULong64(self, attr):
# Add your own code here
self.attr_ImageULong64 = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageFloat attribute
# -----------------------------------------------------------------
def read_ImageFloat(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageFloat)
# -----------------------------------------------------------------
# Write ImageFloat attribute
# -----------------------------------------------------------------
def write_ImageFloat(self, attr):
# Add your own code here
self.attr_ImageFloat = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageDouble attribute
# -----------------------------------------------------------------
def read_ImageDouble(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageDouble)
# -----------------------------------------------------------------
# Write ImageDouble attribute
# -----------------------------------------------------------------
def write_ImageDouble(self, attr):
# Add your own code here
self.attr_ImageDouble = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageString attribute
# -----------------------------------------------------------------
def read_ImageString(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageString)
# -----------------------------------------------------------------
# Write ImageString attribute
# -----------------------------------------------------------------
def write_ImageString(self, attr):
# Add your own code here
self.attr_ImageString = attr.get_write_value()
# =================================================================
#
# SimpleServer command methods
#
# =================================================================
#
# -----------------------------------------------------------------
# GetBoolean command:
#
# Description: Returns ScalarBoolean
#
# argout: DevBoolean ScalarBoolean
# -----------------------------------------------------------------
def GetBoolean(self):
# Add your own code here
return self.attr_ScalarBoolean
# -----------------------------------------------------------------
# GetShort command:
#
# Description: Returns ScalarShort
#
# argout: DevShort ScalarShort
# -----------------------------------------------------------------
def GetShort(self):
# Add your own code here
return self.attr_ScalarShort
# -----------------------------------------------------------------
# GetLong command:
#
# Description: Returns ScalarLong
#
# argout: DevLong ScalarLong
# -----------------------------------------------------------------
def GetLong(self):
# Add your own code here
return self.attr_ScalarLong
# -----------------------------------------------------------------
# GetLong64 command:
#
# Description: Returns ScalarLong64
#
# argout: DevLong64 ScalarLong64
# -----------------------------------------------------------------
def GetLong64(self):
# Add your own code here
return self.attr_ScalarLong64
# -----------------------------------------------------------------
# GetFloat command:
#
# Description: Returns ScalarFloat
#
# argout: DevFloat ScalarFloat
# -----------------------------------------------------------------
def GetFloat(self):
# Add your own code here
return self.attr_ScalarFloat
# -----------------------------------------------------------------
# GetDouble command:
#
# Description: Returns ScalarDouble
#
# argout: DevDouble ScalarDouble
# -----------------------------------------------------------------
def GetDouble(self):
# Add your own code here
return self.attr_ScalarDouble
# -----------------------------------------------------------------
# GetUShort command:
#
# Description: Returns ScalarUShort
#
# argout: DevUShort ScalarUShort
# -----------------------------------------------------------------
def GetUShort(self):
# Add your own code here
return self.attr_ScalarUShort
# -----------------------------------------------------------------
# GetULong command:
#
# Description: Returns ScalarULong
#
# argout: DevULong ScalarULong
# -----------------------------------------------------------------
def GetULong(self):
# Add your own code here
return self.attr_ScalarULong
# -----------------------------------------------------------------
# GetULong64 command:
#
# Description: Returns ScalarULong64
#
# argout: DevULong64 ScalarULong64
# -----------------------------------------------------------------
def GetULong64(self):
# Add your own code here
return self.attr_ScalarULong64
# -----------------------------------------------------------------
# GetString command:
#
# Description: Returns ScalarString
#
# argout: DevString ScalarString
# -----------------------------------------------------------------
def GetString(self):
# Add your own code here
return self.attr_ScalarString
# -----------------------------------------------------------------
# CreateDataSource command:
#
# -----------------------------------------------------------------
def CreateAttribute(self, name):
# Add your own code here
attr = PyTango.Attr(name, PyTango.DevString, PyTango.READ_WRITE)
self.add_attribute(attr, self.read_General, self.write_General)
def read_General(self, attr):
attr.set_value(self.attr_value)
def write_General(self, attr):
self.attr_value = attr.get_write_value()
# =================================================================
#
# TestServerClass class definition
#
# =================================================================
class TestServerClass(PyTango.DeviceClass):
# Class Properties
class_property_list = {
}
# Device Properties
device_property_list = {
'StringList':
[PyTango.DevVarStringArray,
"element names",
[]],
}
# Command definitions
cmd_list = {
'SetState':
[[PyTango.DevString, "ScalarString"],
[PyTango.DevVoid, ""]],
'CreateAttribute':
[[PyTango.DevString, "ScalarString"],
[PyTango.DevVoid, ""]],
'ChangeValueType':
[[PyTango.DevString, "ScalarString"],
[PyTango.DevVoid, ""]],
'GetBoolean':
[[PyTango.DevVoid, ""],
[PyTango.DevBoolean, "ScalarBoolean"]],
'GetShort':
[[PyTango.DevVoid, ""],
[PyTango.DevShort, "ScalarShort"]],
'GetLong':
[[PyTango.DevVoid, ""],
[PyTango.DevLong, "ScalarLong"]],
'GetLong64':
[[PyTango.DevVoid, ""],
[PyTango.DevLong64, "ScalarLong64"]],
'GetFloat':
[[PyTango.DevVoid, ""],
[PyTango.DevFloat, "ScalarFloat"]],
'GetDouble':
[[PyTango.DevVoid, ""],
[PyTango.DevDouble, "ScalarDouble"]],
'GetUShort':
[[PyTango.DevVoid, ""],
[PyTango.DevUShort, "ScalarUShort"]],
'GetULong':
[[PyTango.DevVoid, ""],
[PyTango.DevULong, "ScalarULong"]],
'GetULong64':
[[PyTango.DevVoid, ""],
[PyTango.DevULong64, "ScalarULong64"]],
'GetString':
[[PyTango.DevVoid, ""],
[PyTango.DevString, "ScalarString"]],
}
# Attribute definitions
attr_list = {
'ScalarLong':
[[PyTango.DevLong,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "test long scalar attribute",
}],
'ScalarBoolean':
[[PyTango.DevBoolean,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "test scalar bool attribute",
}],
'ScalarShort':
[[PyTango.DevShort,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "Scalar Short attribute",
}],
'ScalarUShort':
[[PyTango.DevUShort,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ScalarUShort attribute",
}],
'ScalarULong':
[[PyTango.DevULong,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ScalarULong attribute",
}],
'ScalarLong64':
[[PyTango.DevLong64,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ScalarLong64 attribute",
}],
'ScalarULong64':
[[PyTango.DevULong64,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ScalarULong64 attribute",
}],
'ScalarFloat':
[[PyTango.DevFloat,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ScalarFloat attribute",
}],
'ScalarDouble':
[[PyTango.DevDouble,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ScalarDouble attribute",
}],
'ScalarString':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ScalarString attribute",
}],
'ScalarEncoded':
[[PyTango.DevEncoded,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ScalarEncoded attribute",
}],
'ScalarUChar':
[[PyTango.DevUChar,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ScalarUChar attribute",
}],
'SpectrumEncoded':
[[PyTango.DevEncoded,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "SpectrumEncoded attribute",
}],
'ImageEncoded':
[[PyTango.DevEncoded,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ImageEncoded attribute",
}],
'SpectrumBoolean':
[[PyTango.DevBoolean,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumBoolean attribute",
}],
'SpectrumUChar':
[[PyTango.DevUChar,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumUChar attribute",
}],
'SpectrumShort':
[[PyTango.DevShort,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumShort attribute",
}],
'SpectrumUShort':
[[PyTango.DevUShort,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumUShort",
}],
'SpectrumLong':
[[PyTango.DevLong,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumLong attribute",
}],
'SpectrumULong':
[[PyTango.DevULong,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumULong attribute",
}],
'SpectrumLong64':
[[PyTango.DevLong64,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumLong64 attribute",
}],
'SpectrumULong64':
[[PyTango.DevULong64,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumULong64 attribute",
}],
'SpectrumFloat':
[[PyTango.DevFloat,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumFloat attribute",
}],
'SpectrumDouble':
[[PyTango.DevDouble,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumDouble attribute",
}],
'SpectrumString':
[[PyTango.DevString,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumString attribute",
}],
'ImageBoolean':
[[PyTango.DevBoolean,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageBoolean attribute",
}],
'ImageUChar':
[[PyTango.DevUChar,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageUChar attribute",
}],
'ImageShort':
[[PyTango.DevShort,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageShort attribute",
}],
'ImageUShort':
[[PyTango.DevUShort,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageUShort attribute",
}],
'ImageLong':
[[PyTango.DevLong,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageLong attribute",
}],
'ImageULong':
[[PyTango.DevULong,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageULong attribute",
}],
'ImageLong64':
[[PyTango.DevLong64,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageLong64 attribute",
}],
'ImageULong64':
[[PyTango.DevULong64,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageULong64 attribute",
}],
'ImageFloat':
[[PyTango.DevFloat,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageFloat attribute",
}],
'ImageDouble':
[[PyTango.DevDouble,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageDouble attribute",
}],
'ImageString':
[[PyTango.DevString,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageString attribute",
}],
'Environment':
[[PyTango.DevEncoded,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "Environment attribute",
}],
'DoorList':
[[PyTango.DevString,
PyTango.SPECTRUM,
PyTango.READ_WRITE,
256],
{
'description': "Environment attribute",
}],
}
# -----------------------------------------------------------------
# TestServerClass Constructor
# -----------------------------------------------------------------
def __init__(self, name):
PyTango.DeviceClass.__init__(self, name)
self.set_type(name)
# =================================================================
#
# TestServer class main method
#
# =================================================================
if __name__ == '__main__':
try:
py = PyTango.Util(sys.argv)
py.add_class(TestServerClass, TestServer, 'TestServer')
U = PyTango.Util.instance()
U.server_init()
U.server_run()
except PyTango.DevFailed as e:
print('-------> Received a DevFailed exception: %s' % e)
except Exception as e:
print('-------> An unforeseen exception occured.... %s' % e)<|fim▁end|> | # Add your own code here
self.attr_SpectrumString = attr.get_write_value()
|
<|file_name|>environment.js<|end_file_name|><|fim▁begin|>/* jshint node: true */
module.exports = function(environment) {
var ENV = {
modulePrefix: 'firehon',
environment: environment,
contentSecurityPolicy: { 'connect-src': "'self' wss://*.firebaseio.com" },
firebase: 'https://firehon.firebaseio.com/',
baseURL: '/',
locationType: 'auto',
EmberENV: {<|fim▁hole|> }
},
APP: {
// Here you can pass flags/options to your application instance
// when it is created
}
};
if (environment === 'development') {
// ENV.APP.LOG_RESOLVER = true;
// ENV.APP.LOG_ACTIVE_GENERATION = true;
// ENV.APP.LOG_TRANSITIONS = true;
// ENV.APP.LOG_TRANSITIONS_INTERNAL = true;
// ENV.APP.LOG_VIEW_LOOKUPS = true;
}
if (environment === 'test') {
// Testem prefers this...
ENV.baseURL = '/';
ENV.locationType = 'none';
// keep test console output quieter
ENV.APP.LOG_ACTIVE_GENERATION = false;
ENV.APP.LOG_VIEW_LOOKUPS = false;
ENV.APP.rootElement = '#ember-testing';
}
if (environment === 'production') {
}
return ENV;
};<|fim▁end|> | FEATURES: {
// Here you can enable experimental features on an ember canary build
// e.g. 'with-controller': true |
<|file_name|>view_conversion.rs<|end_file_name|><|fim▁begin|>// local imports<|fim▁hole|>use view::MatrixView;
use traits::*;
use sralgebra::MagmaBase;
/// Implements matrix conversion API
impl <'a, T:MagmaBase> Conversion<T> for MatrixView<'a, T> {
/// Converts the view to vector from standard library
fn to_std_vec(&self) -> Vec<T> {
let mut vec: Vec<T> = Vec::with_capacity(self.num_cells());
// We iterate over elements in matrix and push in the vector
let ptr = self.matrix().as_ptr();
for c in 0..self.num_cols(){
for r in 0..self.num_rows(){
let offset = self.cell_to_offset(r, c);
vec.push(unsafe{*ptr.offset(offset)});
}
}
vec
}
}
#[cfg(test)]
mod test{
use traits::*;
use constructors::*;
#[test]
fn test_view_to_scalar(){
let m = matrix_rw_i32(3, 3, &[1, 2, 3,
4, 5, 6,
7, 8, 9]);
let v = m.view(2,1, 1, 1);
assert_eq!(v.to_scalar(), 8);
}
}<|fim▁end|> | |
<|file_name|>razorback_felspur.py<|end_file_name|><|fim▁begin|>import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
mobileTemplate = MobileTemplate()
mobileTemplate.setCreatureName('desert_razorback_felspur')
mobileTemplate.setLevel(24)
mobileTemplate.setDifficulty(Difficulty.NORMAL)
mobileTemplate.setMinSpawnDistance(4)
mobileTemplate.setMaxSpawnDistance(8)
mobileTemplate.setDeathblow(False)
mobileTemplate.setScale(1)
mobileTemplate.setMeatType("Herbivore Meat")
mobileTemplate.setMeatAmount(65)
mobileTemplate.setHideType("Leathery Hide")
mobileTemplate.setBoneAmount(40)
mobileTemplate.setBoneType("Animal Bone")
mobileTemplate.setHideAmount(25)
mobileTemplate.setSocialGroup("zucca Boar")
mobileTemplate.setAssistRange(6)
mobileTemplate.setStalker(False)
mobileTemplate.setOptionsBitmask(Options.ATTACKABLE)
templates = Vector()
templates.add('object/mobile/shared_zucca_boar.iff')
mobileTemplate.setTemplates(templates)
weaponTemplates = Vector()
weapontemplate = WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic')
weaponTemplates.add(weapontemplate)
mobileTemplate.setWeaponTemplateVector(weaponTemplates)
<|fim▁hole|> attacks.add('bm_charge_2')
attacks.add('bm_dampen_pain_2')
attacks.add('bm_slash_2')
mobileTemplate.setDefaultAttack('creatureMeleeAttack')
mobileTemplate.setAttacks(attacks)
core.spawnService.addMobileTemplate('razorback_felspur', mobileTemplate)
return<|fim▁end|> | attacks = Vector() |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Platform-specific extensions to `std` for Windows.
//!
//! Provides access to platform-level information for Windows, and exposes
//! Windows-specific idioms that would otherwise be inappropriate as part of
//! the core `std` library. These extensions allow developers to use
//! `std` types and idioms with Windows in a way that the normal
//! platform-agnostic idioms would not normally support.
#![stable(feature = "rust1", since = "1.0.0")]
#![doc(cfg(windows))]
pub mod ffi;
pub mod fs;
pub mod io;
pub mod process;
pub mod raw;
pub mod thread;
/// A prelude for conveniently writing platform-specific code.
///
/// Includes all extension traits, and some important type definitions.
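///
/// # Examples
///
/// A minimal sketch of bringing the extension traits into scope (illustrative only):
///
/// ```no_run
/// use std::fs::File;
/// use std::os::windows::prelude::*;
///
/// let file = File::open("foo.txt").unwrap();
/// // `as_raw_handle` comes from the `AsRawHandle` trait re-exported by this prelude.
/// let _handle = file.as_raw_handle();
/// ```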
#[stable(feature = "rust1", since = "1.0.0")]
pub mod prelude {<|fim▁hole|> #[stable(feature = "file_offset", since = "1.15.0")]
pub use super::fs::FileExt;
#[doc(no_inline)]
#[stable(feature = "rust1", since = "1.0.0")]
pub use super::fs::{MetadataExt, OpenOptionsExt};
#[doc(no_inline)]
#[stable(feature = "rust1", since = "1.0.0")]
pub use super::io::{
AsHandle, AsSocket, BorrowedHandle, BorrowedSocket, FromRawHandle, FromRawSocket,
HandleOrInvalid, IntoRawHandle, IntoRawSocket, OwnedHandle, OwnedSocket,
};
#[doc(no_inline)]
#[stable(feature = "rust1", since = "1.0.0")]
pub use super::io::{AsRawHandle, AsRawSocket, RawHandle, RawSocket};
}<|fim▁end|> | #[doc(no_inline)]
#[stable(feature = "rust1", since = "1.0.0")]
pub use super::ffi::{OsStrExt, OsStringExt};
#[doc(no_inline)] |
<|file_name|>test_data_sources.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.scenario.data_processing.client_tests import base
from tempest import test
from tempest_lib.common.utils import data_utils
class DataSourceTest(base.BaseDataProcessingTest):
def _check_data_source_create(self, source_body):
source_name = data_utils.rand_name('sahara-data-source')
# create data source
resp_body = self.create_data_source(source_name, **source_body)
# check that source created successfully
self.assertEqual(source_name, resp_body.name)
if source_body['type'] == 'swift':
source_body = self.swift_data_source
self.assertDictContainsSubset(source_body, resp_body.__dict__)
return resp_body.id, source_name
def _check_data_source_list(self, source_id, source_name):
# check for data source in list
source_list = self.client.data_sources.list()
sources_info = [(source.id, source.name) for source in source_list]
self.assertIn((source_id, source_name), sources_info)
def _check_data_source_get(self, source_id, source_name, source_body):
# check data source fetch by id
source = self.client.data_sources.get(source_id)
self.assertEqual(source_name, source.name)
self.assertDictContainsSubset(source_body, source.__dict__)
def _check_data_source_delete(self, source_id):
# delete data source
self.client.data_sources.delete(source_id)
# check that data source really deleted
source_list = self.client.data_sources.list()
self.assertNotIn(source_id, [source.id for source in source_list])
@test.services('data_processing')
def test_swift_data_source(self):
# Create extra self.swift_data_source variable to use for comparison to
# data source response body because response body has no 'credentials'<|fim▁hole|> self.swift_data_source = self.swift_data_source_with_creds.copy()
del self.swift_data_source['credentials']
source_id, source_name = self._check_data_source_create(
self.swift_data_source_with_creds)
self._check_data_source_list(source_id, source_name)
self._check_data_source_get(source_id, source_name,
self.swift_data_source)
self._check_data_source_delete(source_id)
@test.services('data_processing')
def test_local_hdfs_data_source(self):
source_id, source_name = self._check_data_source_create(
self.local_hdfs_data_source)
self._check_data_source_list(source_id, source_name)
self._check_data_source_get(source_id, source_name,
self.local_hdfs_data_source)
self._check_data_source_delete(source_id)
@test.services('data_processing')
def test_external_hdfs_data_source(self):
source_id, source_name = self._check_data_source_create(
self.external_hdfs_data_source)
self._check_data_source_list(source_id, source_name)
self._check_data_source_get(source_id, source_name,
self.external_hdfs_data_source)
self._check_data_source_delete(source_id)<|fim▁end|> | # field. |
<|file_name|>active_attempt.rs<|end_file_name|><|fim▁begin|>use splits::Splits;
use attempt::Attempt;
use stopwatch::Stopwatch;
use segment_attempt::SegmentAttempt;
use chrono::Duration;
use std::rc::Rc;
#[derive(Eq, PartialEq)]
pub enum AttemptState {
NotRunning,
Running,
Ended,
}
use self::AttemptState::*;
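/// Tracks a single run through a set of splits. Typical lifecycle (sketch based on
/// the methods below): `start()` begins timing, each `split()` records the current
/// stopwatch time and advances (entering `Ended` on the final segment), `skip()`
/// advances without a time, `undo()` steps back, and `reset()` consumes the
/// attempt and returns the recorded `Attempt`.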
pub struct ActiveAttempt {
pub state: AttemptState,
pub split_index: usize,
pub attempt: Attempt,
pub stopwatch: Stopwatch,
}
impl ActiveAttempt {
pub fn new(splits: Rc<Splits>, id: usize) -> ActiveAttempt {
ActiveAttempt {
state: NotRunning,
split_index: 0,
attempt: Attempt::new(splits, id),
stopwatch: Stopwatch::new(),
}
}
pub fn is_running(&self) -> bool {
self.state == Running
}
pub fn is_started(&self) -> bool {
match self.state {
NotRunning => false,
_ => true,
}
}
pub fn get_current_segment(&self) -> Option<&SegmentAttempt> {
if self.is_running() {
Some(&self.attempt.segments[self.split_index])
} else {
None
}
}
pub fn start(&mut self) {
if self.state == NotRunning {
self.stopwatch.start();
self.state = Running;
}
}
<|fim▁hole|> self.state = Ended;
} else {
self.split_index += 1;
}
result
}
pub fn split(&mut self) -> Option<&SegmentAttempt> {
if self.is_running() {
let time = Some(self.stopwatch.get_time());
self.move_to_next_segment(time)
} else {
None
}
}
pub fn skip(&mut self) -> Option<&SegmentAttempt> {
if self.is_running() && self.split_index < self.attempt.segments.len() - 1 {
self.move_to_next_segment(None)
} else {
None
}
}
pub fn undo(&mut self) -> Option<&SegmentAttempt> {
if self.split_index > 0 {
if self.state == Ended {
self.state = Running;
} else {
self.split_index -= 1;
}
self.attempt.segments[self.split_index].time = None;
Some(&self.attempt.segments[self.split_index])
} else {
None
}
}
pub fn reset(self) -> Attempt {
self.attempt
}
}<|fim▁end|> | fn move_to_next_segment(&mut self, time: Option<Duration>) -> Option<&SegmentAttempt> {
self.attempt.segments[self.split_index].time = time;
let result = Some(&self.attempt.segments[self.split_index]);
if self.split_index == self.attempt.segments.len() - 1 { |
<|file_name|>state_db.rs<|end_file_name|><|fim▁begin|>// CITA
// Copyright 2016-2017 Cryptape Technologies LLC.
// This program is free software: you can redistribute it
// and/or modify it under the terms of the GNU General Public
// License as published by the Free Software Foundation,
// either version 3 of the License, or (at your option) any
// later version.
// This program is distributed in the hope that it will be
// useful, but WITHOUT ANY WARRANTY; without even the implied
// warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
// PURPOSE. See the GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.<|fim▁hole|>
use state::backend::*;
use util::{JournalDB, DBTransaction, H256, UtilError, HashDB};
pub struct StateDB {
/// Backing database.
db: Box<JournalDB>,
}
impl StateDB {
pub fn new(db: Box<JournalDB>) -> StateDB {
StateDB { db: db }
}
/// Clone the database.
pub fn boxed_clone(&self) -> StateDB {
StateDB { db: self.db.boxed_clone() }
}
/// Journal all recent operations under the given era and ID.
pub fn journal_under(&mut self, batch: &mut DBTransaction, now: u64, id: &H256) -> Result<u32, UtilError> {
self.db.journal_under(batch, now, id)
}
/// Returns underlying `JournalDB`.
pub fn journal_db(&self) -> &JournalDB {
&*self.db
}
}
impl Backend for StateDB {
fn as_hashdb(&self) -> &HashDB {
self.db.as_hashdb()
}
fn as_hashdb_mut(&mut self) -> &mut HashDB {
self.db.as_hashdb_mut()
}
}<|fim▁end|> | |
<|file_name|>test_nested.py<|end_file_name|><|fim▁begin|>from pylastica.query import Query
from pylastica.aggregation.min import Min
from pylastica.aggregation.nested import Nested
from pylastica.doc_type.mapping import Mapping
from pylastica.document import Document
from tests.base import Base
__author__ = 'Joe Linn'
import unittest
class NestedTest(unittest.TestCase, Base):
def setUp(self):
super(NestedTest, self).setUp()
self._index = self._create_index("test_aggregation_nested")
mapping = Mapping()
mapping.set_properties({
"resellers": {
"type": "nested",
"properties": {
"name": {"type": "string"},
"price": {"type": "double"}
}
}
})
doc_type = self._index.get_doc_type("test")
doc_type.mapping = mapping
docs = [
Document(1, {
"resellers": {
"name": "spacely sprockets",
"price": 5.55
}
}),
Document(2, {
"resellers": {
"name": "cogswell cogs",<|fim▁hole|> doc_type.add_documents(docs)
self._index.refresh()
def tearDown(self):
super(NestedTest, self).tearDown()
self._index.delete()
def test_nested_aggregation(self):
agg = Nested("resellers", "resellers")
agg.add_aggregation(Min("min_price").set_field("price"))
query = Query()
query.add_aggregation(agg)
results = self._index.search(query).aggregations['resellers']
self.assertEqual(4.98, results['min_price']['value'])
if __name__ == '__main__':
unittest.main()<|fim▁end|> | "price": 4.98
}
})
] |
<|file_name|>t_PiecewiseHermiteEvaluationImplementation_std.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
from openturns import *
ref = NumericalMathFunction("x", "sin(x)")
size = 12
locations = NumericalPoint(size)
values = NumericalPoint(size)
derivatives = NumericalPoint(size)
# Build locations/values/derivatives with non-increasing locations
for i in range(size):
locations[i] = 10.0 * i * i / (size - 1.0) / (size - 1.0)
values[i] = ref([locations[i]])[0]
derivatives[i] = ref.gradient([locations[i]])[0, 0]
evaluation = PiecewiseHermiteEvaluationImplementation(
locations, values, derivatives)<|fim▁hole|> x = [-1.0 + 12.0 * i / (2.0 * size - 1.0)]
print "f( %.12g )=" % x[0], evaluation(x), ", ref=", ref(x)<|fim▁end|> | print "evaluation=", evaluation
# Check the values
for i in range(2 * size): |
<|file_name|>ExecutorDatabaseProducer.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2013 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.executor.cdi;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.inject.Produces;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import javax.persistence.PersistenceUnit;
import org.jbpm.shared.services.impl.TransactionalCommandService;
@ApplicationScoped
public class ExecutorDatabaseProducer {
private EntityManagerFactory emf;
@PersistenceUnit(unitName = "org.jbpm.executor")
@ApplicationScoped
@Produces<|fim▁hole|> // this needs to be here for non EE containers
this.emf = Persistence.createEntityManagerFactory("org.jbpm.executor");
}
return this.emf;
}
@Produces
public TransactionalCommandService produceCommandService(EntityManagerFactory emf) {
return new TransactionalCommandService(emf);
}
}<|fim▁end|> | public EntityManagerFactory getEntityManagerFactory() {
if (this.emf == null) { |
<|file_name|>slider.js<|end_file_name|><|fim▁begin|>$.widget("metro.slider", {
version: "3.0.0",
options: {
position: 0,
accuracy: 0,
color: 'default',
completeColor: 'default',
markerColor: 'default',
colors: false,
showHint: false,
permanentHint: false,
hintPosition: 'top',
vertical: false,
min: 0,
max: 100,
animate: true,
minValue: 0,
maxValue: 100,
currValue: 0,
returnType: 'value',
target: false,
onChange: function(value, slider){},
_slider : {
vertical: false,
offset: 0,
length: 0,
marker: 0,
ppp: 0,
start: 0,
stop: 0
}
},
_create: function(){
var that = this,
element = this.element;
var o = this.options,
s = o._slider;
$.each(element.data(), function(key, value){
if (key in o) {
try {
o[key] = $.parseJSON(value);
} catch (e) {
o[key] = value;
}
}
});
o.accuracy = o.accuracy < 0 ? 0 : o.accuracy;
o.min = o.min < 0 ? 0 : o.min;
o.min = o.min > o.max ? o.max : o.min;
o.max = o.max > 100 ? 100 : o.max;
o.max = o.max < o.min ? o.min : o.max;
o.position = this._correctValue(element.data('position') > o.min ? (element.data('position') > o.max ? o.max : element.data('position')) : o.min);
o.colors = o.colors ? o.colors.split(",") : false;
s.vertical = o.vertical;
if (o.vertical && !element.hasClass('vertical')) {
element.addClass('vertical');
}
if (o.permanentHint && !element.hasClass('permanent-hint')) {
element.addClass('permanent-hint');
}
if (!o.vertical && o.hintPosition === 'bottom') {
element.addClass('hint-bottom');
}
if (o.vertical && o.hintPosition === 'left') {
element.addClass('hint-left');
}
this._createSlider();
this._initPoints();
this._placeMarker(o.position);
var event_down = isTouchDevice() ? 'touchstart' : 'mousedown';
element.children('.marker').on(event_down, function (e) {
e.preventDefault();
that._startMoveMarker(e);
});
element.on(event_down, function (e) {
e.preventDefault();
that._startMoveMarker(e);
});
element.data('slider', this);
},
_startMoveMarker: function(e){
var element = this.element, o = this.options, that = this, hint = element.children('.slider-hint');
var returnedValue;
var event_move = isTouchDevice() ? 'touchmove' : 'mousemove';
var event_up = isTouchDevice() ? 'touchend' : 'mouseup mouseleave';
$(element).on(event_move, function (event) {
that._movingMarker(event);
if (!element.hasClass('permanent-hint')) {
hint.css('display', 'block');
}
});
$(element).on(event_up, function () {
$(element).off(event_move);
$(element).off(event_up);
element.data('value', o.position);
element.trigger('changed', o.position);
returnedValue = o.returnType === 'value' ? that._valueToRealValue(o.position) : o.position;
if (!element.hasClass('permanent-hint')) {
hint.css('display', 'none');
}
});
this._initPoints();
this._movingMarker(e);
},
_movingMarker: function (ev) {
var element = this.element, o = this.options;
var cursorPos,
percents,
valuePix,
vertical = o._slider.vertical,
sliderOffset = o._slider.offset,
sliderStart = o._slider.start,
sliderEnd = o._slider.stop,
sliderLength = o._slider.length,
markerSize = o._slider.marker;
var event = !isTouchDevice() ? ev.originalEvent : ev.originalEvent.touches[0];
//console.log(event);
if (vertical) {
cursorPos = event.pageY - sliderOffset;
} else {
cursorPos = event.pageX - sliderOffset;
}
if (cursorPos < sliderStart) {
cursorPos = sliderStart;
} else if (cursorPos > sliderEnd) {
cursorPos = sliderEnd;
}
if (vertical) {
valuePix = sliderLength - cursorPos - markerSize / 2;
} else {
valuePix = cursorPos - markerSize / 2;
}
percents = this._pixToPerc(valuePix);
this._placeMarker(percents);
o.currValue = this._valueToRealValue(percents);
o.position = percents;
var returnedValue = o.returnType === 'value' ? this._valueToRealValue(o.position) : o.position;
if (o.target) {
$(o.target).val(returnedValue);
}
if (typeof o.onChange === 'function') {
o.onChange(returnedValue, element);
} else {
if (typeof window[o.onChange] === 'function') {
window[o.onChange](returnedValue, element);
} else {
var result = eval("(function(){"+o.onChange+"})");
result.call(returnedValue, element);
}
}
},
_placeMarker: function (value) {
var size, size2, o = this.options, colorParts, colorIndex = 0, colorDelta, element = this.element,
marker = this.element.children('.marker'),
complete = this.element.children('.complete'),
hint = this.element.children('.slider-hint'), hintValue,
oldPos = this._percToPix(o.position);
colorParts = o.colors.length;
colorDelta = o._slider.length / colorParts;
if (o._slider.vertical) {
var oldSize = this._percToPix(o.position) + o._slider.marker,
oldSize2 = o._slider.length - oldSize;
size = this._percToPix(value) + o._slider.marker;
size2 = o._slider.length - size;
this._animate(marker.css('top', oldSize2),{top: size2});
this._animate(complete.css('height', oldSize),{height: size});
if (colorParts) {
colorIndex = Math.round(size / colorDelta)-1;
complete.css('background-color', o.colors[colorIndex<0?0:colorIndex]);
}
if (o.showHint) {
hintValue = this._valueToRealValue(value);
hint.html(hintValue).css('top', size2 - hint.height()/2 + (element.hasClass('large') ? 8 : 0));
}
} else {
size = this._percToPix(value);
this._animate(marker.css('left', oldPos),{left: size});
this._animate(complete.css('width', oldPos),{width: size});
if (colorParts) {
colorIndex = Math.round(size / colorDelta)-1;
complete.css('background-color', o.colors[colorIndex<0?0:colorIndex]);
}
if (o.showHint) {
hintValue = this._valueToRealValue(value);
hint.html(hintValue).css({left: size - hint.width() / 2 + (element.hasClass('large') ? 6 : 0)});
}
}
},
_valueToRealValue: function(value){
var o = this.options;
var real_value;
var percent_value = (o.maxValue - o.minValue) / 100;
real_value = value * percent_value + o.minValue;
return Math.round(real_value);
},
_animate: function (obj, val) {
var o = this.options;
if(o.animate) {
obj.stop(true).animate(val);
} else {
obj.css(val);
}
},
_pixToPerc: function (valuePix) {
var valuePerc;
valuePerc = valuePix * this.options._slider.ppp;
return Math.round(this._correctValue(valuePerc));<|fim▁hole|> _percToPix: function (value) {
if (this.options._slider.ppp === 0) {
return 0;
}
return Math.round(value / this.options._slider.ppp);
},
_correctValue: function (value) {
var o = this.options;
var accuracy = o.accuracy;
var max = o.max;
var min = o.min;
if (accuracy === 0) {
return value;
}
if (value === max) {
return max;
}
if (value === min) {
return min;
}
value = Math.floor(value / accuracy) * accuracy + Math.round(value % accuracy / accuracy) * accuracy;
if (value > max) {
return max;
}
if (value < min) {
return min;
}
return value;
},
_initPoints: function(){
var o = this.options, s = o._slider, element = this.element;
if (s.vertical) {
s.offset = element.offset().top;
s.length = element.height();
s.marker = element.children('.marker').height();
} else {
s.offset = element.offset().left;
s.length = element.width();
s.marker = element.children('.marker').width();
}
s.ppp = o.max / (s.length - s.marker);
s.start = s.marker / 2;
s.stop = s.length - s.marker / 2;
},
_createSlider: function(){
var element = this.element,
o = this.options,
complete, marker, hint;
element.html('');
complete = $("<div/>").addClass("complete").appendTo(element);
marker = $("<a/>").addClass("marker").appendTo(element);
if (o.showHint) {
hint = $("<span/>").addClass("slider-hint").appendTo(element);
}
if (o.color !== 'default') {
if (o.color.isColor()) {
element.css('background-color', o.color);
} else {
element.addClass(o.color);
}
}
if (o.completeColor !== 'default') {
if (o.completeColor.isColor()) {
complete.css('background-color', o.completeColor);
} else {
complete.addClass(o.completeColor);
}
}
if (o.markerColor !== 'default') {
if (o.markerColor.isColor()) {
marker.css('background-color', o.markerColor);
} else {
marker.addClass(o.markerColor);
}
}
},
value: function (value) {
var element = this.element, o = this.options, returnedValue;
if (typeof value !== 'undefined') {
value = value > o.max ? o.max : value;
value = value < o.min ? o.min : value;
this._placeMarker(parseInt(value));
o.position = parseInt(value);
returnedValue = o.returnType === 'value' ? this._valueToRealValue(o.position) : o.position;
if (typeof o.onChange === 'function') {
o.onChange(returnedValue, element);
} else {
if (typeof window[o.onChange] === 'function') {
window[o.onChange](returnedValue, element);
} else {
var result = eval("(function(){"+o.onChange+"})");
result.call(returnedValue, element);
}
}
return this;
} else {
returnedValue = o.returnType === 'value' ? this._valueToRealValue(o.position) : o.position;
return returnedValue;
}
},
_destroy: function(){},
_setOption: function(key, value){
this._super('_setOption', key, value);
}
});<|fim▁end|> | },
|
<|file_name|>Icon_test.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
const {assert} = chai;
import {Icon} from '../../../../front_end/ui/Icon.js';
describe('Icon', () => {
it('can create an empty instance without issues', () => {
const icon = Icon.create();
assert.strictEqual(icon.tagName, 'SPAN', 'icon span element was not created correctly');
assert.strictEqual(icon.getAttribute('is'), 'ui-icon', 'icon span element "is" attribute was not set correctly');
});
// TODO continue writing tests here or use another describe block
});<|fim▁end|> | // Copyright 2019 The Chromium Authors. All rights reserved. |
<|file_name|>file.rs<|end_file_name|><|fim▁begin|>use std::path::Path;
use std::fs::File;
use std::io::{Read, Write};
use encoding::{Encoding, DecoderTrap, EncoderTrap};
use encoding::all::WINDOWS_1252;
pub fn read_all_text<P: AsRef<Path>>(path: P) -> String {
let mut file = File::open(path).unwrap();
let mut data = String::new();
file.read_to_string(&mut data).unwrap();
data
}
pub fn write_all_text<P: AsRef<Path>>(path: P, text: &str) {
let mut file = File::create(path).unwrap();
file.write_all(text.as_bytes()).unwrap();
}
pub fn read_all_win_1252<P: AsRef<Path>>(path: P) -> String {
let mut file = File::open(path).unwrap();
let mut data = Vec::new();
file.read_to_end(&mut data).unwrap();
WINDOWS_1252.decode(&data, DecoderTrap::Strict).unwrap()
}
pub fn write_all_win_1252<P: AsRef<Path>>(path: P, text: &str) {
let mut file = File::create(path).unwrap();
<|fim▁hole|><|fim▁end|> | let data = WINDOWS_1252.encode(&text, EncoderTrap::Strict).unwrap();
file.write_all(&data).unwrap();
} |
<|file_name|>conf_fixture.py<|end_file_name|><|fim▁begin|># Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.<|fim▁hole|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_config import fixture as config_fixture
from nova import config
from nova import ipv6
from nova import paths
from nova.tests.unit import utils
CONF = cfg.CONF
CONF.import_opt('use_ipv6', 'nova.netconf')
CONF.import_opt('host', 'nova.netconf')
CONF.import_opt('scheduler_driver', 'nova.scheduler.manager')
CONF.import_opt('fake_network', 'nova.network.linux_net')
CONF.import_opt('network_size', 'nova.network.manager')
CONF.import_opt('num_networks', 'nova.network.manager')
CONF.import_opt('floating_ip_dns_manager', 'nova.network.floating_ips')
CONF.import_opt('instance_dns_manager', 'nova.network.floating_ips')
CONF.import_opt('policy_file', 'nova.openstack.common.policy')
CONF.import_opt('compute_driver', 'nova.virt.driver')
CONF.import_opt('api_paste_config', 'nova.wsgi')
class ConfFixture(config_fixture.Config):
"""Fixture to manage global conf settings."""
def setUp(self):
super(ConfFixture, self).setUp()
self.conf.set_default('api_paste_config',
paths.state_path_def('etc/nova/api-paste.ini'))
self.conf.set_default('host', 'fake-mini')
self.conf.set_default('compute_driver',
'nova.virt.fake.SmallFakeDriver')
self.conf.set_default('fake_network', True)
self.conf.set_default('flat_network_bridge', 'br100')
self.conf.set_default('floating_ip_dns_manager',
'nova.tests.unit.utils.dns_manager')
self.conf.set_default('instance_dns_manager',
'nova.tests.unit.utils.dns_manager')
self.conf.set_default('network_size', 8)
self.conf.set_default('num_networks', 2)
self.conf.set_default('use_ipv6', True)
self.conf.set_default('vlan_interface', 'eth0')
self.conf.set_default('auth_strategy', 'noauth')
config.parse_args([], default_config_files=[])
self.conf.set_default('connection', "sqlite://", group='database')
self.conf.set_default('sqlite_synchronous', False, group='database')
self.conf.set_default('fatal_exception_format_errors', True)
self.conf.set_default('enabled', True, 'osapi_v3')
self.conf.set_default('force_dhcp_release', False)
self.conf.set_default('periodic_enable', False)
self.addCleanup(utils.cleanup_dns_managers)
self.addCleanup(ipv6.api.reset_backend)<|fim▁end|> | # All Rights Reserved.
# |
<|file_name|>routes.js<|end_file_name|><|fim▁begin|>export default {
'/': {
component: require('./components/NowPlayingView'),
name: 'NowPlaying'
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>logsgui3.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import sys # provides interaction with the Python interpreter
from functools import partial
from PyQt4 import QtGui # provides the graphic elements
from PyQt4.QtCore import Qt # provides Qt identifiers
from PyQt4.QtGui import QPushButton
try:
from sh import inxi
except:
print(" 'inxi' not found, install it to get this info")
try:
from sh import mhwd
except:
print(" 'mhwd' not found, this is not Manjaro?")
try:
from sh import hwinfo
except:
print(" 'hwinfo' not found")
try:
from sh import free
except:
print(" 'free' not found")
try:
from sh import lsblk
except:
print(" 'lsblk' not found")
try:
from sh import df
except:
print(" 'df' not found")
try:
from sh import blockdev
except:
print(" 'blockdev' not found")
try:
from sh import test
except:
print(" 'test' not found")
try:
from sh import parted
except:
print(" 'parted' not found")
TMP_FILE = "/tmp/mlogsout.txt"
HEADER = '''
===================
|{:^17}| {}
===================
'''
checkbuttons = [
'Inxi',
'Installed g. drivers',
'List all g. drivers',
'Graphic Card Info',
'Memory Info',
'Partitions',
'Free Disk Space',
'Xorg.0',
'Xorg.1',
'pacman.log',
'journalctl - Emergency',
'journalctl - Alert',
'journalctl - Critical',
'journalctl - Failed',
'Open&Rc - rc.log',
]
def look_in_file(file_name, kws):
"""reads a file and returns only the lines that contain one of the keywords"""
with open(file_name) as f:
return "".join(filter(lambda line: any(kw in line for kw in kws), f))
class Window(QtGui.QWidget):
def __init__(self, parent=None):
super(Window, self).__init__(parent)
self.checks = [False]*len(checkbuttons) # initialize all buttons to False
# creates a vertical box layout for the window
vlayout = QtGui.QVBoxLayout()
# creates the checkboxes
for idx, text in enumerate(checkbuttons):
checkbox = QtGui.QCheckBox(text)
# connects the 'stateChanged()' signal with the 'checkbox_state_changed()' slot
checkbox.stateChanged.connect(partial(self.checkbox_state_changed, idx))
vlayout.addWidget(checkbox) # adds the checkbox to the layout
btn = QPushButton("&Show Info ({})".format(TMP_FILE), self)
btn.clicked.connect(self.to_computer)
btn.clicked.connect(self.to_editor)
vlayout.addWidget(btn)
vlayout.addStretch()
self.setLayout(vlayout) # sets the window layout
def checkbox_state_changed(self, idx, state):
self.checks[idx] = state == Qt.Checked
def to_computer(self, text):
f = open(TMP_FILE, 'w') # write mode clears any previous content from the file if it exists
if self.checks[0]:
print("Saving: inxi to file")
f.write(HEADER.format("Inxi -Fxzc0", "Listing computer information"))
try:
f.write(str(inxi('-Fxxxzc0')))
except:
" 'inxi' not found, install it to get this info"
f.write('\n')
if self.checks[1]:
print("Getting info about installed graphical driver")
f.write(HEADER.format("Installed drivers", "Shows which graphic driver is installed"))
try:
f.write(str(mhwd('-li')))
except:
print(" 'mhwd' not found, this is not Manjaro?")
f.write('\n')
if self.checks[2]:
print("Getting list of all drivers supported on detected gpu's")
f.write(HEADER.format("Available drivers", "list of all drivers supported on detected gpu's"))
try:
f.write(str(mhwd('-l')))
except:
print(" 'mhwd' not found, this is not Manjaro?")
# f.write('\n')
if self.checks[3]:
print('hwinfo -graphic card')
# os.system('hwinfo --gfxcard')
f.write(HEADER.format("hwinfo --gfxcard", "Show Graphic Card info"))
try:
f.write(str(hwinfo('--gfxcard')))
except:
print('hwinfo graphic card info error')
f.write('hwinfo graphic card info error')
f.write('\n')
if self.checks[4]:
print('memory info')
# os.system('free -h')
f.write(HEADER.format("Memory Info", "Info about Memory and Swap"))
try:
f.write(str(free(' -h')))
except:
print('memory info error')
f.write('memory info error')
f.write('\n')
if self.checks[5]:
print('disk info')
# os.system('lsblk')
f.write(HEADER.format("Disk Info", "Disks and Partitions"))
try:
f.write(str(lsblk()))
except:
print('lsblk error')
f.write('lsblk error')
f.write('\n')
if self.checks[6]:
print('free disk space')
# os.system('df')
f.write(HEADER.format("Free Disk Space", "Free space per pertition"))
try:
f.write(str(df()))
except:
print('free disk space error')
f.write('free disk space error')
f.write('\n')
if self.checks[7]:
print("Saving: Xorg.0.log to file")
f.write(HEADER.format("Xorg.0.log", "searching for: failed, error & (WW) keywords"))
try:
f.write(look_in_file('/var/log/Xorg.0.log', ['failed', 'error', '(WW)']))
except FileNotFoundError:
print("/var/log/Xorg.0.log not found!")<|fim▁hole|> if self.checks[10]:
print("Saving: Xorg.1.log to file")
f.write(HEADER.format("Xorg.1.log", "searching for: failed, error & (WW) keywords"))
try:
f.write(look_in_file('/var/log/Xorg.1.log', ['failed', 'error', '(WW)']))
except FileNotFoundError:
print("/var/log/Xorg.1.log not found!")
f.write("Xorg.1.log not found!")
f.write('\n')
if self.checks[11]:
print("Saving: pacman.log to file")
f.write(HEADER.format("pacman.log", "searching for: pacsave, pacnew, pacorig keywords"))
try:
f.write(look_in_file('/var/log/pacman.log', ['pacsave', 'pacnew', 'pacorig']))
except FileNotFoundError:
print("/var/log/pacman.log not found, this is not Manjaro or Arch based Linux?")
f.write("pacman.log not found! Not Arch based OS?")
f.write('\n')
if self.checks[12]:
print("Saving: journalctl (emergency) to file")
os.system("journalctl -b > /tmp/journalctl.txt")
f.write(HEADER.format("journalctl.txt", "Searching for: Emergency keywords"))
f.write(look_in_file('/tmp/journalctl.txt', ['emergency', 'Emergency', 'EMERGENCY']))
f.write('\n')
if self.checks[13]:
print("Saving: journalctl (alert) to file")
os.system("journalctl -b > /tmp/journalctl.txt")
f.write(HEADER.format("journalctl.txt", "Searching for: Alert keywords"))
f.write(look_in_file('/tmp/journalctl.txt', ['alert', 'Alert', 'ALERT']))
f.write('\n')
if self.checks[14]:
print("Saving: journalctl (critical) to file")
os.system("journalctl -b > /tmp/journalctl.txt")
f.write(HEADER.format("journalctl.txt", "Searching for: Critical keywords"))
f.write(look_in_file('/tmp/journalctl.txt', ['critical', 'Critical', 'CRITICAL']))
f.write('\n')
if self.checks[15]:
print("Saving: journalctl (failed) to file")
os.system("journalctl -b > /tmp/journalctl.txt")
f.write(HEADER.format("journalctl.txt", "Searching for: Failed keywords"))
f.write(look_in_file('/tmp/journalctl.txt', ['failed', 'Failed', 'FAILED']))
f.write('\n')
if self.checks[16]:
print("Saving: rc.log to file")
f.write(HEADER.format("rc.log", "OpenRc only! searching for: WARNING: keywords"))
try:
f.write(look_in_file('/var/log/rc.log', ['WARNING:']))
except FileNotFoundError:
print("/var/log/rc.log not found! Systemd based OS?")
f.write("rc.log not found! Systemd based OS?")
f.write('\n')
f.close()
def to_editor(self):
os.system("xdg-open "+TMP_FILE)
# creates the application and takes arguments from the command line
application = QtGui.QApplication(sys.argv)
# creates the window and sets its properties
window = Window()
window.setWindowTitle('Manjaro Logs') # title
window.resize(280, 50) # size
window.show() # shows the window
# runs the application and waits for its return value at the end
sys.exit(application.exec_())<|fim▁end|> | f.write("Xorg.0.log not found!")
f.write('\n')
|
<|file_name|>bbtests.py<|end_file_name|><|fim▁begin|>import os
import re
import oeqa.utils.ftools as ftools
from oeqa.selftest.base import oeSelfTest
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
from oeqa.utils.decorators import testcase
class BitbakeTests(oeSelfTest):
def getline(self, res, line):
for l in res.output.split('\n'):
if line in l:
return l
@testcase(789)
def test_run_bitbake_from_dir_1(self):
os.chdir(os.path.join(self.builddir, 'conf'))
self.assertEqual(bitbake('-e').status, 0, msg = "bitbake couldn't run from \"conf\" dir")
@testcase(790)
def test_run_bitbake_from_dir_2(self):
my_env = os.environ.copy()
my_env['BBPATH'] = my_env['BUILDDIR']
os.chdir(os.path.dirname(os.environ['BUILDDIR']))
self.assertEqual(bitbake('-e', env=my_env).status, 0, msg = "bitbake couldn't run from builddir")
@testcase(806)
def test_event_handler(self):
self.write_config("INHERIT += \"test_events\"")
result = bitbake('m4-native')
find_build_started = re.search("NOTE: Test for bb\.event\.BuildStarted(\n.*)*NOTE: Executing RunQueue Tasks", result.output)
find_build_completed = re.search("Tasks Summary:.*(\n.*)*NOTE: Test for bb\.event\.BuildCompleted", result.output)
self.assertTrue(find_build_started, msg = "Match failed in:\n%s" % result.output)
self.assertTrue(find_build_completed, msg = "Match failed in:\n%s" % result.output)
self.assertFalse('Test for bb.event.InvalidEvent' in result.output, msg = "\"Test for bb.event.InvalidEvent\" message found during bitbake process. bitbake output: %s" % result.output)
@testcase(103)
def test_local_sstate(self):
bitbake('m4-native')
bitbake('m4-native -cclean')
result = bitbake('m4-native')
find_setscene = re.search("m4-native.*do_.*_setscene", result.output)
self.assertTrue(find_setscene, msg = "No \"m4-native.*do_.*_setscene\" message found during bitbake m4-native. bitbake output: %s" % result.output )
@testcase(105)
def test_bitbake_invalid_recipe(self):
result = bitbake('-b asdf', ignore_status=True)
self.assertTrue("ERROR: Unable to find any recipe file matching 'asdf'" in result.output, msg = "Though asdf recipe doesn't exist, bitbake didn't output any err. message. bitbake output: %s" % result.output)
@testcase(107)
def test_bitbake_invalid_target(self):
result = bitbake('asdf', ignore_status=True)
self.assertTrue("ERROR: Nothing PROVIDES 'asdf'" in result.output, msg = "Though no 'asdf' target exists, bitbake didn't output any err. message. bitbake output: %s" % result.output)
@testcase(106)
def test_warnings_errors(self):
result = bitbake('-b asdf', ignore_status=True)
find_warnings = re.search("Summary: There w.{2,3}? [1-9][0-9]* WARNING messages* shown", result.output)
find_errors = re.search("Summary: There w.{2,3}? [1-9][0-9]* ERROR messages* shown", result.output)
self.assertTrue(find_warnings, msg="Did not find the mumber of warnings at the end of the build:\n" + result.output)
self.assertTrue(find_errors, msg="Did not find the mumber of errors at the end of the build:\n" + result.output)
@testcase(108)
def test_invalid_patch(self):
# This patch already exists in SRC_URI so adding it again will cause the
# patch to fail.
self.write_recipeinc('man', 'SRC_URI += "file://man-1.5h1-make.patch"')
self.write_config("INHERIT_remove = \"report-error\"")
result = bitbake('man -c patch', ignore_status=True)
self.delete_recipeinc('man')
bitbake('-cclean man')
line = self.getline(result, "Function failed: patch_do_patch")
self.assertTrue(line and line.startswith("ERROR:"), msg = "Repeated patch application didn't fail. bitbake output: %s" % result.output)
@testcase(1354)
def test_force_task_1(self):
# test 1 from bug 5875
test_recipe = 'zlib'
test_data = "Microsoft Made No Profit From Anyone's Zunes Yo"
bb_vars = get_bb_vars(['D', 'PKGDEST', 'mandir'], test_recipe)
image_dir = bb_vars['D']
pkgsplit_dir = bb_vars['PKGDEST']
man_dir = bb_vars['mandir']
bitbake('-c clean %s' % test_recipe)
bitbake('-c package -f %s' % test_recipe)
self.add_command_to_tearDown('bitbake -c clean %s' % test_recipe)
man_file = os.path.join(image_dir + man_dir, 'man3/zlib.3')
ftools.append_file(man_file, test_data)
bitbake('-c package -f %s' % test_recipe)
man_split_file = os.path.join(pkgsplit_dir, 'zlib-doc' + man_dir, 'man3/zlib.3')
man_split_content = ftools.read_file(man_split_file)
self.assertIn(test_data, man_split_content, 'The man file has not changed in packages-split.')
ret = bitbake(test_recipe)
self.assertIn('task do_package_write_rpm:', ret.output, 'Task do_package_write_rpm was not re-executed.')
@testcase(163)
def test_force_task_2(self):
# test 2 from bug 5875
test_recipe = 'zlib'
bitbake(test_recipe)
self.add_command_to_tearDown('bitbake -c clean %s' % test_recipe)
result = bitbake('-C compile %s' % test_recipe)
look_for_tasks = ['do_compile:', 'do_install:', 'do_populate_sysroot:', 'do_package:']
for task in look_for_tasks:
self.assertIn(task, result.output, msg="Couldn't find %s task.")
@testcase(167)
def test_bitbake_g(self):
result = bitbake('-g core-image-minimal')<|fim▁hole|> self.assertTrue('Task dependencies saved to \'task-depends.dot\'' in result.output, msg = "No task dependency \"task-depends.dot\" file was generated for the given task target. bitbake output: %s" % result.output)
self.assertTrue('busybox' in ftools.read_file(os.path.join(self.builddir, 'task-depends.dot')), msg = "No \"busybox\" dependency found in task-depends.dot file.")
@testcase(899)
def test_image_manifest(self):
bitbake('core-image-minimal')
bb_vars = get_bb_vars(["DEPLOY_DIR_IMAGE", "IMAGE_LINK_NAME"], "core-image-minimal")
deploydir = bb_vars["DEPLOY_DIR_IMAGE"]
imagename = bb_vars["IMAGE_LINK_NAME"]
manifest = os.path.join(deploydir, imagename + ".manifest")
self.assertTrue(os.path.islink(manifest), msg="No manifest file created for image. It should have been created in %s" % manifest)
@testcase(168)
def test_invalid_recipe_src_uri(self):
data = 'SRC_URI = "file://invalid"'
self.write_recipeinc('man', data)
self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
SSTATE_DIR = \"${TOPDIR}/download-selftest\"
INHERIT_remove = \"report-error\"
""")
self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
bitbake('-ccleanall man')
result = bitbake('-c fetch man', ignore_status=True)
bitbake('-ccleanall man')
self.delete_recipeinc('man')
self.assertEqual(result.status, 1, msg="Command succeded when it should have failed. bitbake output: %s" % result.output)
self.assertTrue('Fetcher failure: Unable to find file file://invalid anywhere. The paths that were searched were:' in result.output, msg = "\"invalid\" file \
doesn't exist, yet no error message encountered. bitbake output: %s" % result.output)
line = self.getline(result, 'Fetcher failure for URL: \'file://invalid\'. Unable to fetch URL from any source.')
self.assertTrue(line and line.startswith("ERROR:"), msg = "\"invalid\" file \
doesn't exist, yet fetcher didn't report any error. bitbake output: %s" % result.output)
@testcase(171)
def test_rename_downloaded_file(self):
self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
SSTATE_DIR = \"${TOPDIR}/download-selftest\"
""")
self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
data = 'SRC_URI_append = ";downloadfilename=test-aspell.tar.gz"'
self.write_recipeinc('aspell', data)
bitbake('-ccleanall aspell')
result = bitbake('-c fetch aspell', ignore_status=True)
self.delete_recipeinc('aspell')
self.assertEqual(result.status, 0, msg = "Couldn't fetch aspell. %s" % result.output)
dl_dir = get_bb_var("DL_DIR")
self.assertTrue(os.path.isfile(os.path.join(dl_dir, 'test-aspell.tar.gz')), msg = "File rename failed. No corresponding test-aspell.tar.gz file found under %s" % dl_dir)
self.assertTrue(os.path.isfile(os.path.join(dl_dir, 'test-aspell.tar.gz.done')), "File rename failed. No corresponding test-aspell.tar.gz.done file found under %s" % dl_dir)
@testcase(1028)
def test_environment(self):
self.write_config("TEST_ENV=\"localconf\"")
result = runCmd('bitbake -e | grep TEST_ENV=')
self.assertTrue('localconf' in result.output, msg = "bitbake didn't report any value for TEST_ENV variable. To test, run 'bitbake -e | grep TEST_ENV='")
@testcase(1029)
def test_dry_run(self):
result = runCmd('bitbake -n m4-native')
self.assertEqual(0, result.status, "bitbake dry run didn't run as expected. %s" % result.output)
@testcase(1030)
def test_just_parse(self):
result = runCmd('bitbake -p')
self.assertEqual(0, result.status, "errors encountered when parsing recipes. %s" % result.output)
@testcase(1031)
def test_version(self):
result = runCmd('bitbake -s | grep wget')
find = re.search("wget *:([0-9a-zA-Z\.\-]+)", result.output)
self.assertTrue(find, "No version returned for searched recipe. bitbake output: %s" % result.output)
@testcase(1032)
def test_prefile(self):
preconf = os.path.join(self.builddir, 'conf/prefile.conf')
self.track_for_cleanup(preconf)
ftools.write_file(preconf ,"TEST_PREFILE=\"prefile\"")
result = runCmd('bitbake -r conf/prefile.conf -e | grep TEST_PREFILE=')
self.assertTrue('prefile' in result.output, "Preconfigure file \"prefile.conf\"was not taken into consideration. ")
self.write_config("TEST_PREFILE=\"localconf\"")
result = runCmd('bitbake -r conf/prefile.conf -e | grep TEST_PREFILE=')
self.assertTrue('localconf' in result.output, "Preconfigure file \"prefile.conf\"was not taken into consideration.")
@testcase(1033)
def test_postfile(self):
postconf = os.path.join(self.builddir, 'conf/postfile.conf')
self.track_for_cleanup(postconf)
ftools.write_file(postconf , "TEST_POSTFILE=\"postfile\"")
self.write_config("TEST_POSTFILE=\"localconf\"")
result = runCmd('bitbake -R conf/postfile.conf -e | grep TEST_POSTFILE=')
self.assertTrue('postfile' in result.output, "Postconfigure file \"postfile.conf\"was not taken into consideration.")
@testcase(1034)
def test_checkuri(self):
result = runCmd('bitbake -c checkuri m4')
self.assertEqual(0, result.status, msg = "\"checkuri\" task was not executed. bitbake output: %s" % result.output)
@testcase(1035)
def test_continue(self):
self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
SSTATE_DIR = \"${TOPDIR}/download-selftest\"
INHERIT_remove = \"report-error\"
""")
self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
self.write_recipeinc('man',"\ndo_fail_task () {\nexit 1 \n}\n\naddtask do_fail_task before do_fetch\n" )
runCmd('bitbake -c cleanall man xcursor-transparent-theme')
result = runCmd('bitbake -c unpack -k man xcursor-transparent-theme', ignore_status=True)
errorpos = result.output.find('ERROR: Function failed: do_fail_task')
manver = re.search("NOTE: recipe xcursor-transparent-theme-(.*?): task do_unpack: Started", result.output)
continuepos = result.output.find('NOTE: recipe xcursor-transparent-theme-%s: task do_unpack: Started' % manver.group(1))
self.assertLess(errorpos,continuepos, msg = "bitbake didn't pass do_fail_task. bitbake output: %s" % result.output)
@testcase(1119)
def test_non_gplv3(self):
self.write_config('INCOMPATIBLE_LICENSE = "GPLv3"')
result = bitbake('selftest-ed', ignore_status=True)
self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output))
lic_dir = get_bb_var('LICENSE_DIRECTORY')
self.assertFalse(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPLv3')))
self.assertTrue(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPLv2')))
@testcase(1422)
def test_setscene_only(self):
""" Bitbake option to restore from sstate only within a build (i.e. execute no real tasks, only setscene)"""
test_recipe = 'ed'
bitbake(test_recipe)
bitbake('-c clean %s' % test_recipe)
ret = bitbake('--setscene-only %s' % test_recipe)
tasks = re.findall(r'task\s+(do_\S+):', ret.output)
for task in tasks:
self.assertIn('_setscene', task, 'A task different from _setscene ran: %s.\n'
'Executed tasks were: %s' % (task, str(tasks)))
@testcase(1425)
def test_bbappend_order(self):
""" Bitbake should bbappend to recipe in a predictable order """
test_recipe = 'ed'
bb_vars = get_bb_vars(['SUMMARY', 'PV'], test_recipe)
test_recipe_summary_before = bb_vars['SUMMARY']
test_recipe_pv = bb_vars['PV']
recipe_append_file = test_recipe + '_' + test_recipe_pv + '.bbappend'
expected_recipe_summary = test_recipe_summary_before
for i in range(5):
recipe_append_dir = test_recipe + '_test_' + str(i)
recipe_append_path = os.path.join(self.testlayer_path, 'recipes-test', recipe_append_dir, recipe_append_file)
os.mkdir(os.path.join(self.testlayer_path, 'recipes-test', recipe_append_dir))
feature = 'SUMMARY += "%s"\n' % i
ftools.write_file(recipe_append_path, feature)
expected_recipe_summary += ' %s' % i
self.add_command_to_tearDown('rm -rf %s' % os.path.join(self.testlayer_path, 'recipes-test',
test_recipe + '_test_*'))
test_recipe_summary_after = get_bb_var('SUMMARY', test_recipe)
self.assertEqual(expected_recipe_summary, test_recipe_summary_after)<|fim▁end|> | for f in ['pn-buildlist', 'recipe-depends.dot', 'task-depends.dot']:
self.addCleanup(os.remove, f) |
<|file_name|>_models.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.core.exceptions import HttpResponseError
import msrest.serialization
class AddressSpace(msrest.serialization.Model):
"""AddressSpace contains an array of IP address ranges that can be used by subnets of the virtual network.
:param address_prefixes: A list of address blocks reserved for this virtual network in CIDR
notation.
:type address_prefixes: list[str]
"""
_attribute_map = {
'address_prefixes': {'key': 'addressPrefixes', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(AddressSpace, self).__init__(**kwargs)
self.address_prefixes = kwargs.get('address_prefixes', None)
class Resource(msrest.serialization.Model):
"""Common resource representation.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(Resource, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.name = None
self.type = None
self.location = kwargs.get('location', None)
self.tags = kwargs.get('tags', None)
class ApplicationGateway(Resource):
"""Application gateway resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param zones: A list of availability zones denoting where the resource needs to come from.
:type zones: list[str]
:param identity: The identity of the application gateway, if configured.
:type identity: ~azure.mgmt.network.v2019_04_01.models.ManagedServiceIdentity
:param sku: SKU of the application gateway resource.
:type sku: ~azure.mgmt.network.v2019_04_01.models.ApplicationGatewaySku
:param ssl_policy: SSL policy of the application gateway resource.
:type ssl_policy: ~azure.mgmt.network.v2019_04_01.models.ApplicationGatewaySslPolicy
:ivar operational_state: Operational state of the application gateway resource. Possible values
include: "Stopped", "Starting", "Running", "Stopping".
:vartype operational_state: str or
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayOperationalState
:param gateway_ip_configurations: Subnets of the application gateway resource. For default
limits, see `Application Gateway limits
<https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits>`_.
:type gateway_ip_configurations:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayIPConfiguration]
:param authentication_certificates: Authentication certificates of the application gateway
resource. For default limits, see `Application Gateway limits
<https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits>`_.
:type authentication_certificates:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayAuthenticationCertificate]
:param trusted_root_certificates: Trusted Root certificates of the application gateway
resource. For default limits, see `Application Gateway limits
<https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits>`_.
:type trusted_root_certificates:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayTrustedRootCertificate]
:param ssl_certificates: SSL certificates of the application gateway resource. For default
limits, see `Application Gateway limits
<https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits>`_.
:type ssl_certificates:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewaySslCertificate]
:param frontend_ip_configurations: Frontend IP addresses of the application gateway resource.
For default limits, see `Application Gateway limits
<https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits>`_.
:type frontend_ip_configurations:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayFrontendIPConfiguration]
:param frontend_ports: Frontend ports of the application gateway resource. For default limits,
see `Application Gateway limits
<https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits>`_.
:type frontend_ports:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayFrontendPort]
:param probes: Probes of the application gateway resource.
:type probes: list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayProbe]
:param backend_address_pools: Backend address pool of the application gateway resource. For
default limits, see `Application Gateway limits
<https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits>`_.
:type backend_address_pools:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayBackendAddressPool]
:param backend_http_settings_collection: Backend http settings of the application gateway
resource. For default limits, see `Application Gateway limits
<https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits>`_.
:type backend_http_settings_collection:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayBackendHttpSettings]
:param http_listeners: Http listeners of the application gateway resource. For default limits,
see `Application Gateway limits
<https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits>`_.
:type http_listeners:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayHttpListener]
:param url_path_maps: URL path map of the application gateway resource. For default limits, see
`Application Gateway limits
<https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits>`_.
:type url_path_maps: list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayUrlPathMap]
:param request_routing_rules: Request routing rules of the application gateway resource.
:type request_routing_rules:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayRequestRoutingRule]
:param rewrite_rule_sets: Rewrite rules for the application gateway resource.
:type rewrite_rule_sets:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayRewriteRuleSet]
:param redirect_configurations: Redirect configurations of the application gateway resource.
For default limits, see `Application Gateway limits
<https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits>`_.
:type redirect_configurations:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayRedirectConfiguration]
:param web_application_firewall_configuration: Web application firewall configuration.
:type web_application_firewall_configuration:
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayWebApplicationFirewallConfiguration
:param firewall_policy: Reference of the FirewallPolicy resource.
:type firewall_policy: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param enable_http2: Whether HTTP2 is enabled on the application gateway resource.
:type enable_http2: bool
:param enable_fips: Whether FIPS is enabled on the application gateway resource.
:type enable_fips: bool
:param autoscale_configuration: Autoscale Configuration.
:type autoscale_configuration:
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayAutoscaleConfiguration
:param resource_guid: Resource GUID property of the application gateway resource.
:type resource_guid: str
:param provisioning_state: Provisioning state of the application gateway resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
:param custom_error_configurations: Custom error configurations of the application gateway
resource.
:type custom_error_configurations:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayCustomError]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'operational_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'zones': {'key': 'zones', 'type': '[str]'},
'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
'sku': {'key': 'properties.sku', 'type': 'ApplicationGatewaySku'},
'ssl_policy': {'key': 'properties.sslPolicy', 'type': 'ApplicationGatewaySslPolicy'},
'operational_state': {'key': 'properties.operationalState', 'type': 'str'},
'gateway_ip_configurations': {'key': 'properties.gatewayIPConfigurations', 'type': '[ApplicationGatewayIPConfiguration]'},
'authentication_certificates': {'key': 'properties.authenticationCertificates', 'type': '[ApplicationGatewayAuthenticationCertificate]'},
'trusted_root_certificates': {'key': 'properties.trustedRootCertificates', 'type': '[ApplicationGatewayTrustedRootCertificate]'},
'ssl_certificates': {'key': 'properties.sslCertificates', 'type': '[ApplicationGatewaySslCertificate]'},
'frontend_ip_configurations': {'key': 'properties.frontendIPConfigurations', 'type': '[ApplicationGatewayFrontendIPConfiguration]'},
'frontend_ports': {'key': 'properties.frontendPorts', 'type': '[ApplicationGatewayFrontendPort]'},
'probes': {'key': 'properties.probes', 'type': '[ApplicationGatewayProbe]'},
'backend_address_pools': {'key': 'properties.backendAddressPools', 'type': '[ApplicationGatewayBackendAddressPool]'},
'backend_http_settings_collection': {'key': 'properties.backendHttpSettingsCollection', 'type': '[ApplicationGatewayBackendHttpSettings]'},
'http_listeners': {'key': 'properties.httpListeners', 'type': '[ApplicationGatewayHttpListener]'},
'url_path_maps': {'key': 'properties.urlPathMaps', 'type': '[ApplicationGatewayUrlPathMap]'},
'request_routing_rules': {'key': 'properties.requestRoutingRules', 'type': '[ApplicationGatewayRequestRoutingRule]'},
'rewrite_rule_sets': {'key': 'properties.rewriteRuleSets', 'type': '[ApplicationGatewayRewriteRuleSet]'},
'redirect_configurations': {'key': 'properties.redirectConfigurations', 'type': '[ApplicationGatewayRedirectConfiguration]'},
'web_application_firewall_configuration': {'key': 'properties.webApplicationFirewallConfiguration', 'type': 'ApplicationGatewayWebApplicationFirewallConfiguration'},
'firewall_policy': {'key': 'properties.firewallPolicy', 'type': 'SubResource'},
'enable_http2': {'key': 'properties.enableHttp2', 'type': 'bool'},
'enable_fips': {'key': 'properties.enableFips', 'type': 'bool'},
'autoscale_configuration': {'key': 'properties.autoscaleConfiguration', 'type': 'ApplicationGatewayAutoscaleConfiguration'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'custom_error_configurations': {'key': 'properties.customErrorConfigurations', 'type': '[ApplicationGatewayCustomError]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGateway, self).__init__(**kwargs)
self.etag = kwargs.get('etag', None)
self.zones = kwargs.get('zones', None)
self.identity = kwargs.get('identity', None)
self.sku = kwargs.get('sku', None)
self.ssl_policy = kwargs.get('ssl_policy', None)
self.operational_state = None
self.gateway_ip_configurations = kwargs.get('gateway_ip_configurations', None)
self.authentication_certificates = kwargs.get('authentication_certificates', None)
self.trusted_root_certificates = kwargs.get('trusted_root_certificates', None)
self.ssl_certificates = kwargs.get('ssl_certificates', None)
self.frontend_ip_configurations = kwargs.get('frontend_ip_configurations', None)
self.frontend_ports = kwargs.get('frontend_ports', None)
self.probes = kwargs.get('probes', None)
self.backend_address_pools = kwargs.get('backend_address_pools', None)
self.backend_http_settings_collection = kwargs.get('backend_http_settings_collection', None)
self.http_listeners = kwargs.get('http_listeners', None)
self.url_path_maps = kwargs.get('url_path_maps', None)
self.request_routing_rules = kwargs.get('request_routing_rules', None)
self.rewrite_rule_sets = kwargs.get('rewrite_rule_sets', None)
self.redirect_configurations = kwargs.get('redirect_configurations', None)
self.web_application_firewall_configuration = kwargs.get('web_application_firewall_configuration', None)
self.firewall_policy = kwargs.get('firewall_policy', None)
self.enable_http2 = kwargs.get('enable_http2', None)
self.enable_fips = kwargs.get('enable_fips', None)
self.autoscale_configuration = kwargs.get('autoscale_configuration', None)
self.resource_guid = kwargs.get('resource_guid', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.custom_error_configurations = kwargs.get('custom_error_configurations', None)
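# Illustrative sketch (not part of the generated model): constructing a minimal
# ApplicationGateway using only fields documented above. Resource IDs and values
# are placeholders/assumptions for demonstration, not real resources.
#
#   gateway = ApplicationGateway(
#       location="westus",
#       tags={"env": "dev"},
#       zones=["1", "2"],
#       enable_http2=True,
#       firewall_policy=SubResource(
#           id="/subscriptions/<sub>/resourceGroups/<rg>/providers/"
#              "Microsoft.Network/firewallPolicies/<policy>"),
#   )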
class SubResource(msrest.serialization.Model):
"""Reference to another subresource.
:param id: Resource ID.
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SubResource, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
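# Illustrative sketch: SubResource is used throughout these models to reference
# another Azure resource by ID. The ID below is a hypothetical placeholder.
#
#   subnet_ref = SubResource(
#       id="/subscriptions/<sub>/resourceGroups/<rg>/providers/"
#          "Microsoft.Network/virtualNetworks/<vnet>/subnets/<subnet>")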
class ApplicationGatewayAuthenticationCertificate(SubResource):
"""Authentication certificates of an application gateway.
:param id: Resource ID.
:type id: str
:param name: Name of the authentication certificate that is unique within an Application
Gateway.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param type: Type of the resource.
:type type: str
:param data: Certificate public data.
:type data: str
:param provisioning_state: Provisioning state of the authentication certificate resource.
Possible values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'data': {'key': 'properties.data', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayAuthenticationCertificate, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.type = kwargs.get('type', None)
self.data = kwargs.get('data', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
class ApplicationGatewayAutoscaleConfiguration(msrest.serialization.Model):
"""Application Gateway autoscale configuration.
All required parameters must be populated in order to send to Azure.
:param min_capacity: Required. Lower bound on the number of Application Gateway capacity units.
:type min_capacity: int
:param max_capacity: Upper bound on the number of Application Gateway capacity units.
:type max_capacity: int
"""
_validation = {
'min_capacity': {'required': True, 'minimum': 0},
'max_capacity': {'minimum': 2},
}
_attribute_map = {
'min_capacity': {'key': 'minCapacity', 'type': 'int'},
'max_capacity': {'key': 'maxCapacity', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayAutoscaleConfiguration, self).__init__(**kwargs)
self.min_capacity = kwargs['min_capacity']
self.max_capacity = kwargs.get('max_capacity', None)
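# Illustrative sketch: min_capacity is required (>= 0) and max_capacity, when
# supplied, must be >= 2 per the _validation map above. Values are assumptions.
#
#   autoscale = ApplicationGatewayAutoscaleConfiguration(
#       min_capacity=2,
#       max_capacity=10,
#   )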
class ApplicationGatewayAvailableSslOptions(Resource):
"""Response for ApplicationGatewayAvailableSslOptions API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param predefined_policies: List of available Ssl predefined policies.
:type predefined_policies: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:param default_policy: Name of the Ssl predefined policy applied by default to application
gateway. Possible values include: "AppGwSslPolicy20150501", "AppGwSslPolicy20170401",
"AppGwSslPolicy20170401S".
:type default_policy: str or
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewaySslPolicyName
:param available_cipher_suites: List of available Ssl cipher suites.
:type available_cipher_suites: list[str or
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewaySslCipherSuite]
:param available_protocols: List of available Ssl protocols.
:type available_protocols: list[str or
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewaySslProtocol]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'predefined_policies': {'key': 'properties.predefinedPolicies', 'type': '[SubResource]'},
'default_policy': {'key': 'properties.defaultPolicy', 'type': 'str'},
'available_cipher_suites': {'key': 'properties.availableCipherSuites', 'type': '[str]'},
'available_protocols': {'key': 'properties.availableProtocols', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayAvailableSslOptions, self).__init__(**kwargs)
self.predefined_policies = kwargs.get('predefined_policies', None)
self.default_policy = kwargs.get('default_policy', None)
self.available_cipher_suites = kwargs.get('available_cipher_suites', None)
self.available_protocols = kwargs.get('available_protocols', None)
class ApplicationGatewayAvailableSslPredefinedPolicies(msrest.serialization.Model):
"""Response for ApplicationGatewayAvailableSslOptions API service call.
:param value: List of available Ssl predefined policies.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewaySslPredefinedPolicy]
:param next_link: URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ApplicationGatewaySslPredefinedPolicy]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayAvailableSslPredefinedPolicies, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ApplicationGatewayAvailableWafRuleSetsResult(msrest.serialization.Model):
"""Response for ApplicationGatewayAvailableWafRuleSets API service call.
:param value: The list of application gateway rule sets.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayFirewallRuleSet]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ApplicationGatewayFirewallRuleSet]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayAvailableWafRuleSetsResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
class ApplicationGatewayBackendAddress(msrest.serialization.Model):
"""Backend address of an application gateway.
:param fqdn: Fully qualified domain name (FQDN).
:type fqdn: str
:param ip_address: IP address.
:type ip_address: str
"""
_attribute_map = {
'fqdn': {'key': 'fqdn', 'type': 'str'},
'ip_address': {'key': 'ipAddress', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayBackendAddress, self).__init__(**kwargs)
self.fqdn = kwargs.get('fqdn', None)
self.ip_address = kwargs.get('ip_address', None)
class ApplicationGatewayBackendAddressPool(SubResource):
"""Backend Address Pool of an application gateway.
:param id: Resource ID.
:type id: str
:param name: Name of the backend address pool that is unique within an Application Gateway.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param type: Type of the resource.
:type type: str
:param backend_ip_configurations: Collection of references to IPs defined in network
interfaces.
:type backend_ip_configurations:
list[~azure.mgmt.network.v2019_04_01.models.NetworkInterfaceIPConfiguration]
:param backend_addresses: Backend addresses.
:type backend_addresses:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayBackendAddress]
:param provisioning_state: Provisioning state of the backend address pool resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'backend_ip_configurations': {'key': 'properties.backendIPConfigurations', 'type': '[NetworkInterfaceIPConfiguration]'},
'backend_addresses': {'key': 'properties.backendAddresses', 'type': '[ApplicationGatewayBackendAddress]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayBackendAddressPool, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.type = kwargs.get('type', None)
self.backend_ip_configurations = kwargs.get('backend_ip_configurations', None)
self.backend_addresses = kwargs.get('backend_addresses', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
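# Illustrative sketch: a backend pool holding FQDN/IP backend addresses. The
# names and addresses shown are placeholders, not real endpoints.
#
#   pool = ApplicationGatewayBackendAddressPool(
#       name="appGatewayBackendPool",
#       backend_addresses=[
#           ApplicationGatewayBackendAddress(fqdn="backend.example.com"),
#           ApplicationGatewayBackendAddress(ip_address="10.0.1.4"),
#       ],
#   )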
class ApplicationGatewayBackendHealth(msrest.serialization.Model):
"""Response for ApplicationGatewayBackendHealth API service call.
:param backend_address_pools: A list of ApplicationGatewayBackendHealthPool resources.
:type backend_address_pools:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayBackendHealthPool]
"""
_attribute_map = {
'backend_address_pools': {'key': 'backendAddressPools', 'type': '[ApplicationGatewayBackendHealthPool]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayBackendHealth, self).__init__(**kwargs)
self.backend_address_pools = kwargs.get('backend_address_pools', None)
class ApplicationGatewayBackendHealthHttpSettings(msrest.serialization.Model):
"""Application gateway BackendHealthHttp settings.
:param backend_http_settings: Reference of an ApplicationGatewayBackendHttpSettings resource.
:type backend_http_settings:
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayBackendHttpSettings
:param servers: List of ApplicationGatewayBackendHealthServer resources.
:type servers:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayBackendHealthServer]
"""
_attribute_map = {
'backend_http_settings': {'key': 'backendHttpSettings', 'type': 'ApplicationGatewayBackendHttpSettings'},
'servers': {'key': 'servers', 'type': '[ApplicationGatewayBackendHealthServer]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayBackendHealthHttpSettings, self).__init__(**kwargs)
self.backend_http_settings = kwargs.get('backend_http_settings', None)
self.servers = kwargs.get('servers', None)
class ApplicationGatewayBackendHealthOnDemand(msrest.serialization.Model):
"""Result of on demand test probe.
:param backend_address_pool: Reference of an ApplicationGatewayBackendAddressPool resource.
:type backend_address_pool:
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayBackendAddressPool
:param backend_health_http_settings: Application gateway BackendHealthHttp settings.
:type backend_health_http_settings:
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayBackendHealthHttpSettings
"""
_attribute_map = {
'backend_address_pool': {'key': 'backendAddressPool', 'type': 'ApplicationGatewayBackendAddressPool'},
'backend_health_http_settings': {'key': 'backendHealthHttpSettings', 'type': 'ApplicationGatewayBackendHealthHttpSettings'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayBackendHealthOnDemand, self).__init__(**kwargs)
self.backend_address_pool = kwargs.get('backend_address_pool', None)
self.backend_health_http_settings = kwargs.get('backend_health_http_settings', None)
class ApplicationGatewayBackendHealthPool(msrest.serialization.Model):
"""Application gateway BackendHealth pool.
:param backend_address_pool: Reference of an ApplicationGatewayBackendAddressPool resource.
:type backend_address_pool:
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayBackendAddressPool
:param backend_http_settings_collection: List of ApplicationGatewayBackendHealthHttpSettings
resources.
:type backend_http_settings_collection:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayBackendHealthHttpSettings]
"""
_attribute_map = {
'backend_address_pool': {'key': 'backendAddressPool', 'type': 'ApplicationGatewayBackendAddressPool'},
'backend_http_settings_collection': {'key': 'backendHttpSettingsCollection', 'type': '[ApplicationGatewayBackendHealthHttpSettings]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayBackendHealthPool, self).__init__(**kwargs)
self.backend_address_pool = kwargs.get('backend_address_pool', None)
self.backend_http_settings_collection = kwargs.get('backend_http_settings_collection', None)
class ApplicationGatewayBackendHealthServer(msrest.serialization.Model):
"""Application gateway backendhealth http settings.
:param address: IP address or FQDN of backend server.
:type address: str
:param ip_configuration: Reference of IP configuration of backend server.
:type ip_configuration: ~azure.mgmt.network.v2019_04_01.models.NetworkInterfaceIPConfiguration
:param health: Health of backend server. Possible values include: "Unknown", "Up", "Down",
"Partial", "Draining".
:type health: str or
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayBackendHealthServerHealth
:param health_probe_log: Health Probe Log.
:type health_probe_log: str
"""
_attribute_map = {
'address': {'key': 'address', 'type': 'str'},
'ip_configuration': {'key': 'ipConfiguration', 'type': 'NetworkInterfaceIPConfiguration'},
'health': {'key': 'health', 'type': 'str'},
'health_probe_log': {'key': 'healthProbeLog', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayBackendHealthServer, self).__init__(**kwargs)
self.address = kwargs.get('address', None)
self.ip_configuration = kwargs.get('ip_configuration', None)
self.health = kwargs.get('health', None)
self.health_probe_log = kwargs.get('health_probe_log', None)
class ApplicationGatewayBackendHttpSettings(SubResource):
"""Backend address pool settings of an application gateway.
:param id: Resource ID.
:type id: str
:param name: Name of the backend http settings that is unique within an Application Gateway.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param type: Type of the resource.
:type type: str
:param port: The destination port on the backend.
:type port: int
:param protocol: The protocol used to communicate with the backend. Possible values include:
"Http", "Https".
:type protocol: str or ~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayProtocol
:param cookie_based_affinity: Cookie based affinity. Possible values include: "Enabled",
"Disabled".
:type cookie_based_affinity: str or
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayCookieBasedAffinity
:param request_timeout: Request timeout in seconds. Application Gateway will fail the request
if a response is not received within RequestTimeout. Acceptable values are from 1 second to 86400
seconds.
:type request_timeout: int
:param probe: Probe resource of an application gateway.
:type probe: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param authentication_certificates: Array of references to application gateway authentication
certificates.
:type authentication_certificates: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:param trusted_root_certificates: Array of references to application gateway trusted root
certificates.
:type trusted_root_certificates: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:param connection_draining: Connection draining of the backend http settings resource.
:type connection_draining:
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayConnectionDraining
:param host_name: Host header to be sent to the backend servers.
:type host_name: str
:param pick_host_name_from_backend_address: Whether the host header should be picked from the
host name of the backend server. Default value is false.
:type pick_host_name_from_backend_address: bool
:param affinity_cookie_name: Cookie name to use for the affinity cookie.
:type affinity_cookie_name: str
:param probe_enabled: Whether the probe is enabled. Default value is false.
:type probe_enabled: bool
:param path: Path which should be used as a prefix for all HTTP requests. Null means no path
will be prefixed. Default value is null.
:type path: str
:param provisioning_state: Provisioning state of the backend http settings resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'port': {'key': 'properties.port', 'type': 'int'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'cookie_based_affinity': {'key': 'properties.cookieBasedAffinity', 'type': 'str'},
'request_timeout': {'key': 'properties.requestTimeout', 'type': 'int'},
'probe': {'key': 'properties.probe', 'type': 'SubResource'},
'authentication_certificates': {'key': 'properties.authenticationCertificates', 'type': '[SubResource]'},
'trusted_root_certificates': {'key': 'properties.trustedRootCertificates', 'type': '[SubResource]'},
'connection_draining': {'key': 'properties.connectionDraining', 'type': 'ApplicationGatewayConnectionDraining'},
'host_name': {'key': 'properties.hostName', 'type': 'str'},
'pick_host_name_from_backend_address': {'key': 'properties.pickHostNameFromBackendAddress', 'type': 'bool'},
'affinity_cookie_name': {'key': 'properties.affinityCookieName', 'type': 'str'},
'probe_enabled': {'key': 'properties.probeEnabled', 'type': 'bool'},
'path': {'key': 'properties.path', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayBackendHttpSettings, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.type = kwargs.get('type', None)
self.port = kwargs.get('port', None)
self.protocol = kwargs.get('protocol', None)
self.cookie_based_affinity = kwargs.get('cookie_based_affinity', None)
self.request_timeout = kwargs.get('request_timeout', None)
self.probe = kwargs.get('probe', None)
self.authentication_certificates = kwargs.get('authentication_certificates', None)
self.trusted_root_certificates = kwargs.get('trusted_root_certificates', None)
self.connection_draining = kwargs.get('connection_draining', None)
self.host_name = kwargs.get('host_name', None)
self.pick_host_name_from_backend_address = kwargs.get('pick_host_name_from_backend_address', None)
self.affinity_cookie_name = kwargs.get('affinity_cookie_name', None)
self.probe_enabled = kwargs.get('probe_enabled', None)
self.path = kwargs.get('path', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
class ApplicationGatewayConnectionDraining(msrest.serialization.Model):
"""Connection draining allows open connections to a backend server to be active for a specified time after the backend server got removed from the configuration.
All required parameters must be populated in order to send to Azure.
:param enabled: Required. Whether connection draining is enabled or not.
:type enabled: bool
:param drain_timeout_in_sec: Required. The number of seconds connection draining is active.
Acceptable values are from 1 second to 3600 seconds.
:type drain_timeout_in_sec: int
"""
_validation = {
'enabled': {'required': True},
'drain_timeout_in_sec': {'required': True, 'maximum': 3600, 'minimum': 1},
}
_attribute_map = {
'enabled': {'key': 'enabled', 'type': 'bool'},
'drain_timeout_in_sec': {'key': 'drainTimeoutInSec', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayConnectionDraining, self).__init__(**kwargs)
self.enabled = kwargs['enabled']
self.drain_timeout_in_sec = kwargs['drain_timeout_in_sec']
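# Illustrative sketch: both fields are required and drain_timeout_in_sec must be
# within 1..3600 per the _validation map above. The timeout value is an assumption.
#
#   draining = ApplicationGatewayConnectionDraining(
#       enabled=True,
#       drain_timeout_in_sec=60,
#   )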
class ApplicationGatewayCustomError(msrest.serialization.Model):
"""Customer error of an application gateway.
:param status_code: Status code of the application gateway customer error. Possible values
include: "HttpStatus403", "HttpStatus502".
:type status_code: str or
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayCustomErrorStatusCode
:param custom_error_page_url: Error page URL of the application gateway custom error.
:type custom_error_page_url: str
"""
_attribute_map = {
'status_code': {'key': 'statusCode', 'type': 'str'},
'custom_error_page_url': {'key': 'customErrorPageUrl', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayCustomError, self).__init__(**kwargs)
self.status_code = kwargs.get('status_code', None)
self.custom_error_page_url = kwargs.get('custom_error_page_url', None)
class ApplicationGatewayFirewallDisabledRuleGroup(msrest.serialization.Model):
"""Allows to disable rules within a rule group or an entire rule group.
All required parameters must be populated in order to send to Azure.
:param rule_group_name: Required. The name of the rule group that will be disabled.
:type rule_group_name: str
:param rules: The list of rules that will be disabled. If null, all rules of the rule group
will be disabled.
:type rules: list[int]
"""
_validation = {
'rule_group_name': {'required': True},
}
_attribute_map = {
'rule_group_name': {'key': 'ruleGroupName', 'type': 'str'},
'rules': {'key': 'rules', 'type': '[int]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayFirewallDisabledRuleGroup, self).__init__(**kwargs)
self.rule_group_name = kwargs['rule_group_name']
self.rules = kwargs.get('rules', None)
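# Illustrative sketch: disabling two specific rules in a (hypothetical) WAF rule
# group; omitting 'rules' would disable the whole group. IDs are assumptions.
#
#   disabled = ApplicationGatewayFirewallDisabledRuleGroup(
#       rule_group_name="REQUEST-942-APPLICATION-ATTACK-SQLI",
#       rules=[942130, 942200],
#   )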
class ApplicationGatewayFirewallExclusion(msrest.serialization.Model):
"""Allow to exclude some variable satisfy the condition for the WAF check.
All required parameters must be populated in order to send to Azure.
:param match_variable: Required. The variable to be excluded.
:type match_variable: str
:param selector_match_operator: Required. When matchVariable is a collection, operate on the
selector to specify which elements in the collection this exclusion applies to.
:type selector_match_operator: str
:param selector: Required. When matchVariable is a collection, the value used to specify which
elements in the collection this exclusion applies to.
:type selector: str
"""
_validation = {
'match_variable': {'required': True},
'selector_match_operator': {'required': True},
'selector': {'required': True},
}
_attribute_map = {
'match_variable': {'key': 'matchVariable', 'type': 'str'},
'selector_match_operator': {'key': 'selectorMatchOperator', 'type': 'str'},
'selector': {'key': 'selector', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayFirewallExclusion, self).__init__(**kwargs)
self.match_variable = kwargs['match_variable']
self.selector_match_operator = kwargs['selector_match_operator']
self.selector = kwargs['selector']
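# Illustrative sketch: all three fields are required. The match variable, operator
# and selector values are assumptions for demonstration.
#
#   exclusion = ApplicationGatewayFirewallExclusion(
#       match_variable="RequestHeaderNames",
#       selector_match_operator="Equals",
#       selector="User-Agent",
#   )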
class ApplicationGatewayFirewallRule(msrest.serialization.Model):
"""A web application firewall rule.
All required parameters must be populated in order to send to Azure.
:param rule_id: Required. The identifier of the web application firewall rule.
:type rule_id: int
:param description: The description of the web application firewall rule.
:type description: str
"""
_validation = {
'rule_id': {'required': True},
}
_attribute_map = {
'rule_id': {'key': 'ruleId', 'type': 'int'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayFirewallRule, self).__init__(**kwargs)
self.rule_id = kwargs['rule_id']
self.description = kwargs.get('description', None)
class ApplicationGatewayFirewallRuleGroup(msrest.serialization.Model):
"""A web application firewall rule group.
All required parameters must be populated in order to send to Azure.
:param rule_group_name: Required. The name of the web application firewall rule group.
:type rule_group_name: str
:param description: The description of the web application firewall rule group.
:type description: str
:param rules: Required. The rules of the web application firewall rule group.
:type rules: list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayFirewallRule]
"""
_validation = {
'rule_group_name': {'required': True},
'rules': {'required': True},
}
_attribute_map = {
'rule_group_name': {'key': 'ruleGroupName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'rules': {'key': 'rules', 'type': '[ApplicationGatewayFirewallRule]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayFirewallRuleGroup, self).__init__(**kwargs)
self.rule_group_name = kwargs['rule_group_name']
self.description = kwargs.get('description', None)
self.rules = kwargs['rules']
class ApplicationGatewayFirewallRuleSet(Resource):
"""A web application firewall rule set.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param provisioning_state: The provisioning state of the web application firewall rule set.
:type provisioning_state: str
:param rule_set_type: The type of the web application firewall rule set.
:type rule_set_type: str
:param rule_set_version: The version of the web application firewall rule set type.
:type rule_set_version: str
:param rule_groups: The rule groups of the web application firewall rule set.
:type rule_groups:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayFirewallRuleGroup]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'rule_set_type': {'key': 'properties.ruleSetType', 'type': 'str'},
'rule_set_version': {'key': 'properties.ruleSetVersion', 'type': 'str'},
'rule_groups': {'key': 'properties.ruleGroups', 'type': '[ApplicationGatewayFirewallRuleGroup]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayFirewallRuleSet, self).__init__(**kwargs)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.rule_set_type = kwargs.get('rule_set_type', None)
self.rule_set_version = kwargs.get('rule_set_version', None)
self.rule_groups = kwargs.get('rule_groups', None)
class ApplicationGatewayFrontendIPConfiguration(SubResource):
"""Frontend IP configuration of an application gateway.
:param id: Resource ID.
:type id: str
:param name: Name of the frontend IP configuration that is unique within an Application
Gateway.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param type: Type of the resource.
:type type: str
:param private_ip_address: PrivateIPAddress of the network interface IP Configuration.
:type private_ip_address: str
:param private_ip_allocation_method: The private IP address allocation method. Possible values
include: "Static", "Dynamic".
:type private_ip_allocation_method: str or
~azure.mgmt.network.v2019_04_01.models.IPAllocationMethod
:param subnet: Reference of the subnet resource.
:type subnet: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param public_ip_address: Reference of the PublicIP resource.
:type public_ip_address: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param provisioning_state: Provisioning state of the public IP resource. Possible values are:
'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'private_ip_address': {'key': 'properties.privateIPAddress', 'type': 'str'},
'private_ip_allocation_method': {'key': 'properties.privateIPAllocationMethod', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'SubResource'},
'public_ip_address': {'key': 'properties.publicIPAddress', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayFrontendIPConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.type = kwargs.get('type', None)
self.private_ip_address = kwargs.get('private_ip_address', None)
self.private_ip_allocation_method = kwargs.get('private_ip_allocation_method', None)
self.subnet = kwargs.get('subnet', None)
self.public_ip_address = kwargs.get('public_ip_address', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
class ApplicationGatewayFrontendPort(SubResource):
"""Frontend port of an application gateway.
:param id: Resource ID.
:type id: str
:param name: Name of the frontend port that is unique within an Application Gateway.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param type: Type of the resource.
:type type: str
:param port: Frontend port.
:type port: int
:param provisioning_state: Provisioning state of the frontend port resource. Possible values
are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'port': {'key': 'properties.port', 'type': 'int'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayFrontendPort, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.type = kwargs.get('type', None)
self.port = kwargs.get('port', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
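# Illustrative sketch: a frontend port entry; the name and port number are assumptions.
#
#   frontend_port = ApplicationGatewayFrontendPort(
#       name="appGatewayFrontendPort443",
#       port=443,
#   )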
class ApplicationGatewayHeaderConfiguration(msrest.serialization.Model):
"""Header configuration of the Actions set in Application Gateway.
:param header_name: Header name of the header configuration.
:type header_name: str
:param header_value: Header value of the header configuration.
:type header_value: str
"""
_attribute_map = {
'header_name': {'key': 'headerName', 'type': 'str'},
'header_value': {'key': 'headerValue', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayHeaderConfiguration, self).__init__(**kwargs)
self.header_name = kwargs.get('header_name', None)
self.header_value = kwargs.get('header_value', None)
class ApplicationGatewayHttpListener(SubResource):
"""Http listener of an application gateway.
:param id: Resource ID.
:type id: str
:param name: Name of the HTTP listener that is unique within an Application Gateway.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param type: Type of the resource.
:type type: str
:param frontend_ip_configuration: Frontend IP configuration resource of an application gateway.
:type frontend_ip_configuration: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param frontend_port: Frontend port resource of an application gateway.
:type frontend_port: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param protocol: Protocol of the HTTP listener. Possible values include: "Http", "Https".
:type protocol: str or ~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayProtocol
:param host_name: Host name of HTTP listener.
:type host_name: str
:param ssl_certificate: SSL certificate resource of an application gateway.
:type ssl_certificate: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param require_server_name_indication: Applicable only if protocol is https. Enables SNI for
multi-hosting.
:type require_server_name_indication: bool
:param provisioning_state: Provisioning state of the HTTP listener resource. Possible values
are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
:param custom_error_configurations: Custom error configurations of the HTTP listener.
:type custom_error_configurations:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayCustomError]
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'frontend_ip_configuration': {'key': 'properties.frontendIPConfiguration', 'type': 'SubResource'},
'frontend_port': {'key': 'properties.frontendPort', 'type': 'SubResource'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'host_name': {'key': 'properties.hostName', 'type': 'str'},
'ssl_certificate': {'key': 'properties.sslCertificate', 'type': 'SubResource'},
'require_server_name_indication': {'key': 'properties.requireServerNameIndication', 'type': 'bool'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'custom_error_configurations': {'key': 'properties.customErrorConfigurations', 'type': '[ApplicationGatewayCustomError]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayHttpListener, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.type = kwargs.get('type', None)
self.frontend_ip_configuration = kwargs.get('frontend_ip_configuration', None)
self.frontend_port = kwargs.get('frontend_port', None)
self.protocol = kwargs.get('protocol', None)
self.host_name = kwargs.get('host_name', None)
self.ssl_certificate = kwargs.get('ssl_certificate', None)
self.require_server_name_indication = kwargs.get('require_server_name_indication', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.custom_error_configurations = kwargs.get('custom_error_configurations', None)
class ApplicationGatewayIPConfiguration(SubResource):
"""IP configuration of an application gateway. Currently 1 public and 1 private IP configuration is allowed.
:param id: Resource ID.
:type id: str
:param name: Name of the IP configuration that is unique within an Application Gateway.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param type: Type of the resource.
:type type: str
:param subnet: Reference of the subnet resource. A subnet from where application gateway gets
its private address.
:type subnet: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param provisioning_state: Provisioning state of the application gateway subnet resource.
Possible values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayIPConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.type = kwargs.get('type', None)
self.subnet = kwargs.get('subnet', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
class ApplicationGatewayListResult(msrest.serialization.Model):
"""Response for ListApplicationGateways API service call.
:param value: List of application gateways in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ApplicationGateway]
:param next_link: URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ApplicationGateway]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ApplicationGatewayOnDemandProbe(msrest.serialization.Model):
"""Details of on demand test probe request.
:param protocol: The protocol used for the probe. Possible values include: "Http", "Https".
:type protocol: str or ~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayProtocol
:param host: Host name to send the probe to.
:type host: str
:param path: Relative path of probe. Valid path starts from '/'. Probe is sent to
:code:`<Protocol>`://:code:`<host>`::code:`<port>`:code:`<path>`.
:type path: str
:param timeout: The probe timeout in seconds. Probe marked as failed if valid response is not
received with this timeout period. Acceptable values are from 1 second to 86400 seconds.
:type timeout: int
:param pick_host_name_from_backend_http_settings: Whether the host header should be picked from
the backend http settings. Default value is false.
:type pick_host_name_from_backend_http_settings: bool
:param match: Criterion for classifying a healthy probe response.
:type match: ~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayProbeHealthResponseMatch
:param backend_address_pool: Reference of backend pool of application gateway to which probe
request will be sent.
:type backend_address_pool: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param backend_http_settings: Reference of backend http setting of application gateway to be
used for test probe.
:type backend_http_settings: ~azure.mgmt.network.v2019_04_01.models.SubResource
"""
_attribute_map = {
'protocol': {'key': 'protocol', 'type': 'str'},
'host': {'key': 'host', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
'timeout': {'key': 'timeout', 'type': 'int'},
'pick_host_name_from_backend_http_settings': {'key': 'pickHostNameFromBackendHttpSettings', 'type': 'bool'},
'match': {'key': 'match', 'type': 'ApplicationGatewayProbeHealthResponseMatch'},
'backend_address_pool': {'key': 'backendAddressPool', 'type': 'SubResource'},
'backend_http_settings': {'key': 'backendHttpSettings', 'type': 'SubResource'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayOnDemandProbe, self).__init__(**kwargs)
self.protocol = kwargs.get('protocol', None)
self.host = kwargs.get('host', None)
self.path = kwargs.get('path', None)
self.timeout = kwargs.get('timeout', None)
self.pick_host_name_from_backend_http_settings = kwargs.get('pick_host_name_from_backend_http_settings', None)
self.match = kwargs.get('match', None)
self.backend_address_pool = kwargs.get('backend_address_pool', None)
self.backend_http_settings = kwargs.get('backend_http_settings', None)
class ApplicationGatewayPathRule(SubResource):
"""Path rule of URL path map of an application gateway.
:param id: Resource ID.
:type id: str
:param name: Name of the path rule that is unique within an Application Gateway.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param type: Type of the resource.
:type type: str
:param paths: Path rules of URL path map.
:type paths: list[str]
:param backend_address_pool: Backend address pool resource of URL path map path rule.
:type backend_address_pool: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param backend_http_settings: Backend http settings resource of URL path map path rule.
:type backend_http_settings: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param redirect_configuration: Redirect configuration resource of URL path map path rule.
:type redirect_configuration: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param rewrite_rule_set: Rewrite rule set resource of URL path map path rule.
:type rewrite_rule_set: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param provisioning_state: Provisioning state of the path rule resource. Possible values are:
'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'paths': {'key': 'properties.paths', 'type': '[str]'},
'backend_address_pool': {'key': 'properties.backendAddressPool', 'type': 'SubResource'},
'backend_http_settings': {'key': 'properties.backendHttpSettings', 'type': 'SubResource'},
'redirect_configuration': {'key': 'properties.redirectConfiguration', 'type': 'SubResource'},
'rewrite_rule_set': {'key': 'properties.rewriteRuleSet', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayPathRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.type = kwargs.get('type', None)
self.paths = kwargs.get('paths', None)
self.backend_address_pool = kwargs.get('backend_address_pool', None)
self.backend_http_settings = kwargs.get('backend_http_settings', None)
self.redirect_configuration = kwargs.get('redirect_configuration', None)
self.rewrite_rule_set = kwargs.get('rewrite_rule_set', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
class ApplicationGatewayProbe(SubResource):
"""Probe of the application gateway.
:param id: Resource ID.
:type id: str
:param name: Name of the probe that is unique within an Application Gateway.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param type: Type of the resource.
:type type: str
:param protocol: The protocol used for the probe. Possible values include: "Http", "Https".
:type protocol: str or ~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayProtocol
:param host: Host name to send the probe to.
:type host: str
:param path: Relative path of probe. Valid path starts from '/'. Probe is sent to
:code:`<Protocol>`://:code:`<host>`::code:`<port>`:code:`<path>`.
:type path: str
:param interval: The probing interval in seconds. This is the time interval between two
consecutive probes. Acceptable values are from 1 second to 86400 seconds.
:type interval: int
:param timeout: The probe timeout in seconds. Probe marked as failed if valid response is not
received with this timeout period. Acceptable values are from 1 second to 86400 seconds.
:type timeout: int
:param unhealthy_threshold: The probe retry count. Backend server is marked down after
consecutive probe failure count reaches UnhealthyThreshold. Acceptable values are from 1
to 20.
:type unhealthy_threshold: int
:param pick_host_name_from_backend_http_settings: Whether the host header should be picked from
the backend http settings. Default value is false.
:type pick_host_name_from_backend_http_settings: bool
:param min_servers: Minimum number of servers that are always marked healthy. Default value is
0.
:type min_servers: int
:param match: Criterion for classifying a healthy probe response.
:type match: ~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayProbeHealthResponseMatch
:param provisioning_state: Provisioning state of the backend http settings resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
:param port: Custom port which will be used for probing the backend servers. The valid value
ranges from 1 to 65535. If not set, the port from the HTTP settings will be used. This property
is valid for Standard_v2 and WAF_v2 only.
:type port: int
"""
_validation = {
'port': {'maximum': 65535, 'minimum': 1},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'host': {'key': 'properties.host', 'type': 'str'},
'path': {'key': 'properties.path', 'type': 'str'},
'interval': {'key': 'properties.interval', 'type': 'int'},
'timeout': {'key': 'properties.timeout', 'type': 'int'},
'unhealthy_threshold': {'key': 'properties.unhealthyThreshold', 'type': 'int'},
'pick_host_name_from_backend_http_settings': {'key': 'properties.pickHostNameFromBackendHttpSettings', 'type': 'bool'},
'min_servers': {'key': 'properties.minServers', 'type': 'int'},
'match': {'key': 'properties.match', 'type': 'ApplicationGatewayProbeHealthResponseMatch'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'port': {'key': 'properties.port', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayProbe, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.type = kwargs.get('type', None)
self.protocol = kwargs.get('protocol', None)
self.host = kwargs.get('host', None)
self.path = kwargs.get('path', None)
self.interval = kwargs.get('interval', None)
self.timeout = kwargs.get('timeout', None)
self.unhealthy_threshold = kwargs.get('unhealthy_threshold', None)
self.pick_host_name_from_backend_http_settings = kwargs.get('pick_host_name_from_backend_http_settings', None)
self.min_servers = kwargs.get('min_servers', None)
self.match = kwargs.get('match', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.port = kwargs.get('port', None)
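# Illustrative sketch: a custom HTTPS probe using fields documented above. The host,
# path, interval, timeout and threshold values are assumptions within the documented ranges.
#
#   probe = ApplicationGatewayProbe(
#       name="healthProbe",
#       protocol="Https",
#       host="backend.example.com",
#       path="/healthz",
#       interval=30,
#       timeout=30,
#       unhealthy_threshold=3,
#       pick_host_name_from_backend_http_settings=False,
#   )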
class ApplicationGatewayProbeHealthResponseMatch(msrest.serialization.Model):
"""Application gateway probe health response match.
:param body: Body that must be contained in the health response. Default value is empty.
:type body: str
:param status_codes: Allowed ranges of healthy status codes. Default range of healthy status
codes is 200-399.
:type status_codes: list[str]
"""
_attribute_map = {
'body': {'key': 'body', 'type': 'str'},
'status_codes': {'key': 'statusCodes', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayProbeHealthResponseMatch, self).__init__(**kwargs)
self.body = kwargs.get('body', None)
self.status_codes = kwargs.get('status_codes', None)
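# Illustrative sketch: accept 2xx and 3xx responses as healthy, using the "start-end"
# range form implied by the default range documented above (200-399).
#
#   match = ApplicationGatewayProbeHealthResponseMatch(
#       status_codes=["200-399"],
#   )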
class ApplicationGatewayRedirectConfiguration(SubResource):
"""Redirect configuration of an application gateway.
:param id: Resource ID.
:type id: str
:param name: Name of the redirect configuration that is unique within an Application Gateway.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param type: Type of the resource.
:type type: str
:param redirect_type: HTTP redirection type. Possible values include: "Permanent", "Found",
"SeeOther", "Temporary".
:type redirect_type: str or
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayRedirectType
:param target_listener: Reference to a listener to redirect the request to.
:type target_listener: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param target_url: Url to redirect the request to.
:type target_url: str
:param include_path: Include path in the redirected url.
:type include_path: bool
:param include_query_string: Include query string in the redirected url.
:type include_query_string: bool
:param request_routing_rules: Request routing specifying redirect configuration.
:type request_routing_rules: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:param url_path_maps: Url path maps specifying default redirect configuration.
:type url_path_maps: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:param path_rules: Path rules specifying redirect configuration.
:type path_rules: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'redirect_type': {'key': 'properties.redirectType', 'type': 'str'},
'target_listener': {'key': 'properties.targetListener', 'type': 'SubResource'},
'target_url': {'key': 'properties.targetUrl', 'type': 'str'},
'include_path': {'key': 'properties.includePath', 'type': 'bool'},
'include_query_string': {'key': 'properties.includeQueryString', 'type': 'bool'},
'request_routing_rules': {'key': 'properties.requestRoutingRules', 'type': '[SubResource]'},
'url_path_maps': {'key': 'properties.urlPathMaps', 'type': '[SubResource]'},
'path_rules': {'key': 'properties.pathRules', 'type': '[SubResource]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayRedirectConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.type = kwargs.get('type', None)
self.redirect_type = kwargs.get('redirect_type', None)
self.target_listener = kwargs.get('target_listener', None)
self.target_url = kwargs.get('target_url', None)
self.include_path = kwargs.get('include_path', None)
self.include_query_string = kwargs.get('include_query_string', None)
self.request_routing_rules = kwargs.get('request_routing_rules', None)
self.url_path_maps = kwargs.get('url_path_maps', None)
self.path_rules = kwargs.get('path_rules', None)
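# Illustrative sketch: a permanent redirect of all requests to another listener;
# the target listener ID is a placeholder.
#
#   redirect = ApplicationGatewayRedirectConfiguration(
#       name="httpToHttpsRedirect",
#       redirect_type="Permanent",
#       target_listener=SubResource(id="<https-listener-resource-id>"),
#       include_path=True,
#       include_query_string=True,
#   )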
class ApplicationGatewayRequestRoutingRule(SubResource):
"""Request routing rule of an application gateway.
:param id: Resource ID.
:type id: str
:param name: Name of the request routing rule that is unique within an Application Gateway.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param type: Type of the resource.
:type type: str
:param rule_type: Rule type. Possible values include: "Basic", "PathBasedRouting".
:type rule_type: str or
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayRequestRoutingRuleType
:param backend_address_pool: Backend address pool resource of the application gateway.
:type backend_address_pool: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param backend_http_settings: Backend http settings resource of the application gateway.
:type backend_http_settings: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param http_listener: Http listener resource of the application gateway.
:type http_listener: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param url_path_map: URL path map resource of the application gateway.
:type url_path_map: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param rewrite_rule_set: Rewrite Rule Set resource in Basic rule of the application gateway.
:type rewrite_rule_set: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param redirect_configuration: Redirect configuration resource of the application gateway.
:type redirect_configuration: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param provisioning_state: Provisioning state of the request routing rule resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'rule_type': {'key': 'properties.ruleType', 'type': 'str'},
'backend_address_pool': {'key': 'properties.backendAddressPool', 'type': 'SubResource'},
'backend_http_settings': {'key': 'properties.backendHttpSettings', 'type': 'SubResource'},
'http_listener': {'key': 'properties.httpListener', 'type': 'SubResource'},
'url_path_map': {'key': 'properties.urlPathMap', 'type': 'SubResource'},
'rewrite_rule_set': {'key': 'properties.rewriteRuleSet', 'type': 'SubResource'},
'redirect_configuration': {'key': 'properties.redirectConfiguration', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayRequestRoutingRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.type = kwargs.get('type', None)
self.rule_type = kwargs.get('rule_type', None)
self.backend_address_pool = kwargs.get('backend_address_pool', None)
self.backend_http_settings = kwargs.get('backend_http_settings', None)
self.http_listener = kwargs.get('http_listener', None)
self.url_path_map = kwargs.get('url_path_map', None)
self.rewrite_rule_set = kwargs.get('rewrite_rule_set', None)
self.redirect_configuration = kwargs.get('redirect_configuration', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
class ApplicationGatewayRewriteRule(msrest.serialization.Model):
"""Rewrite rule of an application gateway.
:param name: Name of the rewrite rule that is unique within an Application Gateway.
:type name: str
:param rule_sequence: Rule Sequence of the rewrite rule that determines the order of execution
of a particular rule in a RewriteRuleSet.
:type rule_sequence: int
:param conditions: Conditions based on which the action set execution will be evaluated.
:type conditions:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayRewriteRuleCondition]
:param action_set: Set of actions to be done as part of the rewrite Rule.
:type action_set: ~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayRewriteRuleActionSet
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'rule_sequence': {'key': 'ruleSequence', 'type': 'int'},
'conditions': {'key': 'conditions', 'type': '[ApplicationGatewayRewriteRuleCondition]'},
'action_set': {'key': 'actionSet', 'type': 'ApplicationGatewayRewriteRuleActionSet'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayRewriteRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.rule_sequence = kwargs.get('rule_sequence', None)
self.conditions = kwargs.get('conditions', None)
self.action_set = kwargs.get('action_set', None)
class ApplicationGatewayRewriteRuleActionSet(msrest.serialization.Model):
"""Set of actions in the Rewrite Rule in Application Gateway.
:param request_header_configurations: Request Header Actions in the Action Set.
:type request_header_configurations:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayHeaderConfiguration]
:param response_header_configurations: Response Header Actions in the Action Set.
:type response_header_configurations:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayHeaderConfiguration]
"""
_attribute_map = {
'request_header_configurations': {'key': 'requestHeaderConfigurations', 'type': '[ApplicationGatewayHeaderConfiguration]'},
'response_header_configurations': {'key': 'responseHeaderConfigurations', 'type': '[ApplicationGatewayHeaderConfiguration]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayRewriteRuleActionSet, self).__init__(**kwargs)
self.request_header_configurations = kwargs.get('request_header_configurations', None)
self.response_header_configurations = kwargs.get('response_header_configurations', None)
class ApplicationGatewayRewriteRuleCondition(msrest.serialization.Model):
"""Set of conditions in the Rewrite Rule in Application Gateway.
:param variable: The condition parameter of the RewriteRuleCondition.
:type variable: str
:param pattern: The pattern, either fixed string or regular expression, that evaluates the
truthfulness of the condition.
:type pattern: str
:param ignore_case: Setting this parameter to a truth value will force the pattern to do a
case-insensitive comparison.
:type ignore_case: bool
:param negate: Setting this value to true will check the negation of the condition given by
the user.
:type negate: bool
"""
_attribute_map = {
'variable': {'key': 'variable', 'type': 'str'},
'pattern': {'key': 'pattern', 'type': 'str'},
'ignore_case': {'key': 'ignoreCase', 'type': 'bool'},
'negate': {'key': 'negate', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayRewriteRuleCondition, self).__init__(**kwargs)
self.variable = kwargs.get('variable', None)
self.pattern = kwargs.get('pattern', None)
self.ignore_case = kwargs.get('ignore_case', None)
self.negate = kwargs.get('negate', None)
class ApplicationGatewayRewriteRuleSet(SubResource):
"""Rewrite rule set of an application gateway.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the rewrite rule set that is unique within an Application Gateway.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param rewrite_rules: Rewrite rules in the rewrite rule set.
:type rewrite_rules: list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayRewriteRule]
:ivar provisioning_state: Provisioning state of the rewrite rule set resource. Possible values
are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'rewrite_rules': {'key': 'properties.rewriteRules', 'type': '[ApplicationGatewayRewriteRule]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayRewriteRuleSet, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.rewrite_rules = kwargs.get('rewrite_rules', None)
self.provisioning_state = None
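# Illustrative usage sketch (not part of the generated model code): assembling a
# rewrite rule set from the classes above. Names, patterns, and values are
# hypothetical; keyword arguments mirror the kwargs consumed by each __init__.
def _example_rewrite_rule_set():
    condition = ApplicationGatewayRewriteRuleCondition(
        variable='var_request_uri', pattern='.*\\.php', ignore_case=True, negate=False)
    rule = ApplicationGatewayRewriteRule(
        name='rewrite-rule-1', rule_sequence=100, conditions=[condition],
        action_set=ApplicationGatewayRewriteRuleActionSet())
    return ApplicationGatewayRewriteRuleSet(name='default-rule-set', rewrite_rules=[rule])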
class ApplicationGatewaySku(msrest.serialization.Model):
"""SKU of an application gateway.
:param name: Name of an application gateway SKU. Possible values include: "Standard_Small",
"Standard_Medium", "Standard_Large", "WAF_Medium", "WAF_Large", "Standard_v2", "WAF_v2".
:type name: str or ~azure.mgmt.network.v2019_04_01.models.ApplicationGatewaySkuName
:param tier: Tier of an application gateway. Possible values include: "Standard", "WAF",
"Standard_v2", "WAF_v2".
:type tier: str or ~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayTier
:param capacity: Capacity (instance count) of an application gateway.
:type capacity: int
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'tier': {'key': 'tier', 'type': 'str'},
'capacity': {'key': 'capacity', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewaySku, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.tier = kwargs.get('tier', None)
self.capacity = kwargs.get('capacity', None)
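# Illustrative usage sketch (not part of the generated model code): a SKU for a
# v2 application gateway with two instances. The values are hypothetical but
# taken from the possible values documented above.
def _example_application_gateway_sku():
    return ApplicationGatewaySku(name='Standard_v2', tier='Standard_v2', capacity=2)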
class ApplicationGatewaySslCertificate(SubResource):
"""SSL certificates of an application gateway.
:param id: Resource ID.
:type id: str
:param name: Name of the SSL certificate that is unique within an Application Gateway.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param type: Type of the resource.
:type type: str
:param data: Base-64 encoded pfx certificate. Only applicable in PUT Request.
:type data: str
:param password: Password for the pfx file specified in data. Only applicable in PUT request.
:type password: str
:param public_cert_data: Base-64 encoded Public cert data corresponding to pfx specified in
data. Only applicable in GET request.
:type public_cert_data: str
:param key_vault_secret_id: Secret Id of (base-64 encoded unencrypted pfx) 'Secret' or
'Certificate' object stored in KeyVault.
:type key_vault_secret_id: str
:param provisioning_state: Provisioning state of the SSL certificate resource. Possible values
are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'data': {'key': 'properties.data', 'type': 'str'},
'password': {'key': 'properties.password', 'type': 'str'},
'public_cert_data': {'key': 'properties.publicCertData', 'type': 'str'},
'key_vault_secret_id': {'key': 'properties.keyVaultSecretId', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewaySslCertificate, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.type = kwargs.get('type', None)
self.data = kwargs.get('data', None)
self.password = kwargs.get('password', None)
self.public_cert_data = kwargs.get('public_cert_data', None)
self.key_vault_secret_id = kwargs.get('key_vault_secret_id', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
class ApplicationGatewaySslPolicy(msrest.serialization.Model):
"""Application Gateway Ssl policy.
:param disabled_ssl_protocols: Ssl protocols to be disabled on application gateway.
:type disabled_ssl_protocols: list[str or
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewaySslProtocol]
:param policy_type: Type of Ssl Policy. Possible values include: "Predefined", "Custom".
:type policy_type: str or
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewaySslPolicyType
:param policy_name: Name of Ssl predefined policy. Possible values include:
"AppGwSslPolicy20150501", "AppGwSslPolicy20170401", "AppGwSslPolicy20170401S".
:type policy_name: str or
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewaySslPolicyName
:param cipher_suites: Ssl cipher suites to be enabled in the specified order to application
gateway.
:type cipher_suites: list[str or
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewaySslCipherSuite]
:param min_protocol_version: Minimum version of Ssl protocol to be supported on application
gateway. Possible values include: "TLSv1_0", "TLSv1_1", "TLSv1_2".
:type min_protocol_version: str or
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewaySslProtocol
"""
_attribute_map = {
'disabled_ssl_protocols': {'key': 'disabledSslProtocols', 'type': '[str]'},
'policy_type': {'key': 'policyType', 'type': 'str'},
'policy_name': {'key': 'policyName', 'type': 'str'},
'cipher_suites': {'key': 'cipherSuites', 'type': '[str]'},
'min_protocol_version': {'key': 'minProtocolVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewaySslPolicy, self).__init__(**kwargs)
self.disabled_ssl_protocols = kwargs.get('disabled_ssl_protocols', None)
self.policy_type = kwargs.get('policy_type', None)
self.policy_name = kwargs.get('policy_name', None)
self.cipher_suites = kwargs.get('cipher_suites', None)
self.min_protocol_version = kwargs.get('min_protocol_version', None)
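# Illustrative usage sketch (not part of the generated model code): selecting a
# predefined SSL policy by name. The policy name is hypothetical but drawn from
# the possible values documented above.
def _example_ssl_policy():
    return ApplicationGatewaySslPolicy(
        policy_type='Predefined', policy_name='AppGwSslPolicy20170401S')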
class ApplicationGatewaySslPredefinedPolicy(SubResource):
"""An Ssl predefined policy.
:param id: Resource ID.
:type id: str
:param name: Name of the Ssl predefined policy.
:type name: str
:param cipher_suites: Ssl cipher suites to be enabled in the specified order for application
gateway.
:type cipher_suites: list[str or
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewaySslCipherSuite]
:param min_protocol_version: Minimum version of Ssl protocol to be supported on application
gateway. Possible values include: "TLSv1_0", "TLSv1_1", "TLSv1_2".
:type min_protocol_version: str or
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewaySslProtocol
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'cipher_suites': {'key': 'properties.cipherSuites', 'type': '[str]'},
'min_protocol_version': {'key': 'properties.minProtocolVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewaySslPredefinedPolicy, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.cipher_suites = kwargs.get('cipher_suites', None)
self.min_protocol_version = kwargs.get('min_protocol_version', None)
class ApplicationGatewayTrustedRootCertificate(SubResource):
"""Trusted Root certificates of an application gateway.
:param id: Resource ID.
:type id: str
:param name: Name of the trusted root certificate that is unique within an Application Gateway.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param type: Type of the resource.
:type type: str
:param data: Certificate public data.
:type data: str
:param key_vault_secret_id: Secret Id of (base-64 encoded unencrypted pfx) 'Secret' or
'Certificate' object stored in KeyVault.
:type key_vault_secret_id: str
:param provisioning_state: Provisioning state of the trusted root certificate resource.
Possible values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'data': {'key': 'properties.data', 'type': 'str'},
'key_vault_secret_id': {'key': 'properties.keyVaultSecretId', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayTrustedRootCertificate, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.type = kwargs.get('type', None)
self.data = kwargs.get('data', None)
self.key_vault_secret_id = kwargs.get('key_vault_secret_id', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
class ApplicationGatewayUrlPathMap(SubResource):
"""UrlPathMaps give a url path to the backend mapping information for PathBasedRouting.
:param id: Resource ID.
:type id: str
:param name: Name of the URL path map that is unique within an Application Gateway.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param type: Type of the resource.
:type type: str
:param default_backend_address_pool: Default backend address pool resource of URL path map.
:type default_backend_address_pool: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param default_backend_http_settings: Default backend http settings resource of URL path map.
:type default_backend_http_settings: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param default_rewrite_rule_set: Default Rewrite rule set resource of URL path map.
:type default_rewrite_rule_set: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param default_redirect_configuration: Default redirect configuration resource of URL path map.
:type default_redirect_configuration: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param path_rules: Path rule of URL path map resource.
:type path_rules: list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayPathRule]
:param provisioning_state: Provisioning state of the URL path map resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'default_backend_address_pool': {'key': 'properties.defaultBackendAddressPool', 'type': 'SubResource'},
'default_backend_http_settings': {'key': 'properties.defaultBackendHttpSettings', 'type': 'SubResource'},
'default_rewrite_rule_set': {'key': 'properties.defaultRewriteRuleSet', 'type': 'SubResource'},
'default_redirect_configuration': {'key': 'properties.defaultRedirectConfiguration', 'type': 'SubResource'},
'path_rules': {'key': 'properties.pathRules', 'type': '[ApplicationGatewayPathRule]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayUrlPathMap, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.type = kwargs.get('type', None)
self.default_backend_address_pool = kwargs.get('default_backend_address_pool', None)
self.default_backend_http_settings = kwargs.get('default_backend_http_settings', None)
self.default_rewrite_rule_set = kwargs.get('default_rewrite_rule_set', None)
self.default_redirect_configuration = kwargs.get('default_redirect_configuration', None)
self.path_rules = kwargs.get('path_rules', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
class ApplicationGatewayWebApplicationFirewallConfiguration(msrest.serialization.Model):
"""Application gateway web application firewall configuration.
All required parameters must be populated in order to send to Azure.
:param enabled: Required. Whether the web application firewall is enabled or not.
:type enabled: bool
:param firewall_mode: Required. Web application firewall mode. Possible values include:
"Detection", "Prevention".
:type firewall_mode: str or
~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayFirewallMode
:param rule_set_type: Required. The type of the web application firewall rule set. Possible
values are: 'OWASP'.
:type rule_set_type: str
:param rule_set_version: Required. The version of the rule set type.
:type rule_set_version: str
:param disabled_rule_groups: The disabled rule groups.
:type disabled_rule_groups:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayFirewallDisabledRuleGroup]
:param request_body_check: Whether to allow WAF to check the request body.
:type request_body_check: bool
:param max_request_body_size: Maximum request body size for WAF.
:type max_request_body_size: int
:param max_request_body_size_in_kb: Maximum request body size in Kb for WAF.
:type max_request_body_size_in_kb: int
:param file_upload_limit_in_mb: Maximum file upload size in Mb for WAF.
:type file_upload_limit_in_mb: int
:param exclusions: The exclusion list.
:type exclusions:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayFirewallExclusion]
"""
_validation = {
'enabled': {'required': True},
'firewall_mode': {'required': True},
'rule_set_type': {'required': True},
'rule_set_version': {'required': True},
'max_request_body_size': {'maximum': 128, 'minimum': 8},
'max_request_body_size_in_kb': {'maximum': 128, 'minimum': 8},
'file_upload_limit_in_mb': {'minimum': 0},
}
_attribute_map = {
'enabled': {'key': 'enabled', 'type': 'bool'},
'firewall_mode': {'key': 'firewallMode', 'type': 'str'},
'rule_set_type': {'key': 'ruleSetType', 'type': 'str'},
'rule_set_version': {'key': 'ruleSetVersion', 'type': 'str'},
'disabled_rule_groups': {'key': 'disabledRuleGroups', 'type': '[ApplicationGatewayFirewallDisabledRuleGroup]'},
'request_body_check': {'key': 'requestBodyCheck', 'type': 'bool'},
'max_request_body_size': {'key': 'maxRequestBodySize', 'type': 'int'},
'max_request_body_size_in_kb': {'key': 'maxRequestBodySizeInKb', 'type': 'int'},
'file_upload_limit_in_mb': {'key': 'fileUploadLimitInMb', 'type': 'int'},
'exclusions': {'key': 'exclusions', 'type': '[ApplicationGatewayFirewallExclusion]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayWebApplicationFirewallConfiguration, self).__init__(**kwargs)
self.enabled = kwargs['enabled']
self.firewall_mode = kwargs['firewall_mode']
self.rule_set_type = kwargs['rule_set_type']
self.rule_set_version = kwargs['rule_set_version']
self.disabled_rule_groups = kwargs.get('disabled_rule_groups', None)
self.request_body_check = kwargs.get('request_body_check', None)
self.max_request_body_size = kwargs.get('max_request_body_size', None)
self.max_request_body_size_in_kb = kwargs.get('max_request_body_size_in_kb', None)
self.file_upload_limit_in_mb = kwargs.get('file_upload_limit_in_mb', None)
self.exclusions = kwargs.get('exclusions', None)
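# Illustrative usage sketch (not part of the generated model code): a WAF
# configuration with the four required fields populated (see the _validation map
# above); the remaining values are hypothetical and stay within the documented
# minimum/maximum bounds.
def _example_waf_configuration():
    return ApplicationGatewayWebApplicationFirewallConfiguration(
        enabled=True,
        firewall_mode='Prevention',
        rule_set_type='OWASP',
        rule_set_version='3.0',
        request_body_check=True,
        max_request_body_size_in_kb=128,
        file_upload_limit_in_mb=100)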
class ApplicationSecurityGroup(Resource):
"""An application security group in a resource group.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar resource_guid: The resource GUID property of the application security group resource. It
uniquely identifies a resource, even if the user changes its name or migrates the resource
across subscriptions or resource groups.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the application security group resource.
Possible values are: 'Succeeded', 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationSecurityGroup, self).__init__(**kwargs)
self.etag = None
self.resource_guid = None
self.provisioning_state = None
class ApplicationSecurityGroupListResult(msrest.serialization.Model):
"""A list of application security groups.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of application security groups.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ApplicationSecurityGroup]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ApplicationSecurityGroup]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationSecurityGroupListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class AuthorizationListResult(msrest.serialization.Model):
"""Response for ListAuthorizations API service call retrieves all authorizations that belongs to an ExpressRouteCircuit.
:param value: The authorizations in an ExpressRoute Circuit.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitAuthorization]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCircuitAuthorization]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AuthorizationListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class AutoApprovedPrivateLinkService(msrest.serialization.Model):
"""The information of an AutoApprovedPrivateLinkService.
:param private_link_service: The id of the private link service resource.
:type private_link_service: str
"""
_attribute_map = {
'private_link_service': {'key': 'privateLinkService', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AutoApprovedPrivateLinkService, self).__init__(**kwargs)
self.private_link_service = kwargs.get('private_link_service', None)
class AutoApprovedPrivateLinkServicesResult(msrest.serialization.Model):
"""An array of private link service id that can be linked to a private end point with auto approved.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: An array of auto-approved private link services.
:type value: list[~azure.mgmt.network.v2019_04_01.models.AutoApprovedPrivateLinkService]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[AutoApprovedPrivateLinkService]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AutoApprovedPrivateLinkServicesResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class Availability(msrest.serialization.Model):
"""Availability of the metric.
:param time_grain: The time grain of the availability.
:type time_grain: str
:param retention: The retention of the availability.
:type retention: str
:param blob_duration: Duration of the availability blob.
:type blob_duration: str
"""
_attribute_map = {
'time_grain': {'key': 'timeGrain', 'type': 'str'},
'retention': {'key': 'retention', 'type': 'str'},
'blob_duration': {'key': 'blobDuration', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Availability, self).__init__(**kwargs)
self.time_grain = kwargs.get('time_grain', None)
self.retention = kwargs.get('retention', None)
self.blob_duration = kwargs.get('blob_duration', None)
class AvailableDelegation(msrest.serialization.Model):
"""The serviceName of an AvailableDelegation indicates a possible delegation for a subnet.
:param name: The name of the AvailableDelegation resource.
:type name: str
:param id: A unique identifier of the AvailableDelegation resource.
:type id: str
:param type: Resource type.
:type type: str
:param service_name: The name of the service and resource.
:type service_name: str
:param actions: Describes the actions permitted to the service upon delegation.
:type actions: list[str]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'service_name': {'key': 'serviceName', 'type': 'str'},
'actions': {'key': 'actions', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(AvailableDelegation, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.id = kwargs.get('id', None)
self.type = kwargs.get('type', None)
self.service_name = kwargs.get('service_name', None)
self.actions = kwargs.get('actions', None)
class AvailableDelegationsResult(msrest.serialization.Model):
"""An array of available delegations.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: An array of available delegations.
:type value: list[~azure.mgmt.network.v2019_04_01.models.AvailableDelegation]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[AvailableDelegation]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AvailableDelegationsResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class AvailablePrivateEndpointType(msrest.serialization.Model):
"""The information of an AvailablePrivateEndpointType.
:param name: The name of the service and resource.
:type name: str
:param id: A unique identifier of the AvailablePrivateEndpoint Type resource.
:type id: str
:param type: Resource type.
:type type: str
:param resource_name: The name of the service and resource.
:type resource_name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'resource_name': {'key': 'resourceName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AvailablePrivateEndpointType, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.id = kwargs.get('id', None)
self.type = kwargs.get('type', None)
self.resource_name = kwargs.get('resource_name', None)
class AvailablePrivateEndpointTypesResult(msrest.serialization.Model):
"""An array of available PrivateEndpoint types.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: An array of available privateEndpoint types.
:type value: list[~azure.mgmt.network.v2019_04_01.models.AvailablePrivateEndpointType]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[AvailablePrivateEndpointType]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AvailablePrivateEndpointTypesResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class AvailableProvidersList(msrest.serialization.Model):
"""List of available countries with details.
All required parameters must be populated in order to send to Azure.
:param countries: Required. List of available countries.
:type countries: list[~azure.mgmt.network.v2019_04_01.models.AvailableProvidersListCountry]
"""
_validation = {
'countries': {'required': True},
}
_attribute_map = {
'countries': {'key': 'countries', 'type': '[AvailableProvidersListCountry]'},
}
def __init__(
self,
**kwargs
):
super(AvailableProvidersList, self).__init__(**kwargs)
self.countries = kwargs['countries']
class AvailableProvidersListCity(msrest.serialization.Model):
"""City or town details.
:param city_name: The city or town name.
:type city_name: str
:param providers: A list of Internet service providers.
:type providers: list[str]
"""
_attribute_map = {
'city_name': {'key': 'cityName', 'type': 'str'},
'providers': {'key': 'providers', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(AvailableProvidersListCity, self).__init__(**kwargs)
self.city_name = kwargs.get('city_name', None)
self.providers = kwargs.get('providers', None)
class AvailableProvidersListCountry(msrest.serialization.Model):
"""Country details.
:param country_name: The country name.
:type country_name: str
:param providers: A list of Internet service providers.
:type providers: list[str]
:param states: List of available states in the country.
:type states: list[~azure.mgmt.network.v2019_04_01.models.AvailableProvidersListState]
"""
_attribute_map = {
'country_name': {'key': 'countryName', 'type': 'str'},
'providers': {'key': 'providers', 'type': '[str]'},
'states': {'key': 'states', 'type': '[AvailableProvidersListState]'},
}
def __init__(
self,
**kwargs
):
super(AvailableProvidersListCountry, self).__init__(**kwargs)
self.country_name = kwargs.get('country_name', None)
self.providers = kwargs.get('providers', None)
self.states = kwargs.get('states', None)
class AvailableProvidersListParameters(msrest.serialization.Model):
"""Constraints that determine the list of available Internet service providers.
:param azure_locations: A list of Azure regions.
:type azure_locations: list[str]
:param country: The country for available providers list.
:type country: str
:param state: The state for available providers list.
:type state: str
:param city: The city or town for available providers list.
:type city: str
"""
_attribute_map = {
'azure_locations': {'key': 'azureLocations', 'type': '[str]'},
'country': {'key': 'country', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'city': {'key': 'city', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AvailableProvidersListParameters, self).__init__(**kwargs)
self.azure_locations = kwargs.get('azure_locations', None)
self.country = kwargs.get('country', None)
self.state = kwargs.get('state', None)
self.city = kwargs.get('city', None)
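# Illustrative usage sketch (not part of the generated model code): constraints
# for an available-providers query scoped to a single region and city. Location
# names are hypothetical.
def _example_available_providers_parameters():
    return AvailableProvidersListParameters(
        azure_locations=['West US'], country='United States',
        state='washington', city='seattle')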
class AvailableProvidersListState(msrest.serialization.Model):
"""State details.
:param state_name: The state name.
:type state_name: str
:param providers: A list of Internet service providers.
:type providers: list[str]
:param cities: List of available cities or towns in the state.
:type cities: list[~azure.mgmt.network.v2019_04_01.models.AvailableProvidersListCity]
"""
_attribute_map = {
'state_name': {'key': 'stateName', 'type': 'str'},
'providers': {'key': 'providers', 'type': '[str]'},
'cities': {'key': 'cities', 'type': '[AvailableProvidersListCity]'},
}
def __init__(
self,
**kwargs
):
super(AvailableProvidersListState, self).__init__(**kwargs)
self.state_name = kwargs.get('state_name', None)
self.providers = kwargs.get('providers', None)
self.cities = kwargs.get('cities', None)
class AzureAsyncOperationResult(msrest.serialization.Model):
"""The response body contains the status of the specified asynchronous operation, indicating whether it has succeeded, is in progress, or has failed. Note that this status is distinct from the HTTP status code returned for the Get Operation Status operation itself. If the asynchronous operation succeeded, the response body includes the HTTP status code for the successful request. If the asynchronous operation failed, the response body includes the HTTP status code for the failed request and error information regarding the failure.
:param status: Status of the Azure async operation. Possible values include: "InProgress",
"Succeeded", "Failed".
:type status: str or ~azure.mgmt.network.v2019_04_01.models.NetworkOperationStatus
:param error: Details of the error occurred during specified asynchronous operation.
:type error: ~azure.mgmt.network.v2019_04_01.models.Error
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'error': {'key': 'error', 'type': 'Error'},
}
def __init__(
self,
**kwargs
):
super(AzureAsyncOperationResult, self).__init__(**kwargs)
self.status = kwargs.get('status', None)
self.error = kwargs.get('error', None)
class AzureFirewall(Resource):
"""Azure Firewall resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param zones: A list of availability zones denoting where the resource needs to come from.
:type zones: list[str]
:ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param application_rule_collections: Collection of application rule collections used by Azure
Firewall.
:type application_rule_collections:
list[~azure.mgmt.network.v2019_04_01.models.AzureFirewallApplicationRuleCollection]
:param nat_rule_collections: Collection of NAT rule collections used by Azure Firewall.
:type nat_rule_collections:
list[~azure.mgmt.network.v2019_04_01.models.AzureFirewallNatRuleCollection]
:param network_rule_collections: Collection of network rule collections used by Azure Firewall.
:type network_rule_collections:
list[~azure.mgmt.network.v2019_04_01.models.AzureFirewallNetworkRuleCollection]
:param ip_configurations: IP configuration of the Azure Firewall resource.
:type ip_configurations:
list[~azure.mgmt.network.v2019_04_01.models.AzureFirewallIPConfiguration]
:ivar provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
:param threat_intel_mode: The operation mode for Threat Intelligence. Possible values include:
"Alert", "Deny", "Off".
:type threat_intel_mode: str or
~azure.mgmt.network.v2019_04_01.models.AzureFirewallThreatIntelMode
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'zones': {'key': 'zones', 'type': '[str]'},
'etag': {'key': 'etag', 'type': 'str'},
'application_rule_collections': {'key': 'properties.applicationRuleCollections', 'type': '[AzureFirewallApplicationRuleCollection]'},
'nat_rule_collections': {'key': 'properties.natRuleCollections', 'type': '[AzureFirewallNatRuleCollection]'},
'network_rule_collections': {'key': 'properties.networkRuleCollections', 'type': '[AzureFirewallNetworkRuleCollection]'},
'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[AzureFirewallIPConfiguration]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'threat_intel_mode': {'key': 'properties.threatIntelMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewall, self).__init__(**kwargs)
self.zones = kwargs.get('zones', None)
self.etag = None
self.application_rule_collections = kwargs.get('application_rule_collections', None)
self.nat_rule_collections = kwargs.get('nat_rule_collections', None)
self.network_rule_collections = kwargs.get('network_rule_collections', None)
self.ip_configurations = kwargs.get('ip_configurations', None)
self.provisioning_state = None
self.threat_intel_mode = kwargs.get('threat_intel_mode', None)
class AzureFirewallApplicationRule(msrest.serialization.Model):
"""Properties of an application rule.
:param name: Name of the application rule.
:type name: str
:param description: Description of the rule.
:type description: str
:param source_addresses: List of source IP addresses for this rule.
:type source_addresses: list[str]
:param protocols: Array of ApplicationRuleProtocols.
:type protocols:
list[~azure.mgmt.network.v2019_04_01.models.AzureFirewallApplicationRuleProtocol]
:param target_fqdns: List of FQDNs for this rule.
:type target_fqdns: list[str]
:param fqdn_tags: List of FQDN Tags for this rule.
:type fqdn_tags: list[str]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'source_addresses': {'key': 'sourceAddresses', 'type': '[str]'},
'protocols': {'key': 'protocols', 'type': '[AzureFirewallApplicationRuleProtocol]'},
'target_fqdns': {'key': 'targetFqdns', 'type': '[str]'},
'fqdn_tags': {'key': 'fqdnTags', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallApplicationRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.source_addresses = kwargs.get('source_addresses', None)
self.protocols = kwargs.get('protocols', None)
self.target_fqdns = kwargs.get('target_fqdns', None)
self.fqdn_tags = kwargs.get('fqdn_tags', None)
class AzureFirewallApplicationRuleCollection(SubResource):
"""Application rule collection resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param priority: Priority of the application rule collection resource.
:type priority: int
:param action: The action type of a rule collection.
:type action: ~azure.mgmt.network.v2019_04_01.models.AzureFirewallRCAction
:param rules: Collection of rules used by an application rule collection.
:type rules: list[~azure.mgmt.network.v2019_04_01.models.AzureFirewallApplicationRule]
:ivar provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'priority': {'maximum': 65000, 'minimum': 100},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'priority': {'key': 'properties.priority', 'type': 'int'},
'action': {'key': 'properties.action', 'type': 'AzureFirewallRCAction'},
'rules': {'key': 'properties.rules', 'type': '[AzureFirewallApplicationRule]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallApplicationRuleCollection, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.priority = kwargs.get('priority', None)
self.action = kwargs.get('action', None)
self.rules = kwargs.get('rules', None)
self.provisioning_state = None
class AzureFirewallApplicationRuleProtocol(msrest.serialization.Model):
"""Properties of the application rule protocol.
:param protocol_type: Protocol type. Possible values include: "Http", "Https".
:type protocol_type: str or
~azure.mgmt.network.v2019_04_01.models.AzureFirewallApplicationRuleProtocolType
:param port: Port number for the protocol, cannot be greater than 64000. This field is
optional.
:type port: int
"""
_validation = {
'port': {'maximum': 64000, 'minimum': 0},
}
_attribute_map = {
'protocol_type': {'key': 'protocolType', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallApplicationRuleProtocol, self).__init__(**kwargs)
self.protocol_type = kwargs.get('protocol_type', None)
self.port = kwargs.get('port', None)
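# Illustrative usage sketch (not part of the generated model code): an Azure
# Firewall application rule collection that allows HTTPS to a set of FQDNs.
# AzureFirewallRCAction is defined later in this module; the addresses, FQDNs,
# and priority below are hypothetical.
def _example_firewall_application_rule_collection():
    https = AzureFirewallApplicationRuleProtocol(protocol_type='Https', port=443)
    rule = AzureFirewallApplicationRule(
        name='allow-microsoft', source_addresses=['10.0.0.0/24'],
        protocols=[https], target_fqdns=['*.microsoft.com'])
    return AzureFirewallApplicationRuleCollection(
        name='app-rule-collection', priority=100,
        action=AzureFirewallRCAction(type='Allow'), rules=[rule])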
class AzureFirewallFqdnTag(Resource):
"""Azure Firewall FQDN Tag Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar provisioning_state: The provisioning state of the resource.
:vartype provisioning_state: str
:ivar fqdn_tag_name: The name of this FQDN Tag.
:vartype fqdn_tag_name: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
'fqdn_tag_name': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'fqdn_tag_name': {'key': 'properties.fqdnTagName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallFqdnTag, self).__init__(**kwargs)
self.etag = None
self.provisioning_state = None
self.fqdn_tag_name = None
class AzureFirewallFqdnTagListResult(msrest.serialization.Model):
"""Response for ListAzureFirewallFqdnTags API service call.
:param value: List of Azure Firewall FQDN Tags in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.AzureFirewallFqdnTag]
:param next_link: URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[AzureFirewallFqdnTag]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallFqdnTagListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class AzureFirewallIPConfiguration(SubResource):
"""IP configuration of an Azure Firewall.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the resource that is unique within a resource group. This name can be used
to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar private_ip_address: The Firewall Internal Load Balancer IP to be used as the next hop in
User Defined Routes.
:vartype private_ip_address: str
:param subnet: Reference of the subnet resource. This resource must be named
'AzureFirewallSubnet'.
:type subnet: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param public_ip_address: Reference of the PublicIP resource. This field is a mandatory input
if subnet is not null.
:type public_ip_address: ~azure.mgmt.network.v2019_04_01.models.SubResource
:ivar provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'private_ip_address': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'private_ip_address': {'key': 'properties.privateIPAddress', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'SubResource'},
'public_ip_address': {'key': 'properties.publicIPAddress', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallIPConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.private_ip_address = None
self.subnet = kwargs.get('subnet', None)
self.public_ip_address = kwargs.get('public_ip_address', None)
self.provisioning_state = None
class AzureFirewallListResult(msrest.serialization.Model):
"""Response for ListAzureFirewalls API service call.
:param value: List of Azure Firewalls in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.AzureFirewall]
:param next_link: URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[AzureFirewall]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class AzureFirewallNatRCAction(msrest.serialization.Model):
"""AzureFirewall NAT Rule Collection Action.
:param type: The type of action. Possible values include: "Snat", "Dnat".
:type type: str or ~azure.mgmt.network.v2019_04_01.models.AzureFirewallNatRCActionType
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallNatRCAction, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
class AzureFirewallNatRule(msrest.serialization.Model):
"""Properties of a NAT rule.
:param name: Name of the NAT rule.
:type name: str
:param description: Description of the rule.
:type description: str
:param source_addresses: List of source IP addresses for this rule.
:type source_addresses: list[str]
:param destination_addresses: List of destination IP addresses for this rule. Supports IP
ranges, prefixes, and service tags.
:type destination_addresses: list[str]
:param destination_ports: List of destination ports.
:type destination_ports: list[str]
:param protocols: Array of AzureFirewallNetworkRuleProtocols applicable to this NAT rule.
:type protocols: list[str or
~azure.mgmt.network.v2019_04_01.models.AzureFirewallNetworkRuleProtocol]
:param translated_address: The translated address for this NAT rule.
:type translated_address: str
:param translated_port: The translated port for this NAT rule.
:type translated_port: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'source_addresses': {'key': 'sourceAddresses', 'type': '[str]'},
'destination_addresses': {'key': 'destinationAddresses', 'type': '[str]'},
'destination_ports': {'key': 'destinationPorts', 'type': '[str]'},
'protocols': {'key': 'protocols', 'type': '[str]'},
'translated_address': {'key': 'translatedAddress', 'type': 'str'},
'translated_port': {'key': 'translatedPort', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallNatRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.source_addresses = kwargs.get('source_addresses', None)
self.destination_addresses = kwargs.get('destination_addresses', None)
self.destination_ports = kwargs.get('destination_ports', None)
self.protocols = kwargs.get('protocols', None)
self.translated_address = kwargs.get('translated_address', None)
self.translated_port = kwargs.get('translated_port', None)
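# Illustrative usage sketch (not part of the generated model code): a DNAT rule
# forwarding inbound TCP 443 on a firewall public IP to an internal address.
# All addresses and ports are hypothetical.
def _example_firewall_nat_rule():
    return AzureFirewallNatRule(
        name='dnat-https', source_addresses=['*'],
        destination_addresses=['40.112.0.10'], destination_ports=['443'],
        protocols=['TCP'], translated_address='10.0.1.4', translated_port='443')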
class AzureFirewallNatRuleCollection(SubResource):
"""NAT rule collection resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param priority: Priority of the NAT rule collection resource.
:type priority: int
:param action: The action type of a NAT rule collection.
:type action: ~azure.mgmt.network.v2019_04_01.models.AzureFirewallNatRCAction
:param rules: Collection of rules used by a NAT rule collection.
:type rules: list[~azure.mgmt.network.v2019_04_01.models.AzureFirewallNatRule]
:ivar provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'priority': {'maximum': 65000, 'minimum': 100},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'priority': {'key': 'properties.priority', 'type': 'int'},
'action': {'key': 'properties.action', 'type': 'AzureFirewallNatRCAction'},
'rules': {'key': 'properties.rules', 'type': '[AzureFirewallNatRule]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallNatRuleCollection, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.priority = kwargs.get('priority', None)
self.action = kwargs.get('action', None)
self.rules = kwargs.get('rules', None)
self.provisioning_state = None
class AzureFirewallNetworkRule(msrest.serialization.Model):
"""Properties of the network rule.
:param name: Name of the network rule.
:type name: str
:param description: Description of the rule.
:type description: str
:param protocols: Array of AzureFirewallNetworkRuleProtocols.
:type protocols: list[str or
~azure.mgmt.network.v2019_04_01.models.AzureFirewallNetworkRuleProtocol]
:param source_addresses: List of source IP addresses for this rule.
:type source_addresses: list[str]
:param destination_addresses: List of destination IP addresses.
:type destination_addresses: list[str]
:param destination_ports: List of destination ports.
:type destination_ports: list[str]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'protocols': {'key': 'protocols', 'type': '[str]'},
'source_addresses': {'key': 'sourceAddresses', 'type': '[str]'},
'destination_addresses': {'key': 'destinationAddresses', 'type': '[str]'},
'destination_ports': {'key': 'destinationPorts', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallNetworkRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.protocols = kwargs.get('protocols', None)
self.source_addresses = kwargs.get('source_addresses', None)
self.destination_addresses = kwargs.get('destination_addresses', None)
self.destination_ports = kwargs.get('destination_ports', None)
class AzureFirewallNetworkRuleCollection(SubResource):
"""Network rule collection resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param priority: Priority of the network rule collection resource.
:type priority: int
:param action: The action type of a rule collection.
:type action: ~azure.mgmt.network.v2019_04_01.models.AzureFirewallRCAction
:param rules: Collection of rules used by a network rule collection.
:type rules: list[~azure.mgmt.network.v2019_04_01.models.AzureFirewallNetworkRule]
:ivar provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'priority': {'maximum': 65000, 'minimum': 100},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'priority': {'key': 'properties.priority', 'type': 'int'},
'action': {'key': 'properties.action', 'type': 'AzureFirewallRCAction'},
'rules': {'key': 'properties.rules', 'type': '[AzureFirewallNetworkRule]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallNetworkRuleCollection, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.priority = kwargs.get('priority', None)
self.action = kwargs.get('action', None)
self.rules = kwargs.get('rules', None)
self.provisioning_state = None
class AzureFirewallRCAction(msrest.serialization.Model):
"""Properties of the AzureFirewallRCAction.
:param type: The type of action. Possible values include: "Allow", "Deny".
:type type: str or ~azure.mgmt.network.v2019_04_01.models.AzureFirewallRCActionType
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallRCAction, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
class AzureReachabilityReport(msrest.serialization.Model):
"""Azure reachability report details.
All required parameters must be populated in order to send to Azure.
:param aggregation_level: Required. The aggregation level of Azure reachability report. Can be
Country, State or City.
:type aggregation_level: str
:param provider_location: Required. Parameters that define a geographic location.
:type provider_location: ~azure.mgmt.network.v2019_04_01.models.AzureReachabilityReportLocation
:param reachability_report: Required. List of Azure reachability report items.
:type reachability_report:
list[~azure.mgmt.network.v2019_04_01.models.AzureReachabilityReportItem]
"""
_validation = {
'aggregation_level': {'required': True},
'provider_location': {'required': True},
'reachability_report': {'required': True},
}
_attribute_map = {
'aggregation_level': {'key': 'aggregationLevel', 'type': 'str'},
'provider_location': {'key': 'providerLocation', 'type': 'AzureReachabilityReportLocation'},
'reachability_report': {'key': 'reachabilityReport', 'type': '[AzureReachabilityReportItem]'},
}
def __init__(
self,
**kwargs
):
super(AzureReachabilityReport, self).__init__(**kwargs)
self.aggregation_level = kwargs['aggregation_level']
self.provider_location = kwargs['provider_location']
self.reachability_report = kwargs['reachability_report']
class AzureReachabilityReportItem(msrest.serialization.Model):
"""Azure reachability report details for a given provider location.
:param provider: The Internet service provider.
:type provider: str
:param azure_location: The Azure region.
:type azure_location: str
:param latencies: List of latency details for each of the time series.
:type latencies:
list[~azure.mgmt.network.v2019_04_01.models.AzureReachabilityReportLatencyInfo]
"""
_attribute_map = {
'provider': {'key': 'provider', 'type': 'str'},
'azure_location': {'key': 'azureLocation', 'type': 'str'},
'latencies': {'key': 'latencies', 'type': '[AzureReachabilityReportLatencyInfo]'},
}
def __init__(
self,
**kwargs
):
super(AzureReachabilityReportItem, self).__init__(**kwargs)
self.provider = kwargs.get('provider', None)
self.azure_location = kwargs.get('azure_location', None)
self.latencies = kwargs.get('latencies', None)
class AzureReachabilityReportLatencyInfo(msrest.serialization.Model):
"""Details on latency for a time series.
:param time_stamp: The time stamp.
:type time_stamp: ~datetime.datetime
:param score: The relative latency score between 1 and 100, higher values indicating a faster
connection.
:type score: int
"""
_validation = {
'score': {'maximum': 100, 'minimum': 1},
}
_attribute_map = {
'time_stamp': {'key': 'timeStamp', 'type': 'iso-8601'},
'score': {'key': 'score', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(AzureReachabilityReportLatencyInfo, self).__init__(**kwargs)
self.time_stamp = kwargs.get('time_stamp', None)
self.score = kwargs.get('score', None)
class AzureReachabilityReportLocation(msrest.serialization.Model):
"""Parameters that define a geographic location.
All required parameters must be populated in order to send to Azure.
:param country: Required. The name of the country.
:type country: str
:param state: The name of the state.
:type state: str
:param city: The name of the city or town.
:type city: str
"""
_validation = {
'country': {'required': True},
}
_attribute_map = {
'country': {'key': 'country', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'city': {'key': 'city', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureReachabilityReportLocation, self).__init__(**kwargs)
self.country = kwargs['country']
self.state = kwargs.get('state', None)
self.city = kwargs.get('city', None)
class AzureReachabilityReportParameters(msrest.serialization.Model):
"""Geographic and time constraints for Azure reachability report.
All required parameters must be populated in order to send to Azure.
:param provider_location: Required. Parameters that define a geographic location.
:type provider_location: ~azure.mgmt.network.v2019_04_01.models.AzureReachabilityReportLocation
:param providers: List of Internet service providers.
:type providers: list[str]
:param azure_locations: Optional Azure regions to scope the query to.
:type azure_locations: list[str]
:param start_time: Required. The start time for the Azure reachability report.
:type start_time: ~datetime.datetime
:param end_time: Required. The end time for the Azure reachability report.
:type end_time: ~datetime.datetime
"""
_validation = {
'provider_location': {'required': True},
'start_time': {'required': True},
'end_time': {'required': True},
}
_attribute_map = {
'provider_location': {'key': 'providerLocation', 'type': 'AzureReachabilityReportLocation'},
'providers': {'key': 'providers', 'type': '[str]'},
'azure_locations': {'key': 'azureLocations', 'type': '[str]'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
super(AzureReachabilityReportParameters, self).__init__(**kwargs)
self.provider_location = kwargs['provider_location']
self.providers = kwargs.get('providers', None)
self.azure_locations = kwargs.get('azure_locations', None)
self.start_time = kwargs['start_time']
self.end_time = kwargs['end_time']
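# Illustrative usage sketch (not part of the generated models): assembling the
# required parameters for a reachability report query. The provider, location and
# time window are hypothetical example values.
#
#   import datetime
#
#   params = AzureReachabilityReportParameters(
#       provider_location=AzureReachabilityReportLocation(country="United States", state="washington"),
#       providers=["Contoso ISP"],
#       start_time=datetime.datetime(2019, 4, 1),
#       end_time=datetime.datetime(2019, 4, 7),
#   )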
class BackendAddressPool(SubResource):
"""Pool of backend IP addresses.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Gets the name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:ivar backend_ip_configurations: Gets collection of references to IP addresses defined in
network interfaces.
:vartype backend_ip_configurations:
list[~azure.mgmt.network.v2019_04_01.models.NetworkInterfaceIPConfiguration]
:ivar load_balancing_rules: Gets load balancing rules that use this backend address pool.
:vartype load_balancing_rules: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:ivar outbound_rule: Gets the outbound rule that uses this backend address pool.
:vartype outbound_rule: ~azure.mgmt.network.v2019_04_01.models.SubResource
:ivar outbound_rules: Gets outbound rules that use this backend address pool.
:vartype outbound_rules: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:param provisioning_state: Gets the provisioning state of the backend address pool resource.
Possible values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_validation = {
'backend_ip_configurations': {'readonly': True},
'load_balancing_rules': {'readonly': True},
'outbound_rule': {'readonly': True},
'outbound_rules': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'backend_ip_configurations': {'key': 'properties.backendIPConfigurations', 'type': '[NetworkInterfaceIPConfiguration]'},
'load_balancing_rules': {'key': 'properties.loadBalancingRules', 'type': '[SubResource]'},
'outbound_rule': {'key': 'properties.outboundRule', 'type': 'SubResource'},
'outbound_rules': {'key': 'properties.outboundRules', 'type': '[SubResource]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(BackendAddressPool, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.backend_ip_configurations = None
self.load_balancing_rules = None
self.outbound_rule = None
self.outbound_rules = None
self.provisioning_state = kwargs.get('provisioning_state', None)
class BastionHost(Resource):
"""Bastion Host resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param ip_configurations: IP configuration of the Bastion Host resource.
:type ip_configurations:
list[~azure.mgmt.network.v2019_04_01.models.BastionHostIPConfiguration]
:param dns_name: FQDN for the endpoint on which bastion host is accessible.
:type dns_name: str
:ivar provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[BastionHostIPConfiguration]'},
'dns_name': {'key': 'properties.dnsName', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(BastionHost, self).__init__(**kwargs)
self.etag = None
self.ip_configurations = kwargs.get('ip_configurations', None)
self.dns_name = kwargs.get('dns_name', None)
self.provisioning_state = None
class BastionHostIPConfiguration(SubResource):
"""IP configuration of an Bastion Host.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the resource that is unique within a resource group. This name can be used
to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Ip configuration type.
:vartype type: str
:param subnet: Reference of the subnet resource.
:type subnet: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param public_ip_address: Reference of the PublicIP resource.
:type public_ip_address: ~azure.mgmt.network.v2019_04_01.models.SubResource
:ivar provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
:param private_ip_allocation_method: Private IP allocation method. Possible values include:
"Static", "Dynamic".
:type private_ip_allocation_method: str or
~azure.mgmt.network.v2019_04_01.models.IPAllocationMethod
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'SubResource'},
'public_ip_address': {'key': 'properties.publicIPAddress', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'private_ip_allocation_method': {'key': 'properties.privateIPAllocationMethod', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(BastionHostIPConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.subnet = kwargs.get('subnet', None)
self.public_ip_address = kwargs.get('public_ip_address', None)
self.provisioning_state = None
self.private_ip_allocation_method = kwargs.get('private_ip_allocation_method', None)
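# Illustrative usage sketch (not part of the generated models): a Bastion Host with
# a single IP configuration referencing an existing subnet and public IP. The
# subscription and resource IDs are placeholders.
#
#   bastion = BastionHost(
#       location="westus",
#       ip_configurations=[
#           BastionHostIPConfiguration(
#               name="bastion-ipconfig",
#               subnet=SubResource(id="/subscriptions/<sub>/.../subnets/AzureBastionSubnet"),
#               public_ip_address=SubResource(id="/subscriptions/<sub>/.../publicIPAddresses/bastion-pip"),
#           )
#       ],
#   )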
class BastionHostListResult(msrest.serialization.Model):
"""Response for ListBastionHosts API service call.
:param value: List of Bastion Hosts in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.BastionHost]
:param next_link: URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[BastionHost]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(BastionHostListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class BGPCommunity(msrest.serialization.Model):
"""Contains bgp community information offered in Service Community resources.
:param service_supported_region: The region which the service supports, e.g. for O365 the
region is Global.
:type service_supported_region: str
:param community_name: The name of the bgp community. e.g. Skype.
:type community_name: str
:param community_value: The value of the bgp community. For more information:
https://docs.microsoft.com/en-us/azure/expressroute/expressroute-routing.
:type community_value: str
:param community_prefixes: The prefixes that the bgp community contains.
:type community_prefixes: list[str]
:param is_authorized_to_use: Customer is authorized to use bgp community or not.
:type is_authorized_to_use: bool
:param service_group: The service group to which the bgp community belongs.
:type service_group: str
"""
_attribute_map = {
'service_supported_region': {'key': 'serviceSupportedRegion', 'type': 'str'},
'community_name': {'key': 'communityName', 'type': 'str'},
'community_value': {'key': 'communityValue', 'type': 'str'},
'community_prefixes': {'key': 'communityPrefixes', 'type': '[str]'},
'is_authorized_to_use': {'key': 'isAuthorizedToUse', 'type': 'bool'},
'service_group': {'key': 'serviceGroup', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(BGPCommunity, self).__init__(**kwargs)
self.service_supported_region = kwargs.get('service_supported_region', None)
self.community_name = kwargs.get('community_name', None)
self.community_value = kwargs.get('community_value', None)
self.community_prefixes = kwargs.get('community_prefixes', None)
self.is_authorized_to_use = kwargs.get('is_authorized_to_use', None)
self.service_group = kwargs.get('service_group', None)
class BgpPeerStatus(msrest.serialization.Model):
"""BGP peer status details.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar local_address: The virtual network gateway's local address.
:vartype local_address: str
:ivar neighbor: The remote BGP peer.
:vartype neighbor: str
:ivar asn: The autonomous system number of the remote BGP peer.
:vartype asn: int
:ivar state: The BGP peer state. Possible values include: "Unknown", "Stopped", "Idle",
"Connecting", "Connected".
:vartype state: str or ~azure.mgmt.network.v2019_04_01.models.BgpPeerState
:ivar connected_duration: For how long the peering has been up.
:vartype connected_duration: str
:ivar routes_received: The number of routes learned from this peer.
:vartype routes_received: long
:ivar messages_sent: The number of BGP messages sent.
:vartype messages_sent: long
:ivar messages_received: The number of BGP messages received.
:vartype messages_received: long
"""
_validation = {
'local_address': {'readonly': True},
'neighbor': {'readonly': True},
'asn': {'readonly': True},
'state': {'readonly': True},
'connected_duration': {'readonly': True},
'routes_received': {'readonly': True},
'messages_sent': {'readonly': True},
'messages_received': {'readonly': True},
}
_attribute_map = {
'local_address': {'key': 'localAddress', 'type': 'str'},
'neighbor': {'key': 'neighbor', 'type': 'str'},
'asn': {'key': 'asn', 'type': 'int'},
'state': {'key': 'state', 'type': 'str'},
'connected_duration': {'key': 'connectedDuration', 'type': 'str'},
'routes_received': {'key': 'routesReceived', 'type': 'long'},
'messages_sent': {'key': 'messagesSent', 'type': 'long'},
'messages_received': {'key': 'messagesReceived', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
super(BgpPeerStatus, self).__init__(**kwargs)
self.local_address = None
self.neighbor = None
self.asn = None
self.state = None
self.connected_duration = None
self.routes_received = None
self.messages_sent = None
self.messages_received = None
class BgpPeerStatusListResult(msrest.serialization.Model):
"""Response for list BGP peer status API service call.
:param value: List of BGP peers.
:type value: list[~azure.mgmt.network.v2019_04_01.models.BgpPeerStatus]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[BgpPeerStatus]'},
}
def __init__(
self,
**kwargs
):
super(BgpPeerStatusListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
class BgpServiceCommunity(Resource):
"""Service Community Properties.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param service_name: The name of the bgp community. e.g. Skype.
:type service_name: str
:param bgp_communities: A list of bgp communities.
:type bgp_communities: list[~azure.mgmt.network.v2019_04_01.models.BGPCommunity]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'service_name': {'key': 'properties.serviceName', 'type': 'str'},
'bgp_communities': {'key': 'properties.bgpCommunities', 'type': '[BGPCommunity]'},
}
def __init__(
self,
**kwargs
):
super(BgpServiceCommunity, self).__init__(**kwargs)
self.service_name = kwargs.get('service_name', None)
self.bgp_communities = kwargs.get('bgp_communities', None)
class BgpServiceCommunityListResult(msrest.serialization.Model):
"""Response for the ListServiceCommunity API service call.
:param value: A list of service community resources.
:type value: list[~azure.mgmt.network.v2019_04_01.models.BgpServiceCommunity]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[BgpServiceCommunity]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(BgpServiceCommunityListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class BgpSettings(msrest.serialization.Model):
"""BGP settings details.
:param asn: The BGP speaker's ASN.
:type asn: long
:param bgp_peering_address: The BGP peering address and BGP identifier of this BGP speaker.
:type bgp_peering_address: str
:param peer_weight: The weight added to routes learned from this BGP speaker.
:type peer_weight: int
"""
_attribute_map = {
'asn': {'key': 'asn', 'type': 'long'},
'bgp_peering_address': {'key': 'bgpPeeringAddress', 'type': 'str'},
'peer_weight': {'key': 'peerWeight', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(BgpSettings, self).__init__(**kwargs)
self.asn = kwargs.get('asn', None)
self.bgp_peering_address = kwargs.get('bgp_peering_address', None)
self.peer_weight = kwargs.get('peer_weight', None)
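# Illustrative usage sketch (not part of the generated models): minimal BGP settings
# for a virtual network gateway; the ASN and peering address are example values.
#
#   bgp = BgpSettings(asn=65515, bgp_peering_address="10.0.0.254", peer_weight=0)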
class CheckPrivateLinkServiceVisibilityRequest(msrest.serialization.Model):
"""Request body of the CheckPrivateLinkServiceVisibility API service call.
:param private_link_service_alias: The alias of the private link service.
:type private_link_service_alias: str
"""
_attribute_map = {
'private_link_service_alias': {'key': 'privateLinkServiceAlias', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(CheckPrivateLinkServiceVisibilityRequest, self).__init__(**kwargs)
self.private_link_service_alias = kwargs.get('private_link_service_alias', None)
class CloudErrorBody(msrest.serialization.Model):
"""An error response from the Batch service.
:param code: An identifier for the error. Codes are invariant and are intended to be consumed
programmatically.
:type code: str
:param message: A message describing the error, intended to be suitable for display in a user
interface.
:type message: str
:param target: The target of the particular error. For example, the name of the property in
error.
:type target: str
:param details: A list of additional details about the error.
:type details: list[~azure.mgmt.network.v2019_04_01.models.CloudErrorBody]
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[CloudErrorBody]'},
}
def __init__(
self,
**kwargs
):
super(CloudErrorBody, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.message = kwargs.get('message', None)
self.target = kwargs.get('target', None)
self.details = kwargs.get('details', None)
class Components1Jq1T4ISchemasManagedserviceidentityPropertiesUserassignedidentitiesAdditionalproperties(msrest.serialization.Model):
"""Components1Jq1T4ISchemasManagedserviceidentityPropertiesUserassignedidentitiesAdditionalproperties.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar principal_id: The principal id of user assigned identity.
:vartype principal_id: str
:ivar client_id: The client id of user assigned identity.
:vartype client_id: str
"""
_validation = {
'principal_id': {'readonly': True},
'client_id': {'readonly': True},
}
_attribute_map = {
'principal_id': {'key': 'principalId', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Components1Jq1T4ISchemasManagedserviceidentityPropertiesUserassignedidentitiesAdditionalproperties, self).__init__(**kwargs)
self.principal_id = None
self.client_id = None
class ConnectionMonitor(msrest.serialization.Model):
"""Parameters that define the operation to create a connection monitor.
All required parameters must be populated in order to send to Azure.
:param location: Connection monitor location.
:type location: str
:param tags: A set of tags. Connection monitor tags.
:type tags: dict[str, str]
:param source: Required. Describes the source of connection monitor.
:type source: ~azure.mgmt.network.v2019_04_01.models.ConnectionMonitorSource
:param destination: Required. Describes the destination of connection monitor.
:type destination: ~azure.mgmt.network.v2019_04_01.models.ConnectionMonitorDestination
:param auto_start: Determines if the connection monitor will start automatically once created.
:type auto_start: bool
:param monitoring_interval_in_seconds: Monitoring interval in seconds.
:type monitoring_interval_in_seconds: int
"""
_validation = {
'source': {'required': True},
'destination': {'required': True},
}
_attribute_map = {
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'source': {'key': 'properties.source', 'type': 'ConnectionMonitorSource'},
'destination': {'key': 'properties.destination', 'type': 'ConnectionMonitorDestination'},
'auto_start': {'key': 'properties.autoStart', 'type': 'bool'},
'monitoring_interval_in_seconds': {'key': 'properties.monitoringIntervalInSeconds', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ConnectionMonitor, self).__init__(**kwargs)
self.location = kwargs.get('location', None)
self.tags = kwargs.get('tags', None)
self.source = kwargs['source']
self.destination = kwargs['destination']
self.auto_start = kwargs.get('auto_start', True)
self.monitoring_interval_in_seconds = kwargs.get('monitoring_interval_in_seconds', 60)
class ConnectionMonitorDestination(msrest.serialization.Model):
"""Describes the destination of connection monitor.
:param resource_id: The ID of the resource used as the destination by connection monitor.
:type resource_id: str
:param address: Address of the connection monitor destination (IP or domain name).
:type address: str
:param port: The destination port used by connection monitor.
:type port: int
"""
_attribute_map = {
'resource_id': {'key': 'resourceId', 'type': 'str'},
'address': {'key': 'address', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ConnectionMonitorDestination, self).__init__(**kwargs)
self.resource_id = kwargs.get('resource_id', None)
self.address = kwargs.get('address', None)
self.port = kwargs.get('port', None)
class ConnectionMonitorListResult(msrest.serialization.Model):
"""List of connection monitors.
:param value: Information about connection monitors.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ConnectionMonitorResult]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ConnectionMonitorResult]'},
}
def __init__(
self,
**kwargs
):
super(ConnectionMonitorListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
class ConnectionMonitorParameters(msrest.serialization.Model):
"""Parameters that define the operation to create a connection monitor.
All required parameters must be populated in order to send to Azure.
:param source: Required. Describes the source of connection monitor.
:type source: ~azure.mgmt.network.v2019_04_01.models.ConnectionMonitorSource
:param destination: Required. Describes the destination of connection monitor.
:type destination: ~azure.mgmt.network.v2019_04_01.models.ConnectionMonitorDestination
:param auto_start: Determines if the connection monitor will start automatically once created.
:type auto_start: bool
:param monitoring_interval_in_seconds: Monitoring interval in seconds.
:type monitoring_interval_in_seconds: int
"""
_validation = {
'source': {'required': True},
'destination': {'required': True},
}
_attribute_map = {
'source': {'key': 'source', 'type': 'ConnectionMonitorSource'},
'destination': {'key': 'destination', 'type': 'ConnectionMonitorDestination'},
'auto_start': {'key': 'autoStart', 'type': 'bool'},
'monitoring_interval_in_seconds': {'key': 'monitoringIntervalInSeconds', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ConnectionMonitorParameters, self).__init__(**kwargs)
self.source = kwargs['source']
self.destination = kwargs['destination']
self.auto_start = kwargs.get('auto_start', True)
self.monitoring_interval_in_seconds = kwargs.get('monitoring_interval_in_seconds', 60)
class ConnectionMonitorQueryResult(msrest.serialization.Model):
"""List of connection states snapshots.
:param source_status: Status of connection monitor source. Possible values include: "Unknown",
"Active", "Inactive".
:type source_status: str or
~azure.mgmt.network.v2019_04_01.models.ConnectionMonitorSourceStatus
:param states: Information about connection states.
:type states: list[~azure.mgmt.network.v2019_04_01.models.ConnectionStateSnapshot]
"""
_attribute_map = {
'source_status': {'key': 'sourceStatus', 'type': 'str'},
'states': {'key': 'states', 'type': '[ConnectionStateSnapshot]'},
}
def __init__(
self,
**kwargs
):
super(ConnectionMonitorQueryResult, self).__init__(**kwargs)
self.source_status = kwargs.get('source_status', None)
self.states = kwargs.get('states', None)
class ConnectionMonitorResult(msrest.serialization.Model):
"""Information about the connection monitor.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar name: Name of the connection monitor.
:vartype name: str
:ivar id: ID of the connection monitor.
:vartype id: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:ivar type: Connection monitor type.
:vartype type: str
:param location: Connection monitor location.
:type location: str
:param tags: A set of tags. Connection monitor tags.
:type tags: dict[str, str]
:param source: Describes the source of connection monitor.
:type source: ~azure.mgmt.network.v2019_04_01.models.ConnectionMonitorSource
:param destination: Describes the destination of connection monitor.
:type destination: ~azure.mgmt.network.v2019_04_01.models.ConnectionMonitorDestination
:param auto_start: Determines if the connection monitor will start automatically once created.
:type auto_start: bool
:param monitoring_interval_in_seconds: Monitoring interval in seconds.
:type monitoring_interval_in_seconds: int
:ivar provisioning_state: The provisioning state of the connection monitor. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
:param start_time: The date and time when the connection monitor was started.
:type start_time: ~datetime.datetime
:param monitoring_status: The monitoring status of the connection monitor.
:type monitoring_status: str
"""
_validation = {
'name': {'readonly': True},
'id': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'source': {'key': 'properties.source', 'type': 'ConnectionMonitorSource'},
'destination': {'key': 'properties.destination', 'type': 'ConnectionMonitorDestination'},
'auto_start': {'key': 'properties.autoStart', 'type': 'bool'},
'monitoring_interval_in_seconds': {'key': 'properties.monitoringIntervalInSeconds', 'type': 'int'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'},
'monitoring_status': {'key': 'properties.monitoringStatus', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ConnectionMonitorResult, self).__init__(**kwargs)
self.name = None
self.id = None
self.etag = kwargs.get('etag', "A unique read-only string that changes whenever the resource is updated.")
self.type = None
self.location = kwargs.get('location', None)
self.tags = kwargs.get('tags', None)
self.source = kwargs.get('source', None)
self.destination = kwargs.get('destination', None)
self.auto_start = kwargs.get('auto_start', True)
self.monitoring_interval_in_seconds = kwargs.get('monitoring_interval_in_seconds', 60)
self.provisioning_state = None
self.start_time = kwargs.get('start_time', None)
self.monitoring_status = kwargs.get('monitoring_status', None)
class ConnectionMonitorResultProperties(ConnectionMonitorParameters):
"""Describes the properties of a connection monitor.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param source: Required. Describes the source of connection monitor.
:type source: ~azure.mgmt.network.v2019_04_01.models.ConnectionMonitorSource
:param destination: Required. Describes the destination of connection monitor.
:type destination: ~azure.mgmt.network.v2019_04_01.models.ConnectionMonitorDestination
:param auto_start: Determines if the connection monitor will start automatically once created.
:type auto_start: bool
:param monitoring_interval_in_seconds: Monitoring interval in seconds.
:type monitoring_interval_in_seconds: int
:ivar provisioning_state: The provisioning state of the connection monitor. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
:param start_time: The date and time when the connection monitor was started.
:type start_time: ~datetime.datetime
:param monitoring_status: The monitoring status of the connection monitor.
:type monitoring_status: str
"""
_validation = {
'source': {'required': True},
'destination': {'required': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'source': {'key': 'source', 'type': 'ConnectionMonitorSource'},
'destination': {'key': 'destination', 'type': 'ConnectionMonitorDestination'},
'auto_start': {'key': 'autoStart', 'type': 'bool'},
'monitoring_interval_in_seconds': {'key': 'monitoringIntervalInSeconds', 'type': 'int'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'monitoring_status': {'key': 'monitoringStatus', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ConnectionMonitorResultProperties, self).__init__(**kwargs)
self.provisioning_state = None
self.start_time = kwargs.get('start_time', None)
self.monitoring_status = kwargs.get('monitoring_status', None)
class ConnectionMonitorSource(msrest.serialization.Model):
"""Describes the source of connection monitor.
All required parameters must be populated in order to send to Azure.
:param resource_id: Required. The ID of the resource used as the source by connection monitor.
:type resource_id: str
:param port: The source port used by connection monitor.
:type port: int
"""
_validation = {
'resource_id': {'required': True},
}
_attribute_map = {
'resource_id': {'key': 'resourceId', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ConnectionMonitorSource, self).__init__(**kwargs)
self.resource_id = kwargs['resource_id']
self.port = kwargs.get('port', None)
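# Illustrative usage sketch (not part of the generated models): a connection monitor
# definition combining the source and destination models above. The resource ID,
# destination address and interval are placeholder values.
#
#   monitor = ConnectionMonitor(
#       location="westus",
#       source=ConnectionMonitorSource(
#           resource_id="/subscriptions/<sub>/.../virtualMachines/source-vm",
#       ),
#       destination=ConnectionMonitorDestination(address="www.contoso.com", port=443),
#       auto_start=True,
#       monitoring_interval_in_seconds=30,
#   )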
class ConnectionResetSharedKey(msrest.serialization.Model):
"""The virtual network connection reset shared key.
All required parameters must be populated in order to send to Azure.
:param key_length: Required. The virtual network connection reset shared key length, which
should be between 1 and 128.
:type key_length: int
"""
_validation = {
'key_length': {'required': True, 'maximum': 128, 'minimum': 1},
}
_attribute_map = {
'key_length': {'key': 'keyLength', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ConnectionResetSharedKey, self).__init__(**kwargs)
self.key_length = kwargs['key_length']
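# Illustrative usage sketch (not part of the generated models): key_length is checked
# against the 1-128 range declared in _validation when the model is serialized.
#
#   reset = ConnectionResetSharedKey(key_length=128)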
class ConnectionSharedKey(SubResource):
"""Response for GetConnectionSharedKey API service call.
All required parameters must be populated in order to send to Azure.
:param id: Resource ID.
:type id: str
:param value: Required. The virtual network connection shared key value.
:type value: str
"""
_validation = {
'value': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ConnectionSharedKey, self).__init__(**kwargs)
self.value = kwargs['value']
class ConnectionStateSnapshot(msrest.serialization.Model):
"""Connection state snapshot.
Variables are only populated by the server, and will be ignored when sending a request.
:param connection_state: The connection state. Possible values include: "Reachable",
"Unreachable", "Unknown".
:type connection_state: str or ~azure.mgmt.network.v2019_04_01.models.ConnectionState
:param start_time: The start time of the connection snapshot.
:type start_time: ~datetime.datetime
:param end_time: The end time of the connection snapshot.
:type end_time: ~datetime.datetime
:param evaluation_state: Connectivity analysis evaluation state. Possible values include:
"NotStarted", "InProgress", "Completed".
:type evaluation_state: str or ~azure.mgmt.network.v2019_04_01.models.EvaluationState
:param avg_latency_in_ms: Average latency in ms.
:type avg_latency_in_ms: int
:param min_latency_in_ms: Minimum latency in ms.
:type min_latency_in_ms: int
:param max_latency_in_ms: Maximum latency in ms.
:type max_latency_in_ms: int
:param probes_sent: The number of sent probes.
:type probes_sent: int
:param probes_failed: The number of failed probes.
:type probes_failed: int
:ivar hops: List of hops between the source and the destination.
:vartype hops: list[~azure.mgmt.network.v2019_04_01.models.ConnectivityHop]
"""
_validation = {
'hops': {'readonly': True},
}
_attribute_map = {
'connection_state': {'key': 'connectionState', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'evaluation_state': {'key': 'evaluationState', 'type': 'str'},
'avg_latency_in_ms': {'key': 'avgLatencyInMs', 'type': 'int'},
'min_latency_in_ms': {'key': 'minLatencyInMs', 'type': 'int'},
'max_latency_in_ms': {'key': 'maxLatencyInMs', 'type': 'int'},
'probes_sent': {'key': 'probesSent', 'type': 'int'},
'probes_failed': {'key': 'probesFailed', 'type': 'int'},
'hops': {'key': 'hops', 'type': '[ConnectivityHop]'},
}
def __init__(
self,
**kwargs
):
super(ConnectionStateSnapshot, self).__init__(**kwargs)
self.connection_state = kwargs.get('connection_state', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.evaluation_state = kwargs.get('evaluation_state', None)
self.avg_latency_in_ms = kwargs.get('avg_latency_in_ms', None)
self.min_latency_in_ms = kwargs.get('min_latency_in_ms', None)
self.max_latency_in_ms = kwargs.get('max_latency_in_ms', None)
self.probes_sent = kwargs.get('probes_sent', None)
self.probes_failed = kwargs.get('probes_failed', None)
self.hops = None
class ConnectivityDestination(msrest.serialization.Model):
"""Parameters that define destination of connection.
:param resource_id: The ID of the resource to which a connection attempt will be made.
:type resource_id: str
:param address: The IP address or URI of the resource to which a connection attempt will be
made.
:type address: str
:param port: Port on which check connectivity will be performed.
:type port: int
"""
_attribute_map = {
'resource_id': {'key': 'resourceId', 'type': 'str'},
'address': {'key': 'address', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ConnectivityDestination, self).__init__(**kwargs)
self.resource_id = kwargs.get('resource_id', None)
self.address = kwargs.get('address', None)
self.port = kwargs.get('port', None)
class ConnectivityHop(msrest.serialization.Model):
"""Information about a hop between the source and the destination.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar type: The type of the hop.
:vartype type: str
:ivar id: The ID of the hop.
:vartype id: str
:ivar address: The IP address of the hop.
:vartype address: str
:ivar resource_id: The ID of the resource corresponding to this hop.
:vartype resource_id: str
:ivar next_hop_ids: List of next hop identifiers.
:vartype next_hop_ids: list[str]
:ivar issues: List of issues.
:vartype issues: list[~azure.mgmt.network.v2019_04_01.models.ConnectivityIssue]
"""
_validation = {
'type': {'readonly': True},
'id': {'readonly': True},
'address': {'readonly': True},
'resource_id': {'readonly': True},
'next_hop_ids': {'readonly': True},
'issues': {'readonly': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'address': {'key': 'address', 'type': 'str'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
'next_hop_ids': {'key': 'nextHopIds', 'type': '[str]'},
'issues': {'key': 'issues', 'type': '[ConnectivityIssue]'},
}
def __init__(
self,
**kwargs
):
super(ConnectivityHop, self).__init__(**kwargs)
self.type = None
self.id = None
self.address = None
self.resource_id = None
self.next_hop_ids = None
self.issues = None
class ConnectivityInformation(msrest.serialization.Model):
"""Information on the connectivity status.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar hops: List of hops between the source and the destination.
:vartype hops: list[~azure.mgmt.network.v2019_04_01.models.ConnectivityHop]
:ivar connection_status: The connection status. Possible values include: "Unknown",
"Connected", "Disconnected", "Degraded".
:vartype connection_status: str or ~azure.mgmt.network.v2019_04_01.models.ConnectionStatus
:ivar avg_latency_in_ms: Average latency in milliseconds.
:vartype avg_latency_in_ms: int
:ivar min_latency_in_ms: Minimum latency in milliseconds.
:vartype min_latency_in_ms: int
:ivar max_latency_in_ms: Maximum latency in milliseconds.
:vartype max_latency_in_ms: int
:ivar probes_sent: Total number of probes sent.
:vartype probes_sent: int
:ivar probes_failed: Number of failed probes.
:vartype probes_failed: int
"""
_validation = {
'hops': {'readonly': True},
'connection_status': {'readonly': True},
'avg_latency_in_ms': {'readonly': True},
'min_latency_in_ms': {'readonly': True},
'max_latency_in_ms': {'readonly': True},
'probes_sent': {'readonly': True},
'probes_failed': {'readonly': True},
}
_attribute_map = {
'hops': {'key': 'hops', 'type': '[ConnectivityHop]'},
'connection_status': {'key': 'connectionStatus', 'type': 'str'},
'avg_latency_in_ms': {'key': 'avgLatencyInMs', 'type': 'int'},
'min_latency_in_ms': {'key': 'minLatencyInMs', 'type': 'int'},
'max_latency_in_ms': {'key': 'maxLatencyInMs', 'type': 'int'},
'probes_sent': {'key': 'probesSent', 'type': 'int'},
'probes_failed': {'key': 'probesFailed', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ConnectivityInformation, self).__init__(**kwargs)
self.hops = None
self.connection_status = None
self.avg_latency_in_ms = None
self.min_latency_in_ms = None
self.max_latency_in_ms = None
self.probes_sent = None
self.probes_failed = None
class ConnectivityIssue(msrest.serialization.Model):
"""Information about an issue encountered in the process of checking for connectivity.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar origin: The origin of the issue. Possible values include: "Local", "Inbound", "Outbound".
:vartype origin: str or ~azure.mgmt.network.v2019_04_01.models.Origin
:ivar severity: The severity of the issue. Possible values include: "Error", "Warning".
:vartype severity: str or ~azure.mgmt.network.v2019_04_01.models.Severity
:ivar type: The type of issue. Possible values include: "Unknown", "AgentStopped",
"GuestFirewall", "DnsResolution", "SocketBind", "NetworkSecurityRule", "UserDefinedRoute",
"PortThrottled", "Platform".
:vartype type: str or ~azure.mgmt.network.v2019_04_01.models.IssueType
:ivar context: Provides additional context on the issue.
:vartype context: list[dict[str, str]]
"""
_validation = {
'origin': {'readonly': True},
'severity': {'readonly': True},
'type': {'readonly': True},
'context': {'readonly': True},
}
_attribute_map = {
'origin': {'key': 'origin', 'type': 'str'},
'severity': {'key': 'severity', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'context': {'key': 'context', 'type': '[{str}]'},
}
def __init__(
self,
**kwargs
):
super(ConnectivityIssue, self).__init__(**kwargs)
self.origin = None
self.severity = None
self.type = None
self.context = None
class ConnectivityParameters(msrest.serialization.Model):
"""Parameters that determine how the connectivity check will be performed.
All required parameters must be populated in order to send to Azure.
:param source: Required. Describes the source of the connection.
:type source: ~azure.mgmt.network.v2019_04_01.models.ConnectivitySource
:param destination: Required. Describes the destination of connection.
:type destination: ~azure.mgmt.network.v2019_04_01.models.ConnectivityDestination
:param protocol: Network protocol. Possible values include: "Tcp", "Http", "Https", "Icmp".
:type protocol: str or ~azure.mgmt.network.v2019_04_01.models.Protocol
:param protocol_configuration: Configuration of the protocol.
:type protocol_configuration: ~azure.mgmt.network.v2019_04_01.models.ProtocolConfiguration
"""
_validation = {
'source': {'required': True},
'destination': {'required': True},
}
_attribute_map = {
'source': {'key': 'source', 'type': 'ConnectivitySource'},
'destination': {'key': 'destination', 'type': 'ConnectivityDestination'},
'protocol': {'key': 'protocol', 'type': 'str'},
'protocol_configuration': {'key': 'protocolConfiguration', 'type': 'ProtocolConfiguration'},
}
def __init__(
self,
**kwargs
):
super(ConnectivityParameters, self).__init__(**kwargs)
self.source = kwargs['source']
self.destination = kwargs['destination']
self.protocol = kwargs.get('protocol', None)
self.protocol_configuration = kwargs.get('protocol_configuration', None)
class ConnectivitySource(msrest.serialization.Model):
"""Parameters that define the source of the connection.
All required parameters must be populated in order to send to Azure.
:param resource_id: Required. The ID of the resource from which a connectivity check will be
initiated.
:type resource_id: str
:param port: The source port from which a connectivity check will be performed.
:type port: int
"""
_validation = {
'resource_id': {'required': True},
}
_attribute_map = {
'resource_id': {'key': 'resourceId', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ConnectivitySource, self).__init__(**kwargs)
self.resource_id = kwargs['resource_id']
self.port = kwargs.get('port', None)
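# Illustrative usage sketch (not part of the generated models): parameters for a
# connectivity check from a VM to an external endpoint over TCP. The resource ID and
# destination address are placeholders.
#
#   check = ConnectivityParameters(
#       source=ConnectivitySource(
#           resource_id="/subscriptions/<sub>/.../virtualMachines/source-vm",
#       ),
#       destination=ConnectivityDestination(address="www.contoso.com", port=443),
#       protocol="Tcp",
#   )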
class Container(SubResource):
"""Reference to container resource in remote resource provider.
:param id: Resource ID.
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Container, self).__init__(**kwargs)
class ContainerNetworkInterface(SubResource):
"""Container network interface child resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource. This name can be used to access the resource.
:type name: str
:ivar type: Sub Resource type.
:vartype type: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param container_network_interface_configuration: Container network interface configuration
from which this container network interface is created.
:type container_network_interface_configuration:
~azure.mgmt.network.v2019_04_01.models.ContainerNetworkInterfaceConfiguration
:param container: Reference to the container to which this container network interface is
attached.
:type container: ~azure.mgmt.network.v2019_04_01.models.Container
:param ip_configurations: Reference to the ip configuration on this container nic.
:type ip_configurations:
list[~azure.mgmt.network.v2019_04_01.models.ContainerNetworkInterfaceIpConfiguration]
:ivar provisioning_state: The provisioning state of the resource.
:vartype provisioning_state: str
"""
_validation = {
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'container_network_interface_configuration': {'key': 'properties.containerNetworkInterfaceConfiguration', 'type': 'ContainerNetworkInterfaceConfiguration'},
'container': {'key': 'properties.container', 'type': 'Container'},
'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[ContainerNetworkInterfaceIpConfiguration]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ContainerNetworkInterface, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = None
self.etag = kwargs.get('etag', None)
self.container_network_interface_configuration = kwargs.get('container_network_interface_configuration', None)
self.container = kwargs.get('container', None)
self.ip_configurations = kwargs.get('ip_configurations', None)
self.provisioning_state = None
class ContainerNetworkInterfaceConfiguration(SubResource):
"""Container network interface configuration child resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource. This name can be used to access the resource.
:type name: str
:ivar type: Sub Resource type.
:vartype type: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param ip_configurations: A list of ip configurations of the container network interface
configuration.
:type ip_configurations: list[~azure.mgmt.network.v2019_04_01.models.IPConfigurationProfile]
:param container_network_interfaces: A list of container network interfaces created from this
container network interface configuration.
:type container_network_interfaces: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:ivar provisioning_state: The provisioning state of the resource.
:vartype provisioning_state: str
"""
_validation = {
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[IPConfigurationProfile]'},
'container_network_interfaces': {'key': 'properties.containerNetworkInterfaces', 'type': '[SubResource]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ContainerNetworkInterfaceConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = None
self.etag = kwargs.get('etag', None)
self.ip_configurations = kwargs.get('ip_configurations', None)
self.container_network_interfaces = kwargs.get('container_network_interfaces', None)
self.provisioning_state = None
class ContainerNetworkInterfaceIpConfiguration(msrest.serialization.Model):
"""The ip configuration for a container network interface.
Variables are only populated by the server, and will be ignored when sending a request.
:param name: The name of the resource. This name can be used to access the resource.
:type name: str
:ivar type: Sub Resource type.
:vartype type: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:ivar provisioning_state: The provisioning state of the resource.
:vartype provisioning_state: str
"""
_validation = {
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ContainerNetworkInterfaceIpConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = None
self.etag = kwargs.get('etag', None)
self.provisioning_state = None
class DdosCustomPolicy(Resource):
"""A DDoS custom policy in a resource group.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar resource_guid: The resource GUID property of the DDoS custom policy resource. It uniquely
identifies the resource, even if the user changes its name or migrates the resource across
subscriptions or resource groups.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the DDoS custom policy resource. Possible
values are: 'Succeeded', 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
:ivar public_ip_addresses: The list of public IPs associated with the DDoS custom policy
resource. This list is read-only.
:vartype public_ip_addresses: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:param protocol_custom_settings: The protocol-specific DDoS policy customization parameters.
:type protocol_custom_settings:
list[~azure.mgmt.network.v2019_04_01.models.ProtocolCustomSettingsFormat]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
'public_ip_addresses': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'public_ip_addresses': {'key': 'properties.publicIPAddresses', 'type': '[SubResource]'},
'protocol_custom_settings': {'key': 'properties.protocolCustomSettings', 'type': '[ProtocolCustomSettingsFormat]'},
}
def __init__(
self,
**kwargs
):
super(DdosCustomPolicy, self).__init__(**kwargs)
self.etag = None
self.resource_guid = None
self.provisioning_state = None
self.public_ip_addresses = None
self.protocol_custom_settings = kwargs.get('protocol_custom_settings', None)
class DdosProtectionPlan(msrest.serialization.Model):
"""A DDoS protection plan in a resource group.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar resource_guid: The resource GUID property of the DDoS protection plan resource. It
uniquely identifies the resource, even if the user changes its name or migrates the resource
across subscriptions or resource groups.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the DDoS protection plan resource. Possible
values are: 'Succeeded', 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
:ivar virtual_networks: The list of virtual networks associated with the DDoS protection plan
resource. This list is read-only.
:vartype virtual_networks: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
'virtual_networks': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'virtual_networks': {'key': 'properties.virtualNetworks', 'type': '[SubResource]'},
}
def __init__(
self,
**kwargs
):
super(DdosProtectionPlan, self).__init__(**kwargs)
self.id = None
self.name = None
self.type = None
self.location = kwargs.get('location', None)
self.tags = kwargs.get('tags', None)
self.etag = None
self.resource_guid = None
self.provisioning_state = None
self.virtual_networks = None
class DdosProtectionPlanListResult(msrest.serialization.Model):
"""A list of DDoS protection plans.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of DDoS protection plans.
:type value: list[~azure.mgmt.network.v2019_04_01.models.DdosProtectionPlan]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[DdosProtectionPlan]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(DdosProtectionPlanListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class DdosSettings(msrest.serialization.Model):
"""Contains the DDoS protection settings of the public IP.
:param ddos_custom_policy: The DDoS custom policy associated with the public IP.
:type ddos_custom_policy: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param protection_coverage: The DDoS protection policy customizability of the public IP. Only
standard coverage will have the ability to be customized. Possible values include: "Basic",
"Standard".
:type protection_coverage: str or
~azure.mgmt.network.v2019_04_01.models.DdosSettingsProtectionCoverage
"""
_attribute_map = {
'ddos_custom_policy': {'key': 'ddosCustomPolicy', 'type': 'SubResource'},
'protection_coverage': {'key': 'protectionCoverage', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(DdosSettings, self).__init__(**kwargs)
self.ddos_custom_policy = kwargs.get('ddos_custom_policy', None)
self.protection_coverage = kwargs.get('protection_coverage', None)
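# Illustrative usage sketch (not part of the generated models): DDoS settings pointing
# a public IP at a custom policy; the policy resource ID is a placeholder.
#
#   ddos = DdosSettings(
#       ddos_custom_policy=SubResource(id="/subscriptions/<sub>/.../ddosCustomPolicies/my-policy"),
#       protection_coverage="Standard",
#   )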
class Delegation(SubResource):
"""Details the service to which the subnet is delegated.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a subnet. This name can be used to
access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param service_name: The name of the service to which the subnet should be delegated (e.g.
Microsoft.Sql/servers).
:type service_name: str
:param actions: Describes the actions permitted to the service upon delegation.
:type actions: list[str]
:ivar provisioning_state: The provisioning state of the resource.
:vartype provisioning_state: str
"""
_validation = {
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'service_name': {'key': 'properties.serviceName', 'type': 'str'},
'actions': {'key': 'properties.actions', 'type': '[str]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Delegation, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.service_name = kwargs.get('service_name', None)
self.actions = kwargs.get('actions', None)
self.provisioning_state = None
class DeviceProperties(msrest.serialization.Model):
"""List of properties of the device.
:param device_vendor: Name of the device vendor.
:type device_vendor: str
:param device_model: Model of the device.
:type device_model: str
:param link_speed_in_mbps: Link speed.
:type link_speed_in_mbps: int
"""
_attribute_map = {
'device_vendor': {'key': 'deviceVendor', 'type': 'str'},
'device_model': {'key': 'deviceModel', 'type': 'str'},
'link_speed_in_mbps': {'key': 'linkSpeedInMbps', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(DeviceProperties, self).__init__(**kwargs)
self.device_vendor = kwargs.get('device_vendor', None)
self.device_model = kwargs.get('device_model', None)
self.link_speed_in_mbps = kwargs.get('link_speed_in_mbps', None)
class DhcpOptions(msrest.serialization.Model):
"""DhcpOptions contains an array of DNS servers available to VMs deployed in the virtual network. Standard DHCP option for a subnet overrides VNET DHCP options.
:param dns_servers: The list of DNS servers IP addresses.
:type dns_servers: list[str]
"""
_attribute_map = {
'dns_servers': {'key': 'dnsServers', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(DhcpOptions, self).__init__(**kwargs)
self.dns_servers = kwargs.get('dns_servers', None)
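# Usage sketch (illustrative only; the DNS server addresses are placeholders):
#
#     dhcp_options = models.DhcpOptions(dns_servers=["10.0.0.4", "10.0.0.5"])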
class Dimension(msrest.serialization.Model):
"""Dimension of the metric.
:param name: The name of the dimension.
:type name: str
:param display_name: The display name of the dimension.
:type display_name: str
:param internal_name: The internal name of the dimension.
:type internal_name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'internal_name': {'key': 'internalName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Dimension, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.display_name = kwargs.get('display_name', None)
self.internal_name = kwargs.get('internal_name', None)
class DnsNameAvailabilityResult(msrest.serialization.Model):
"""Response for the CheckDnsNameAvailability API service call.
:param available: Domain availability (True/False).
:type available: bool
"""
_attribute_map = {
'available': {'key': 'available', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(DnsNameAvailabilityResult, self).__init__(**kwargs)
self.available = kwargs.get('available', None)
class EffectiveNetworkSecurityGroup(msrest.serialization.Model):
"""Effective network security group.
:param network_security_group: The ID of network security group that is applied.
:type network_security_group: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param association: Associated resources.
:type association:
~azure.mgmt.network.v2019_04_01.models.EffectiveNetworkSecurityGroupAssociation
:param effective_security_rules: A collection of effective security rules.
:type effective_security_rules:
list[~azure.mgmt.network.v2019_04_01.models.EffectiveNetworkSecurityRule]
:param tag_map: Mapping of tags to list of IP Addresses included within the tag.
:type tag_map: str
"""
_attribute_map = {
'network_security_group': {'key': 'networkSecurityGroup', 'type': 'SubResource'},
'association': {'key': 'association', 'type': 'EffectiveNetworkSecurityGroupAssociation'},
'effective_security_rules': {'key': 'effectiveSecurityRules', 'type': '[EffectiveNetworkSecurityRule]'},
'tag_map': {'key': 'tagMap', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(EffectiveNetworkSecurityGroup, self).__init__(**kwargs)
self.network_security_group = kwargs.get('network_security_group', None)
self.association = kwargs.get('association', None)
self.effective_security_rules = kwargs.get('effective_security_rules', None)
self.tag_map = kwargs.get('tag_map', None)
class EffectiveNetworkSecurityGroupAssociation(msrest.serialization.Model):
"""The effective network security group association.
:param subnet: The ID of the subnet if assigned.
:type subnet: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param network_interface: The ID of the network interface if assigned.
:type network_interface: ~azure.mgmt.network.v2019_04_01.models.SubResource
"""
_attribute_map = {
'subnet': {'key': 'subnet', 'type': 'SubResource'},
'network_interface': {'key': 'networkInterface', 'type': 'SubResource'},
}
def __init__(
self,
**kwargs
):
super(EffectiveNetworkSecurityGroupAssociation, self).__init__(**kwargs)
self.subnet = kwargs.get('subnet', None)
self.network_interface = kwargs.get('network_interface', None)
class EffectiveNetworkSecurityGroupListResult(msrest.serialization.Model):
"""Response for list effective network security groups API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of effective network security groups.
:type value: list[~azure.mgmt.network.v2019_04_01.models.EffectiveNetworkSecurityGroup]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[EffectiveNetworkSecurityGroup]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(EffectiveNetworkSecurityGroupListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class EffectiveNetworkSecurityRule(msrest.serialization.Model):
"""Effective network security rules.
:param name: The name of the security rule specified by the user (if created by the user).
:type name: str
:param protocol: The network protocol this rule applies to. Possible values include: "Tcp",
"Udp", "All".
:type protocol: str or ~azure.mgmt.network.v2019_04_01.models.EffectiveSecurityRuleProtocol
:param source_port_range: The source port or range.
:type source_port_range: str
:param destination_port_range: The destination port or range.
:type destination_port_range: str
:param source_port_ranges: The source port ranges. Expected values include a single integer
between 0 and 65535, a range using '-' as separator (e.g. 100-400), or an asterisk (*).
:type source_port_ranges: list[str]
:param destination_port_ranges: The destination port ranges. Expected values include a single
integer between 0 and 65535, a range using '-' as separator (e.g. 100-400), or an asterisk (*).
:type destination_port_ranges: list[str]
:param source_address_prefix: The source address prefix.
:type source_address_prefix: str
:param destination_address_prefix: The destination address prefix.
:type destination_address_prefix: str
:param source_address_prefixes: The source address prefixes. Expected values include CIDR IP
ranges, Default Tags (VirtualNetwork, AzureLoadBalancer, Internet), System Tags, and the
asterisk (*).
:type source_address_prefixes: list[str]
:param destination_address_prefixes: The destination address prefixes. Expected values include
CIDR IP ranges, Default Tags (VirtualNetwork, AzureLoadBalancer, Internet), System Tags, and
the asterisk (*).
:type destination_address_prefixes: list[str]
:param expanded_source_address_prefix: The expanded source address prefix.
:type expanded_source_address_prefix: list[str]
:param expanded_destination_address_prefix: Expanded destination address prefix.
:type expanded_destination_address_prefix: list[str]
:param access: Whether network traffic is allowed or denied. Possible values include: "Allow",
"Deny".
:type access: str or ~azure.mgmt.network.v2019_04_01.models.SecurityRuleAccess
:param priority: The priority of the rule.
:type priority: int
:param direction: The direction of the rule. Possible values include: "Inbound", "Outbound".
:type direction: str or ~azure.mgmt.network.v2019_04_01.models.SecurityRuleDirection
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'protocol': {'key': 'protocol', 'type': 'str'},
'source_port_range': {'key': 'sourcePortRange', 'type': 'str'},
'destination_port_range': {'key': 'destinationPortRange', 'type': 'str'},
'source_port_ranges': {'key': 'sourcePortRanges', 'type': '[str]'},
'destination_port_ranges': {'key': 'destinationPortRanges', 'type': '[str]'},
'source_address_prefix': {'key': 'sourceAddressPrefix', 'type': 'str'},
'destination_address_prefix': {'key': 'destinationAddressPrefix', 'type': 'str'},
'source_address_prefixes': {'key': 'sourceAddressPrefixes', 'type': '[str]'},
'destination_address_prefixes': {'key': 'destinationAddressPrefixes', 'type': '[str]'},
'expanded_source_address_prefix': {'key': 'expandedSourceAddressPrefix', 'type': '[str]'},
'expanded_destination_address_prefix': {'key': 'expandedDestinationAddressPrefix', 'type': '[str]'},
'access': {'key': 'access', 'type': 'str'},
'priority': {'key': 'priority', 'type': 'int'},
'direction': {'key': 'direction', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(EffectiveNetworkSecurityRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.protocol = kwargs.get('protocol', None)
self.source_port_range = kwargs.get('source_port_range', None)
self.destination_port_range = kwargs.get('destination_port_range', None)
self.source_port_ranges = kwargs.get('source_port_ranges', None)
self.destination_port_ranges = kwargs.get('destination_port_ranges', None)
self.source_address_prefix = kwargs.get('source_address_prefix', None)
self.destination_address_prefix = kwargs.get('destination_address_prefix', None)
self.source_address_prefixes = kwargs.get('source_address_prefixes', None)
self.destination_address_prefixes = kwargs.get('destination_address_prefixes', None)
self.expanded_source_address_prefix = kwargs.get('expanded_source_address_prefix', None)
self.expanded_destination_address_prefix = kwargs.get('expanded_destination_address_prefix', None)
self.access = kwargs.get('access', None)
self.priority = kwargs.get('priority', None)
self.direction = kwargs.get('direction', None)
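# Note: this model is normally returned by the service in an
# EffectiveNetworkSecurityGroupListResult rather than built by callers. A hypothetical
# construction, showing the documented port/address formats (single value, '-' range, '*'):
#
#     rule = models.EffectiveNetworkSecurityRule(
#         protocol="Tcp",
#         source_port_ranges=["0-65535"],
#         destination_port_ranges=["80", "443"],
#         source_address_prefixes=["VirtualNetwork", "10.0.0.0/24"],
#         destination_address_prefix="*",
#         access="Allow",
#         direction="Inbound",
#         priority=100,
#     )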
class EffectiveRoute(msrest.serialization.Model):
"""Effective Route.
:param name: The name of the user-defined route. This is optional.
:type name: str
:param disable_bgp_route_propagation: If true, on-premises routes are not propagated to the
network interfaces in the subnet.
:type disable_bgp_route_propagation: bool
:param source: Who created the route. Possible values include: "Unknown", "User",
"VirtualNetworkGateway", "Default".
:type source: str or ~azure.mgmt.network.v2019_04_01.models.EffectiveRouteSource
:param state: The value of effective route. Possible values include: "Active", "Invalid".
:type state: str or ~azure.mgmt.network.v2019_04_01.models.EffectiveRouteState
:param address_prefix: The address prefixes of the effective routes in CIDR notation.
:type address_prefix: list[str]
:param next_hop_ip_address: The IP address of the next hop of the effective route.
:type next_hop_ip_address: list[str]
:param next_hop_type: The type of Azure hop the packet should be sent to. Possible values
include: "VirtualNetworkGateway", "VnetLocal", "Internet", "VirtualAppliance", "None".
:type next_hop_type: str or ~azure.mgmt.network.v2019_04_01.models.RouteNextHopType
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'disable_bgp_route_propagation': {'key': 'disableBgpRoutePropagation', 'type': 'bool'},
'source': {'key': 'source', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'address_prefix': {'key': 'addressPrefix', 'type': '[str]'},
'next_hop_ip_address': {'key': 'nextHopIpAddress', 'type': '[str]'},
'next_hop_type': {'key': 'nextHopType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(EffectiveRoute, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.disable_bgp_route_propagation = kwargs.get('disable_bgp_route_propagation', None)
self.source = kwargs.get('source', None)
self.state = kwargs.get('state', None)
self.address_prefix = kwargs.get('address_prefix', None)
self.next_hop_ip_address = kwargs.get('next_hop_ip_address', None)
self.next_hop_type = kwargs.get('next_hop_type', None)
class EffectiveRouteListResult(msrest.serialization.Model):
"""Response for list effective route API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of effective routes.
:type value: list[~azure.mgmt.network.v2019_04_01.models.EffectiveRoute]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[EffectiveRoute]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(EffectiveRouteListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class EndpointServiceResult(SubResource):
"""Endpoint service.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Name of the endpoint service.
:vartype name: str
:ivar type: Type of the endpoint service.
:vartype type: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(EndpointServiceResult, self).__init__(**kwargs)
self.name = None
self.type = None
class EndpointServicesListResult(msrest.serialization.Model):
"""Response for the ListAvailableEndpointServices API service call.
:param value: List of available endpoint services in a region.
:type value: list[~azure.mgmt.network.v2019_04_01.models.EndpointServiceResult]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[EndpointServiceResult]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(EndpointServicesListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class Error(msrest.serialization.Model):
"""Common error representation.
:param code: Error code.
:type code: str
:param message: Error message.
:type message: str
:param target: Error target.
:type target: str
:param details: Error details.
:type details: list[~azure.mgmt.network.v2019_04_01.models.ErrorDetails]
:param inner_error: Inner error message.
:type inner_error: str
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[ErrorDetails]'},
'inner_error': {'key': 'innerError', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Error, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.message = kwargs.get('message', None)
self.target = kwargs.get('target', None)
self.details = kwargs.get('details', None)
self.inner_error = kwargs.get('inner_error', None)
class ErrorDetails(msrest.serialization.Model):
"""Common error details representation.
:param code: Error code.
:type code: str
:param target: Error target.
:type target: str
:param message: Error message.
:type message: str
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ErrorDetails, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.target = kwargs.get('target', None)
self.message = kwargs.get('message', None)
class ErrorResponse(msrest.serialization.Model):
"""The error object.
:param error: The error details object.
:type error: ~azure.mgmt.network.v2019_04_01.models.ErrorDetails
"""
_attribute_map = {
'error': {'key': 'error', 'type': 'ErrorDetails'},
}
def __init__(
self,
**kwargs
):
super(ErrorResponse, self).__init__(**kwargs)
self.error = kwargs.get('error', None)
class EvaluatedNetworkSecurityGroup(msrest.serialization.Model):
"""Results of network security group evaluation.
Variables are only populated by the server, and will be ignored when sending a request.
:param network_security_group_id: Network security group ID.
:type network_security_group_id: str
:param applied_to: Resource ID of nic or subnet to which network security group is applied.
:type applied_to: str
:param matched_rule: Matched network security rule.
:type matched_rule: ~azure.mgmt.network.v2019_04_01.models.MatchedRule
:ivar rules_evaluation_result: List of network security rules evaluation results.
:vartype rules_evaluation_result:
list[~azure.mgmt.network.v2019_04_01.models.NetworkSecurityRulesEvaluationResult]
"""
_validation = {
'rules_evaluation_result': {'readonly': True},
}
_attribute_map = {
'network_security_group_id': {'key': 'networkSecurityGroupId', 'type': 'str'},
'applied_to': {'key': 'appliedTo', 'type': 'str'},
'matched_rule': {'key': 'matchedRule', 'type': 'MatchedRule'},
'rules_evaluation_result': {'key': 'rulesEvaluationResult', 'type': '[NetworkSecurityRulesEvaluationResult]'},
}
def __init__(
self,
**kwargs
):
super(EvaluatedNetworkSecurityGroup, self).__init__(**kwargs)
self.network_security_group_id = kwargs.get('network_security_group_id', None)
self.applied_to = kwargs.get('applied_to', None)
self.matched_rule = kwargs.get('matched_rule', None)
self.rules_evaluation_result = None
class ExpressRouteCircuit(Resource):
"""ExpressRouteCircuit resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param sku: The SKU.
:type sku: ~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitSku
:ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param allow_classic_operations: Allow classic operations.
:type allow_classic_operations: bool
:param circuit_provisioning_state: The CircuitProvisioningState state of the resource.
:type circuit_provisioning_state: str
:param service_provider_provisioning_state: The ServiceProviderProvisioningState state of the
resource. Possible values include: "NotProvisioned", "Provisioning", "Provisioned",
"Deprovisioning".
:type service_provider_provisioning_state: str or
~azure.mgmt.network.v2019_04_01.models.ServiceProviderProvisioningState
:param authorizations: The list of authorizations.
:type authorizations:
list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitAuthorization]
:param peerings: The list of peerings.
:type peerings: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitPeering]
:param service_key: The ServiceKey.
:type service_key: str
:param service_provider_notes: The ServiceProviderNotes.
:type service_provider_notes: str
:param service_provider_properties: The ServiceProviderProperties.
:type service_provider_properties:
~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitServiceProviderProperties
:param express_route_port: The reference to the ExpressRoutePort resource when the circuit is
provisioned on an ExpressRoutePort resource.
:type express_route_port: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param bandwidth_in_gbps: The bandwidth of the circuit when the circuit is provisioned on an
ExpressRoutePort resource.
:type bandwidth_in_gbps: float
:ivar stag: The identifier of the circuit traffic. Outer tag for QinQ encapsulation.
:vartype stag: int
:param provisioning_state: The provisioning state of the resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
:param gateway_manager_etag: The GatewayManager Etag.
:type gateway_manager_etag: str
:param global_reach_enabled: Flag denoting Global reach status.
:type global_reach_enabled: bool
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'stag': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'sku': {'key': 'sku', 'type': 'ExpressRouteCircuitSku'},
'etag': {'key': 'etag', 'type': 'str'},
'allow_classic_operations': {'key': 'properties.allowClassicOperations', 'type': 'bool'},
'circuit_provisioning_state': {'key': 'properties.circuitProvisioningState', 'type': 'str'},
'service_provider_provisioning_state': {'key': 'properties.serviceProviderProvisioningState', 'type': 'str'},
'authorizations': {'key': 'properties.authorizations', 'type': '[ExpressRouteCircuitAuthorization]'},
'peerings': {'key': 'properties.peerings', 'type': '[ExpressRouteCircuitPeering]'},
'service_key': {'key': 'properties.serviceKey', 'type': 'str'},
'service_provider_notes': {'key': 'properties.serviceProviderNotes', 'type': 'str'},
'service_provider_properties': {'key': 'properties.serviceProviderProperties', 'type': 'ExpressRouteCircuitServiceProviderProperties'},
'express_route_port': {'key': 'properties.expressRoutePort', 'type': 'SubResource'},
'bandwidth_in_gbps': {'key': 'properties.bandwidthInGbps', 'type': 'float'},
'stag': {'key': 'properties.stag', 'type': 'int'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'gateway_manager_etag': {'key': 'properties.gatewayManagerEtag', 'type': 'str'},
'global_reach_enabled': {'key': 'properties.globalReachEnabled', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuit, self).__init__(**kwargs)
self.sku = kwargs.get('sku', None)
self.etag = None
self.allow_classic_operations = kwargs.get('allow_classic_operations', None)
self.circuit_provisioning_state = kwargs.get('circuit_provisioning_state', None)
self.service_provider_provisioning_state = kwargs.get('service_provider_provisioning_state', None)
self.authorizations = kwargs.get('authorizations', None)
self.peerings = kwargs.get('peerings', None)
self.service_key = kwargs.get('service_key', None)
self.service_provider_notes = kwargs.get('service_provider_notes', None)
self.service_provider_properties = kwargs.get('service_provider_properties', None)
self.express_route_port = kwargs.get('express_route_port', None)
self.bandwidth_in_gbps = kwargs.get('bandwidth_in_gbps', None)
self.stag = None
self.provisioning_state = kwargs.get('provisioning_state', None)
self.gateway_manager_etag = kwargs.get('gateway_manager_etag', None)
self.global_reach_enabled = kwargs.get('global_reach_enabled', None)
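# Usage sketch (illustrative only; provider, location, and bandwidth values are placeholders):
#
#     circuit = models.ExpressRouteCircuit(
#         location="westus",
#         sku=models.ExpressRouteCircuitSku(
#             name="Standard_MeteredData", tier="Standard", family="MeteredData"),
#         service_provider_properties=models.ExpressRouteCircuitServiceProviderProperties(
#             service_provider_name="Equinix",
#             peering_location="Silicon Valley",
#             bandwidth_in_mbps=200,
#         ),
#     )
#
# Read-only fields such as ``etag`` and ``stag`` are populated by the server and are
# ignored when sent in a request.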
class ExpressRouteCircuitArpTable(msrest.serialization.Model):
"""The ARP table associated with the ExpressRouteCircuit.
:param age: Entry age in minutes.
:type age: int
:param interface: Interface address.
:type interface: str
:param ip_address: The IP address.
:type ip_address: str
:param mac_address: The MAC address.
:type mac_address: str
"""
_attribute_map = {
'age': {'key': 'age', 'type': 'int'},
'interface': {'key': 'interface', 'type': 'str'},
'ip_address': {'key': 'ipAddress', 'type': 'str'},
'mac_address': {'key': 'macAddress', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitArpTable, self).__init__(**kwargs)
self.age = kwargs.get('age', None)
self.interface = kwargs.get('interface', None)
self.ip_address = kwargs.get('ip_address', None)
self.mac_address = kwargs.get('mac_address', None)
class ExpressRouteCircuitAuthorization(SubResource):
"""Authorization in an ExpressRouteCircuit resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param authorization_key: The authorization key.
:type authorization_key: str
:param authorization_use_status: The authorization use status. Possible values include:
"Available", "InUse".
:type authorization_use_status: str or
~azure.mgmt.network.v2019_04_01.models.AuthorizationUseStatus
:param provisioning_state: The provisioning state of the resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'authorization_key': {'key': 'properties.authorizationKey', 'type': 'str'},
'authorization_use_status': {'key': 'properties.authorizationUseStatus', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitAuthorization, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.authorization_key = kwargs.get('authorization_key', None)
self.authorization_use_status = kwargs.get('authorization_use_status', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
class ExpressRouteCircuitConnection(SubResource):
"""Express Route Circuit Connection in an ExpressRouteCircuitPeering resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param express_route_circuit_peering: Reference to Express Route Circuit Private Peering
Resource of the circuit initiating connection.
:type express_route_circuit_peering: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param peer_express_route_circuit_peering: Reference to Express Route Circuit Private Peering
Resource of the peered circuit.
:type peer_express_route_circuit_peering: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param address_prefix: /29 IP address space to carve out Customer addresses for tunnels.
:type address_prefix: str
:param authorization_key: The authorization key.
:type authorization_key: str
:ivar circuit_connection_status: Express Route Circuit connection state. Possible values
include: "Connected", "Connecting", "Disconnected".
:vartype circuit_connection_status: str or
~azure.mgmt.network.v2019_04_01.models.CircuitConnectionStatus
:ivar provisioning_state: Provisioning state of the circuit connection resource. Possible
values are: 'Succeeded', 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'circuit_connection_status': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'express_route_circuit_peering': {'key': 'properties.expressRouteCircuitPeering', 'type': 'SubResource'},
'peer_express_route_circuit_peering': {'key': 'properties.peerExpressRouteCircuitPeering', 'type': 'SubResource'},
'address_prefix': {'key': 'properties.addressPrefix', 'type': 'str'},
'authorization_key': {'key': 'properties.authorizationKey', 'type': 'str'},
'circuit_connection_status': {'key': 'properties.circuitConnectionStatus', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitConnection, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.express_route_circuit_peering = kwargs.get('express_route_circuit_peering', None)
self.peer_express_route_circuit_peering = kwargs.get('peer_express_route_circuit_peering', None)
self.address_prefix = kwargs.get('address_prefix', None)
self.authorization_key = kwargs.get('authorization_key', None)
self.circuit_connection_status = None
self.provisioning_state = None
class ExpressRouteCircuitConnectionListResult(msrest.serialization.Model):
"""Response for ListConnections API service call retrieves all global reach connections that belongs to a Private Peering for an ExpressRouteCircuit.
:param value: The global reach connection associated with Private Peering in an ExpressRoute
Circuit.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitConnection]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCircuitConnection]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitConnectionListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ExpressRouteCircuitListResult(msrest.serialization.Model):
"""Response for ListExpressRouteCircuit API service call.
:param value: A list of ExpressRouteCircuits in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuit]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCircuit]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ExpressRouteCircuitPeering(SubResource):
"""Peering in an ExpressRouteCircuit resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param peering_type: The peering type. Possible values include: "AzurePublicPeering",
"AzurePrivatePeering", "MicrosoftPeering".
:type peering_type: str or ~azure.mgmt.network.v2019_04_01.models.ExpressRoutePeeringType
:param state: The peering state. Possible values include: "Disabled", "Enabled".
:type state: str or ~azure.mgmt.network.v2019_04_01.models.ExpressRoutePeeringState
:param azure_asn: The Azure ASN.
:type azure_asn: int
:param peer_asn: The peer ASN.
:type peer_asn: long
:param primary_peer_address_prefix: The primary address prefix.
:type primary_peer_address_prefix: str
:param secondary_peer_address_prefix: The secondary address prefix.
:type secondary_peer_address_prefix: str
:param primary_azure_port: The primary port.
:type primary_azure_port: str
:param secondary_azure_port: The secondary port.
:type secondary_azure_port: str
:param shared_key: The shared key.
:type shared_key: str
:param vlan_id: The VLAN ID.
:type vlan_id: int
:param microsoft_peering_config: The Microsoft peering configuration.
:type microsoft_peering_config:
~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitPeeringConfig
:param stats: Gets peering stats.
:type stats: ~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitStats
:param provisioning_state: The provisioning state of the resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
:param gateway_manager_etag: The GatewayManager Etag.
:type gateway_manager_etag: str
:param last_modified_by: Gets whether the provider or the customer last modified the peering.
:type last_modified_by: str
:param route_filter: The reference of the RouteFilter resource.
:type route_filter: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param ipv6_peering_config: The IPv6 peering configuration.
:type ipv6_peering_config:
~azure.mgmt.network.v2019_04_01.models.Ipv6ExpressRouteCircuitPeeringConfig
:param express_route_connection: The ExpressRoute connection.
:type express_route_connection: ~azure.mgmt.network.v2019_04_01.models.ExpressRouteConnectionId
:param connections: The list of circuit connections associated with Azure Private Peering for
this circuit.
:type connections: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitConnection]
:ivar peered_connections: The list of peered circuit connections associated with Azure Private
Peering for this circuit.
:vartype peered_connections:
list[~azure.mgmt.network.v2019_04_01.models.PeerExpressRouteCircuitConnection]
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'peer_asn': {'maximum': 4294967295, 'minimum': 1},
'peered_connections': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'peering_type': {'key': 'properties.peeringType', 'type': 'str'},
'state': {'key': 'properties.state', 'type': 'str'},
'azure_asn': {'key': 'properties.azureASN', 'type': 'int'},
'peer_asn': {'key': 'properties.peerASN', 'type': 'long'},
'primary_peer_address_prefix': {'key': 'properties.primaryPeerAddressPrefix', 'type': 'str'},
'secondary_peer_address_prefix': {'key': 'properties.secondaryPeerAddressPrefix', 'type': 'str'},
'primary_azure_port': {'key': 'properties.primaryAzurePort', 'type': 'str'},
'secondary_azure_port': {'key': 'properties.secondaryAzurePort', 'type': 'str'},
'shared_key': {'key': 'properties.sharedKey', 'type': 'str'},
'vlan_id': {'key': 'properties.vlanId', 'type': 'int'},
'microsoft_peering_config': {'key': 'properties.microsoftPeeringConfig', 'type': 'ExpressRouteCircuitPeeringConfig'},
'stats': {'key': 'properties.stats', 'type': 'ExpressRouteCircuitStats'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'gateway_manager_etag': {'key': 'properties.gatewayManagerEtag', 'type': 'str'},
'last_modified_by': {'key': 'properties.lastModifiedBy', 'type': 'str'},
'route_filter': {'key': 'properties.routeFilter', 'type': 'SubResource'},
'ipv6_peering_config': {'key': 'properties.ipv6PeeringConfig', 'type': 'Ipv6ExpressRouteCircuitPeeringConfig'},
'express_route_connection': {'key': 'properties.expressRouteConnection', 'type': 'ExpressRouteConnectionId'},
'connections': {'key': 'properties.connections', 'type': '[ExpressRouteCircuitConnection]'},
'peered_connections': {'key': 'properties.peeredConnections', 'type': '[PeerExpressRouteCircuitConnection]'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitPeering, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.peering_type = kwargs.get('peering_type', None)
self.state = kwargs.get('state', None)
self.azure_asn = kwargs.get('azure_asn', None)
self.peer_asn = kwargs.get('peer_asn', None)
self.primary_peer_address_prefix = kwargs.get('primary_peer_address_prefix', None)
self.secondary_peer_address_prefix = kwargs.get('secondary_peer_address_prefix', None)
self.primary_azure_port = kwargs.get('primary_azure_port', None)
self.secondary_azure_port = kwargs.get('secondary_azure_port', None)
self.shared_key = kwargs.get('shared_key', None)
self.vlan_id = kwargs.get('vlan_id', None)
self.microsoft_peering_config = kwargs.get('microsoft_peering_config', None)
self.stats = kwargs.get('stats', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.gateway_manager_etag = kwargs.get('gateway_manager_etag', None)
self.last_modified_by = kwargs.get('last_modified_by', None)
self.route_filter = kwargs.get('route_filter', None)
self.ipv6_peering_config = kwargs.get('ipv6_peering_config', None)
self.express_route_connection = kwargs.get('express_route_connection', None)
self.connections = kwargs.get('connections', None)
self.peered_connections = None
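# Usage sketch (illustrative only; addresses, ASN, and VLAN are placeholders). Note the
# ``_validation`` constraint above: ``peer_asn`` must be between 1 and 4294967295.
#
#     peering = models.ExpressRouteCircuitPeering(
#         name="AzurePrivatePeering",
#         peering_type="AzurePrivatePeering",
#         peer_asn=65001,
#         primary_peer_address_prefix="192.168.1.0/30",
#         secondary_peer_address_prefix="192.168.2.0/30",
#         vlan_id=200,
#     )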
class ExpressRouteCircuitPeeringConfig(msrest.serialization.Model):
"""Specifies the peering configuration.
:param advertised_public_prefixes: The reference of AdvertisedPublicPrefixes.
:type advertised_public_prefixes: list[str]
:param advertised_communities: The communities of BGP peering. Specified for Microsoft peering.
:type advertised_communities: list[str]
:param advertised_public_prefixes_state: The advertised public prefix state of the Peering
resource. Possible values include: "NotConfigured", "Configuring", "Configured",
"ValidationNeeded".
:type advertised_public_prefixes_state: str or
~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitPeeringAdvertisedPublicPrefixState
:param legacy_mode: The legacy mode of the peering.
:type legacy_mode: int
:param customer_asn: The CustomerASN of the peering.
:type customer_asn: int
:param routing_registry_name: The RoutingRegistryName of the configuration.
:type routing_registry_name: str
"""
_attribute_map = {
'advertised_public_prefixes': {'key': 'advertisedPublicPrefixes', 'type': '[str]'},
'advertised_communities': {'key': 'advertisedCommunities', 'type': '[str]'},
'advertised_public_prefixes_state': {'key': 'advertisedPublicPrefixesState', 'type': 'str'},
'legacy_mode': {'key': 'legacyMode', 'type': 'int'},
'customer_asn': {'key': 'customerASN', 'type': 'int'},
'routing_registry_name': {'key': 'routingRegistryName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitPeeringConfig, self).__init__(**kwargs)
self.advertised_public_prefixes = kwargs.get('advertised_public_prefixes', None)
self.advertised_communities = kwargs.get('advertised_communities', None)
self.advertised_public_prefixes_state = kwargs.get('advertised_public_prefixes_state', None)
self.legacy_mode = kwargs.get('legacy_mode', None)
self.customer_asn = kwargs.get('customer_asn', None)
self.routing_registry_name = kwargs.get('routing_registry_name', None)
class ExpressRouteCircuitPeeringId(msrest.serialization.Model):
"""ExpressRoute circuit peering identifier.
:param id: The ID of the ExpressRoute circuit peering.
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitPeeringId, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
class ExpressRouteCircuitPeeringListResult(msrest.serialization.Model):
"""Response for ListPeering API service call retrieves all peerings that belong to an ExpressRouteCircuit.
:param value: The peerings in an express route circuit.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitPeering]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCircuitPeering]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitPeeringListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ExpressRouteCircuitReference(msrest.serialization.Model):
"""Reference to an express route circuit.
:param id: Corresponding Express Route Circuit Id.
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitReference, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
class ExpressRouteCircuitRoutesTable(msrest.serialization.Model):
"""The routes table associated with the ExpressRouteCircuit.
:param network: IP address of a network entity.
:type network: str
:param next_hop: NextHop address.
:type next_hop: str
:param loc_prf: Local preference value as set with the set local-preference route-map
configuration command.
:type loc_prf: str
:param weight: Route Weight.
:type weight: int
:param path: Autonomous system paths to the destination network.
:type path: str
"""
_attribute_map = {
'network': {'key': 'network', 'type': 'str'},
'next_hop': {'key': 'nextHop', 'type': 'str'},
'loc_prf': {'key': 'locPrf', 'type': 'str'},
'weight': {'key': 'weight', 'type': 'int'},
'path': {'key': 'path', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitRoutesTable, self).__init__(**kwargs)
self.network = kwargs.get('network', None)
self.next_hop = kwargs.get('next_hop', None)
self.loc_prf = kwargs.get('loc_prf', None)
self.weight = kwargs.get('weight', None)
self.path = kwargs.get('path', None)
class ExpressRouteCircuitRoutesTableSummary(msrest.serialization.Model):
"""The routes table associated with the ExpressRouteCircuit.
:param neighbor: IP address of the neighbor.
:type neighbor: str
:param v: BGP version number spoken to the neighbor.
:type v: int
:param as_property: Autonomous system number.
:type as_property: int
:param up_down: The length of time that the BGP session has been in the Established state, or
the current status if not in the Established state.
:type up_down: str
:param state_pfx_rcd: Current state of the BGP session, and the number of prefixes that have
been received from a neighbor or peer group.
:type state_pfx_rcd: str
"""
_attribute_map = {
'neighbor': {'key': 'neighbor', 'type': 'str'},
'v': {'key': 'v', 'type': 'int'},
'as_property': {'key': 'as', 'type': 'int'},
'up_down': {'key': 'upDown', 'type': 'str'},
'state_pfx_rcd': {'key': 'statePfxRcd', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitRoutesTableSummary, self).__init__(**kwargs)
self.neighbor = kwargs.get('neighbor', None)
self.v = kwargs.get('v', None)
self.as_property = kwargs.get('as_property', None)
self.up_down = kwargs.get('up_down', None)
self.state_pfx_rcd = kwargs.get('state_pfx_rcd', None)
class ExpressRouteCircuitsArpTableListResult(msrest.serialization.Model):
"""Response for ListArpTable associated with the Express Route Circuits API.
:param value: Gets list of the ARP table.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitArpTable]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCircuitArpTable]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitsArpTableListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ExpressRouteCircuitServiceProviderProperties(msrest.serialization.Model):
"""Contains ServiceProviderProperties in an ExpressRouteCircuit.
:param service_provider_name: The serviceProviderName.
:type service_provider_name: str
:param peering_location: The peering location.
:type peering_location: str
:param bandwidth_in_mbps: The BandwidthInMbps.
:type bandwidth_in_mbps: int
"""
_attribute_map = {
'service_provider_name': {'key': 'serviceProviderName', 'type': 'str'},
'peering_location': {'key': 'peeringLocation', 'type': 'str'},
'bandwidth_in_mbps': {'key': 'bandwidthInMbps', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitServiceProviderProperties, self).__init__(**kwargs)
self.service_provider_name = kwargs.get('service_provider_name', None)
self.peering_location = kwargs.get('peering_location', None)
self.bandwidth_in_mbps = kwargs.get('bandwidth_in_mbps', None)
class ExpressRouteCircuitSku(msrest.serialization.Model):
"""Contains SKU in an ExpressRouteCircuit.
:param name: The name of the SKU.
:type name: str
:param tier: The tier of the SKU. Possible values include: "Standard", "Premium", "Basic",
"Local".
:type tier: str or ~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitSkuTier
:param family: The family of the SKU. Possible values include: "UnlimitedData", "MeteredData".
:type family: str or ~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitSkuFamily
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'tier': {'key': 'tier', 'type': 'str'},
'family': {'key': 'family', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitSku, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.tier = kwargs.get('tier', None)
self.family = kwargs.get('family', None)
class ExpressRouteCircuitsRoutesTableListResult(msrest.serialization.Model):
"""Response for ListRoutesTable associated with the Express Route Circuits API.
:param value: The list of routes table.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitRoutesTable]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCircuitRoutesTable]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitsRoutesTableListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ExpressRouteCircuitsRoutesTableSummaryListResult(msrest.serialization.Model):
"""Response for ListRoutesTable associated with the Express Route Circuits API.
:param value: A list of the routes table.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitRoutesTableSummary]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCircuitRoutesTableSummary]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitsRoutesTableSummaryListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ExpressRouteCircuitStats(msrest.serialization.Model):
"""Contains stats associated with the peering.
:param primarybytes_in: Gets BytesIn of the peering.
:type primarybytes_in: long
:param primarybytes_out: Gets BytesOut of the peering.
:type primarybytes_out: long
:param secondarybytes_in: Gets BytesIn of the peering.
:type secondarybytes_in: long
:param secondarybytes_out: Gets BytesOut of the peering.
:type secondarybytes_out: long
"""
_attribute_map = {
'primarybytes_in': {'key': 'primarybytesIn', 'type': 'long'},
'primarybytes_out': {'key': 'primarybytesOut', 'type': 'long'},
'secondarybytes_in': {'key': 'secondarybytesIn', 'type': 'long'},
'secondarybytes_out': {'key': 'secondarybytesOut', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitStats, self).__init__(**kwargs)
self.primarybytes_in = kwargs.get('primarybytes_in', None)
self.primarybytes_out = kwargs.get('primarybytes_out', None)
self.secondarybytes_in = kwargs.get('secondarybytes_in', None)
self.secondarybytes_out = kwargs.get('secondarybytes_out', None)
class ExpressRouteConnection(SubResource):
"""ExpressRouteConnection resource.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param id: Resource ID.
:type id: str
:param name: Required. The name of the resource.
:type name: str
:ivar provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
:param express_route_circuit_peering: The ExpressRoute circuit peering.
:type express_route_circuit_peering:
~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitPeeringId
:param authorization_key: Authorization key to establish the connection.
:type authorization_key: str
:param routing_weight: The routing weight associated to the connection.
:type routing_weight: int
"""
_validation = {
'name': {'required': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'express_route_circuit_peering': {'key': 'properties.expressRouteCircuitPeering', 'type': 'ExpressRouteCircuitPeeringId'},
'authorization_key': {'key': 'properties.authorizationKey', 'type': 'str'},
'routing_weight': {'key': 'properties.routingWeight', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteConnection, self).__init__(**kwargs)
self.name = kwargs['name']
self.provisioning_state = None
self.express_route_circuit_peering = kwargs.get('express_route_circuit_peering', None)
self.authorization_key = kwargs.get('authorization_key', None)
self.routing_weight = kwargs.get('routing_weight', None)
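# Note: unlike most models in this module, ``name`` is required here; __init__ reads it
# with kwargs['name'], so omitting it raises a KeyError at construction time. A
# hypothetical example (the peering ID is a placeholder):
#
#     connection = models.ExpressRouteConnection(
#         name="connectionName",
#         express_route_circuit_peering=models.ExpressRouteCircuitPeeringId(
#             id="/subscriptions/<sub>/.../peerings/AzurePrivatePeering"),
#         routing_weight=2,
#     )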
class ExpressRouteConnectionId(msrest.serialization.Model):
"""The ID of the ExpressRouteConnection.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The ID of the ExpressRouteConnection.
:vartype id: str
"""
_validation = {
'id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteConnectionId, self).__init__(**kwargs)
self.id = None
class ExpressRouteConnectionList(msrest.serialization.Model):
"""ExpressRouteConnection list.
:param value: The list of ExpressRoute connections.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteConnection]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteConnection]'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteConnectionList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
class ExpressRouteCrossConnection(Resource):
"""ExpressRouteCrossConnection resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar primary_azure_port: The name of the primary port.
:vartype primary_azure_port: str
:ivar secondary_azure_port: The name of the secondary port.
:vartype secondary_azure_port: str
:ivar s_tag: The identifier of the circuit traffic.
:vartype s_tag: int
:param peering_location: The peering location of the ExpressRoute circuit.
:type peering_location: str
:param bandwidth_in_mbps: The circuit bandwidth In Mbps.
:type bandwidth_in_mbps: int
:param express_route_circuit: The ExpressRouteCircuit.
:type express_route_circuit:
~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitReference
:param service_provider_provisioning_state: The provisioning state of the circuit in the
connectivity provider system. Possible values include: "NotProvisioned", "Provisioning",
"Provisioned", "Deprovisioning".
:type service_provider_provisioning_state: str or
~azure.mgmt.network.v2019_04_01.models.ServiceProviderProvisioningState
:param service_provider_notes: Additional read-only notes set by the connectivity provider.
:type service_provider_notes: str
:ivar provisioning_state: The provisioning state of the resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
:param peerings: The list of peerings.
:type peerings: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCrossConnectionPeering]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'primary_azure_port': {'readonly': True},
'secondary_azure_port': {'readonly': True},
's_tag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'primary_azure_port': {'key': 'properties.primaryAzurePort', 'type': 'str'},
'secondary_azure_port': {'key': 'properties.secondaryAzurePort', 'type': 'str'},
's_tag': {'key': 'properties.sTag', 'type': 'int'},
'peering_location': {'key': 'properties.peeringLocation', 'type': 'str'},
'bandwidth_in_mbps': {'key': 'properties.bandwidthInMbps', 'type': 'int'},
'express_route_circuit': {'key': 'properties.expressRouteCircuit', 'type': 'ExpressRouteCircuitReference'},
'service_provider_provisioning_state': {'key': 'properties.serviceProviderProvisioningState', 'type': 'str'},
'service_provider_notes': {'key': 'properties.serviceProviderNotes', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'peerings': {'key': 'properties.peerings', 'type': '[ExpressRouteCrossConnectionPeering]'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCrossConnection, self).__init__(**kwargs)
self.etag = None
self.primary_azure_port = None
self.secondary_azure_port = None
self.s_tag = None
self.peering_location = kwargs.get('peering_location', None)
self.bandwidth_in_mbps = kwargs.get('bandwidth_in_mbps', None)
self.express_route_circuit = kwargs.get('express_route_circuit', None)
self.service_provider_provisioning_state = kwargs.get('service_provider_provisioning_state', None)
self.service_provider_notes = kwargs.get('service_provider_notes', None)
self.provisioning_state = None
self.peerings = kwargs.get('peerings', None)
class ExpressRouteCrossConnectionListResult(msrest.serialization.Model):
"""Response for ListExpressRouteCrossConnection API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of ExpressRouteCrossConnection resources.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCrossConnection]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCrossConnection]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCrossConnectionListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class ExpressRouteCrossConnectionPeering(SubResource):
"""Peering in an ExpressRoute Cross Connection resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param peering_type: The peering type. Possible values include: "AzurePublicPeering",
"AzurePrivatePeering", "MicrosoftPeering".
:type peering_type: str or ~azure.mgmt.network.v2019_04_01.models.ExpressRoutePeeringType
:param state: The peering state. Possible values include: "Disabled", "Enabled".
:type state: str or ~azure.mgmt.network.v2019_04_01.models.ExpressRoutePeeringState
:ivar azure_asn: The Azure ASN.
:vartype azure_asn: int
:param peer_asn: The peer ASN.
:type peer_asn: long
:param primary_peer_address_prefix: The primary address prefix.
:type primary_peer_address_prefix: str
:param secondary_peer_address_prefix: The secondary address prefix.
:type secondary_peer_address_prefix: str
:ivar primary_azure_port: The primary port.
:vartype primary_azure_port: str
:ivar secondary_azure_port: The secondary port.
:vartype secondary_azure_port: str
:param shared_key: The shared key.
:type shared_key: str
:param vlan_id: The VLAN ID.
:type vlan_id: int
:param microsoft_peering_config: The Microsoft peering configuration.
:type microsoft_peering_config:
~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitPeeringConfig
:ivar provisioning_state: The provisioning state of the resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
:param gateway_manager_etag: The GatewayManager Etag.
:type gateway_manager_etag: str
:param last_modified_by: Gets whether the provider or the customer last modified the peering.
:type last_modified_by: str
:param ipv6_peering_config: The IPv6 peering configuration.
:type ipv6_peering_config:
~azure.mgmt.network.v2019_04_01.models.Ipv6ExpressRouteCircuitPeeringConfig
"""
_validation = {
'etag': {'readonly': True},
'azure_asn': {'readonly': True},
'peer_asn': {'maximum': 4294967295, 'minimum': 1},
'primary_azure_port': {'readonly': True},
'secondary_azure_port': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'peering_type': {'key': 'properties.peeringType', 'type': 'str'},
'state': {'key': 'properties.state', 'type': 'str'},
'azure_asn': {'key': 'properties.azureASN', 'type': 'int'},
'peer_asn': {'key': 'properties.peerASN', 'type': 'long'},
'primary_peer_address_prefix': {'key': 'properties.primaryPeerAddressPrefix', 'type': 'str'},
'secondary_peer_address_prefix': {'key': 'properties.secondaryPeerAddressPrefix', 'type': 'str'},
'primary_azure_port': {'key': 'properties.primaryAzurePort', 'type': 'str'},
'secondary_azure_port': {'key': 'properties.secondaryAzurePort', 'type': 'str'},
'shared_key': {'key': 'properties.sharedKey', 'type': 'str'},
'vlan_id': {'key': 'properties.vlanId', 'type': 'int'},
'microsoft_peering_config': {'key': 'properties.microsoftPeeringConfig', 'type': 'ExpressRouteCircuitPeeringConfig'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'gateway_manager_etag': {'key': 'properties.gatewayManagerEtag', 'type': 'str'},
'last_modified_by': {'key': 'properties.lastModifiedBy', 'type': 'str'},
'ipv6_peering_config': {'key': 'properties.ipv6PeeringConfig', 'type': 'Ipv6ExpressRouteCircuitPeeringConfig'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCrossConnectionPeering, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.peering_type = kwargs.get('peering_type', None)
self.state = kwargs.get('state', None)
self.azure_asn = None
self.peer_asn = kwargs.get('peer_asn', None)
self.primary_peer_address_prefix = kwargs.get('primary_peer_address_prefix', None)
self.secondary_peer_address_prefix = kwargs.get('secondary_peer_address_prefix', None)
self.primary_azure_port = None
self.secondary_azure_port = None
self.shared_key = kwargs.get('shared_key', None)
self.vlan_id = kwargs.get('vlan_id', None)
self.microsoft_peering_config = kwargs.get('microsoft_peering_config', None)
self.provisioning_state = None
self.gateway_manager_etag = kwargs.get('gateway_manager_etag', None)
self.last_modified_by = kwargs.get('last_modified_by', None)
self.ipv6_peering_config = kwargs.get('ipv6_peering_config', None)
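# Editor's note (illustrative sketch only, not part of the generated model code):
# constructing an ExpressRouteCrossConnectionPeering with keyword arguments, as the
# kwargs-based __init__ above expects. All values are placeholders; server-populated
# fields such as etag, azure_asn and provisioning_state are left to the service.
def _example_cross_connection_peering():
    return ExpressRouteCrossConnectionPeering(
        name="AzurePrivatePeering",
        peering_type="AzurePrivatePeering",
        peer_asn=65001,
        primary_peer_address_prefix="192.168.1.0/30",
        secondary_peer_address_prefix="192.168.2.0/30",
        vlan_id=200,
    )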
class ExpressRouteCrossConnectionPeeringList(msrest.serialization.Model):
"""Response for ListPeering API service call retrieves all peerings that belong to an ExpressRouteCrossConnection.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: The peerings in an express route cross connection.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCrossConnectionPeering]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCrossConnectionPeering]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCrossConnectionPeeringList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class ExpressRouteCrossConnectionRoutesTableSummary(msrest.serialization.Model):
"""The routes table associated with the ExpressRouteCircuit.
:param neighbor: IP address of Neighbor router.
:type neighbor: str
:param asn: Autonomous system number.
:type asn: int
:param up_down: The length of time that the BGP session has been in the Established state, or
the current status if not in the Established state.
:type up_down: str
:param state_or_prefixes_received: Current state of the BGP session, and the number of prefixes
that have been received from a neighbor or peer group.
:type state_or_prefixes_received: str
"""
_attribute_map = {
'neighbor': {'key': 'neighbor', 'type': 'str'},
'asn': {'key': 'asn', 'type': 'int'},
'up_down': {'key': 'upDown', 'type': 'str'},
'state_or_prefixes_received': {'key': 'stateOrPrefixesReceived', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCrossConnectionRoutesTableSummary, self).__init__(**kwargs)
self.neighbor = kwargs.get('neighbor', None)
self.asn = kwargs.get('asn', None)
self.up_down = kwargs.get('up_down', None)
self.state_or_prefixes_received = kwargs.get('state_or_prefixes_received', None)
class ExpressRouteCrossConnectionsRoutesTableSummaryListResult(msrest.serialization.Model):
"""Response for ListRoutesTable associated with the Express Route Cross Connections.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of the routes table.
:type value:
list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCrossConnectionRoutesTableSummary]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCrossConnectionRoutesTableSummary]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCrossConnectionsRoutesTableSummaryListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class ExpressRouteGateway(Resource):
"""ExpressRoute gateway resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param auto_scale_configuration: Configuration for auto scaling.
:type auto_scale_configuration:
~azure.mgmt.network.v2019_04_01.models.ExpressRouteGatewayPropertiesAutoScaleConfiguration
:ivar express_route_connections: List of ExpressRoute connections to the ExpressRoute gateway.
:vartype express_route_connections:
list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteConnection]
:ivar provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
:param virtual_hub: The Virtual Hub where the ExpressRoute gateway is or will be deployed.
:type virtual_hub: ~azure.mgmt.network.v2019_04_01.models.VirtualHubId
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'express_route_connections': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'auto_scale_configuration': {'key': 'properties.autoScaleConfiguration', 'type': 'ExpressRouteGatewayPropertiesAutoScaleConfiguration'},
'express_route_connections': {'key': 'properties.expressRouteConnections', 'type': '[ExpressRouteConnection]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'virtual_hub': {'key': 'properties.virtualHub', 'type': 'VirtualHubId'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteGateway, self).__init__(**kwargs)
self.etag = None
self.auto_scale_configuration = kwargs.get('auto_scale_configuration', None)
self.express_route_connections = None
self.provisioning_state = None
self.virtual_hub = kwargs.get('virtual_hub', None)
class ExpressRouteGatewayList(msrest.serialization.Model):
"""List of ExpressRoute gateways.
:param value: List of ExpressRoute gateways.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteGateway]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteGateway]'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteGatewayList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
class ExpressRouteGatewayPropertiesAutoScaleConfiguration(msrest.serialization.Model):
"""Configuration for auto scaling.
:param bounds: Minimum and maximum number of scale units to deploy.
:type bounds:
~azure.mgmt.network.v2019_04_01.models.ExpressRouteGatewayPropertiesAutoScaleConfigurationBounds
"""
_attribute_map = {
'bounds': {'key': 'bounds', 'type': 'ExpressRouteGatewayPropertiesAutoScaleConfigurationBounds'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteGatewayPropertiesAutoScaleConfiguration, self).__init__(**kwargs)
self.bounds = kwargs.get('bounds', None)
class ExpressRouteGatewayPropertiesAutoScaleConfigurationBounds(msrest.serialization.Model):
"""Minimum and maximum number of scale units to deploy.
:param min: Minimum number of scale units deployed for ExpressRoute gateway.
:type min: int
:param max: Maximum number of scale units deployed for ExpressRoute gateway.
:type max: int
"""
_attribute_map = {
'min': {'key': 'min', 'type': 'int'},
'max': {'key': 'max', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteGatewayPropertiesAutoScaleConfigurationBounds, self).__init__(**kwargs)
self.min = kwargs.get('min', None)
self.max = kwargs.get('max', None)
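# Editor's note (illustrative sketch only): how the auto-scale configuration above
# nests its bounds object. The scale-unit numbers are placeholders, not recommendations.
def _example_auto_scale_configuration():
    bounds = ExpressRouteGatewayPropertiesAutoScaleConfigurationBounds(min=2, max=4)
    return ExpressRouteGatewayPropertiesAutoScaleConfiguration(bounds=bounds)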
class ExpressRouteLink(SubResource):
"""ExpressRouteLink child resource definition.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of child port resource that is unique among child port resources of the
parent.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar router_name: Name of Azure router associated with physical port.
:vartype router_name: str
:ivar interface_name: Name of Azure router interface.
:vartype interface_name: str
:ivar patch_panel_id: Mapping of the physical port to the patch panel port.
:vartype patch_panel_id: str
:ivar rack_id: Mapping of physical patch panel to rack.
:vartype rack_id: str
:ivar connector_type: Physical fiber port type. Possible values include: "LC", "SC".
:vartype connector_type: str or
~azure.mgmt.network.v2019_04_01.models.ExpressRouteLinkConnectorType
:param admin_state: Administrative state of the physical port. Possible values include:
"Enabled", "Disabled".
:type admin_state: str or ~azure.mgmt.network.v2019_04_01.models.ExpressRouteLinkAdminState
:ivar provisioning_state: The provisioning state of the ExpressRouteLink resource. Possible
values are: 'Succeeded', 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'etag': {'readonly': True},
'router_name': {'readonly': True},
'interface_name': {'readonly': True},
'patch_panel_id': {'readonly': True},
'rack_id': {'readonly': True},
'connector_type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'router_name': {'key': 'properties.routerName', 'type': 'str'},
'interface_name': {'key': 'properties.interfaceName', 'type': 'str'},
'patch_panel_id': {'key': 'properties.patchPanelId', 'type': 'str'},
'rack_id': {'key': 'properties.rackId', 'type': 'str'},
'connector_type': {'key': 'properties.connectorType', 'type': 'str'},
'admin_state': {'key': 'properties.adminState', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteLink, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.router_name = None
self.interface_name = None
self.patch_panel_id = None
self.rack_id = None
self.connector_type = None
self.admin_state = kwargs.get('admin_state', None)
self.provisioning_state = None
class ExpressRouteLinkListResult(msrest.serialization.Model):
"""Response for ListExpressRouteLinks API service call.
:param value: The list of ExpressRouteLink sub-resources.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteLink]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteLink]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteLinkListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ExpressRoutePort(Resource):
"""ExpressRoutePort resource definition.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param peering_location: The name of the peering location that the ExpressRoutePort is mapped
to physically.
:type peering_location: str
:param bandwidth_in_gbps: Bandwidth of procured ports in Gbps.
:type bandwidth_in_gbps: int
:ivar provisioned_bandwidth_in_gbps: Aggregate Gbps of associated circuit bandwidths.
:vartype provisioned_bandwidth_in_gbps: float
:ivar mtu: Maximum transmission unit of the physical port pair(s).
:vartype mtu: str
:param encapsulation: Encapsulation method on physical ports. Possible values include: "Dot1Q",
"QinQ".
:type encapsulation: str or
~azure.mgmt.network.v2019_04_01.models.ExpressRoutePortsEncapsulation
:ivar ether_type: Ether type of the physical port.
:vartype ether_type: str
:ivar allocation_date: Date of the physical port allocation to be used in Letter of
Authorization.
:vartype allocation_date: str
:param links: The set of physical links of the ExpressRoutePort resource.
:type links: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteLink]
:ivar circuits: References to the ExpressRoute circuit(s) that are provisioned on this
ExpressRoutePort resource.
:vartype circuits: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:ivar provisioning_state: The provisioning state of the ExpressRoutePort resource. Possible
values are: 'Succeeded', 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
:param resource_guid: The resource GUID property of the ExpressRoutePort resource.
:type resource_guid: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioned_bandwidth_in_gbps': {'readonly': True},
'mtu': {'readonly': True},
'ether_type': {'readonly': True},
'allocation_date': {'readonly': True},
'circuits': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'peering_location': {'key': 'properties.peeringLocation', 'type': 'str'},
'bandwidth_in_gbps': {'key': 'properties.bandwidthInGbps', 'type': 'int'},
'provisioned_bandwidth_in_gbps': {'key': 'properties.provisionedBandwidthInGbps', 'type': 'float'},
'mtu': {'key': 'properties.mtu', 'type': 'str'},
'encapsulation': {'key': 'properties.encapsulation', 'type': 'str'},
'ether_type': {'key': 'properties.etherType', 'type': 'str'},
'allocation_date': {'key': 'properties.allocationDate', 'type': 'str'},
'links': {'key': 'properties.links', 'type': '[ExpressRouteLink]'},
'circuits': {'key': 'properties.circuits', 'type': '[SubResource]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRoutePort, self).__init__(**kwargs)
self.etag = None
self.peering_location = kwargs.get('peering_location', None)
self.bandwidth_in_gbps = kwargs.get('bandwidth_in_gbps', None)
self.provisioned_bandwidth_in_gbps = None
self.mtu = None
self.encapsulation = kwargs.get('encapsulation', None)
self.ether_type = None
self.allocation_date = None
self.links = kwargs.get('links', None)
self.circuits = None
self.provisioning_state = None
self.resource_guid = kwargs.get('resource_guid', None)
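# Editor's note (illustrative sketch only): an ExpressRoutePort with one administratively
# enabled link. Location, peering location, bandwidth and link name are placeholders;
# read-only fields (mtu, ether_type, circuits, provisioning_state, ...) are set by the server.
def _example_express_route_port():
    link = ExpressRouteLink(name="link1", admin_state="Enabled")
    return ExpressRoutePort(
        location="westus",
        peering_location="Silicon Valley",
        bandwidth_in_gbps=10,
        encapsulation="Dot1Q",
        links=[link],
    )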
class ExpressRoutePortListResult(msrest.serialization.Model):
"""Response for ListExpressRoutePorts API service call.
:param value: A list of ExpressRoutePort resources.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ExpressRoutePort]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRoutePort]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRoutePortListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ExpressRoutePortsLocation(Resource):
"""Definition of the ExpressRoutePorts peering location resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar address: Address of peering location.
:vartype address: str
:ivar contact: Contact details of peering locations.
:vartype contact: str
:param available_bandwidths: The inventory of available ExpressRoutePort bandwidths.
:type available_bandwidths:
list[~azure.mgmt.network.v2019_04_01.models.ExpressRoutePortsLocationBandwidths]
:ivar provisioning_state: The provisioning state of the ExpressRoutePortLocation resource.
Possible values are: 'Succeeded', 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'address': {'readonly': True},
'contact': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'address': {'key': 'properties.address', 'type': 'str'},
'contact': {'key': 'properties.contact', 'type': 'str'},
'available_bandwidths': {'key': 'properties.availableBandwidths', 'type': '[ExpressRoutePortsLocationBandwidths]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRoutePortsLocation, self).__init__(**kwargs)
self.address = None
self.contact = None
self.available_bandwidths = kwargs.get('available_bandwidths', None)
self.provisioning_state = None
class ExpressRoutePortsLocationBandwidths(msrest.serialization.Model):
"""Real-time inventory of available ExpressRoute port bandwidths.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar offer_name: Bandwidth descriptive name.
:vartype offer_name: str
:ivar value_in_gbps: Bandwidth value in Gbps.
:vartype value_in_gbps: int
"""
_validation = {
'offer_name': {'readonly': True},
'value_in_gbps': {'readonly': True},
}
_attribute_map = {
'offer_name': {'key': 'offerName', 'type': 'str'},
'value_in_gbps': {'key': 'valueInGbps', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ExpressRoutePortsLocationBandwidths, self).__init__(**kwargs)
self.offer_name = None
self.value_in_gbps = None
class ExpressRoutePortsLocationListResult(msrest.serialization.Model):
"""Response for ListExpressRoutePortsLocations API service call.
:param value: The list of all ExpressRoutePort peering locations.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ExpressRoutePortsLocation]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRoutePortsLocation]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRoutePortsLocationListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ExpressRouteServiceProvider(Resource):
"""A ExpressRouteResourceProvider object.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param peering_locations: A list of peering locations.
:type peering_locations: list[str]
:param bandwidths_offered: The bandwidths offered.
:type bandwidths_offered:
list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteServiceProviderBandwidthsOffered]
:param provisioning_state: The provisioning state of the resource.
:type provisioning_state: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'peering_locations': {'key': 'properties.peeringLocations', 'type': '[str]'},
'bandwidths_offered': {'key': 'properties.bandwidthsOffered', 'type': '[ExpressRouteServiceProviderBandwidthsOffered]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteServiceProvider, self).__init__(**kwargs)
self.peering_locations = kwargs.get('peering_locations', None)
self.bandwidths_offered = kwargs.get('bandwidths_offered', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
class ExpressRouteServiceProviderBandwidthsOffered(msrest.serialization.Model):
"""Contains bandwidths offered in ExpressRouteServiceProvider resources.
:param offer_name: The OfferName.
:type offer_name: str
:param value_in_mbps: The ValueInMbps.
:type value_in_mbps: int
"""
_attribute_map = {
'offer_name': {'key': 'offerName', 'type': 'str'},
'value_in_mbps': {'key': 'valueInMbps', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteServiceProviderBandwidthsOffered, self).__init__(**kwargs)
self.offer_name = kwargs.get('offer_name', None)
self.value_in_mbps = kwargs.get('value_in_mbps', None)
class ExpressRouteServiceProviderListResult(msrest.serialization.Model):
"""Response for the ListExpressRouteServiceProvider API service call.
:param value: A list of ExpressRouteServiceProvider resources.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteServiceProvider]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteServiceProvider]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteServiceProviderListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class FlowLogFormatParameters(msrest.serialization.Model):
"""Parameters that define the flow log format.
:param type: The file type of flow log. Possible values include: "JSON".
:type type: str or ~azure.mgmt.network.v2019_04_01.models.FlowLogFormatType
:param version: The version (revision) of the flow log.
:type version: int
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(FlowLogFormatParameters, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.version = kwargs.get('version', 0)
class FlowLogInformation(msrest.serialization.Model):
"""Information on the configuration of flow log and traffic analytics (optional) .
All required parameters must be populated in order to send to Azure.
:param target_resource_id: Required. The ID of the resource to configure for flow log and
traffic analytics (optional).
:type target_resource_id: str
:param flow_analytics_configuration: Parameters that define the configuration of traffic
analytics.
:type flow_analytics_configuration:
~azure.mgmt.network.v2019_04_01.models.TrafficAnalyticsProperties
:param storage_id: Required. ID of the storage account which is used to store the flow log.
:type storage_id: str
:param enabled: Required. Flag to enable/disable flow logging.
:type enabled: bool
:param retention_policy: Parameters that define the retention policy for flow log.
:type retention_policy: ~azure.mgmt.network.v2019_04_01.models.RetentionPolicyParameters
:param format: Parameters that define the flow log format.
:type format: ~azure.mgmt.network.v2019_04_01.models.FlowLogFormatParameters
"""
_validation = {
'target_resource_id': {'required': True},
'storage_id': {'required': True},
'enabled': {'required': True},
}
_attribute_map = {
'target_resource_id': {'key': 'targetResourceId', 'type': 'str'},
'flow_analytics_configuration': {'key': 'flowAnalyticsConfiguration', 'type': 'TrafficAnalyticsProperties'},
'storage_id': {'key': 'properties.storageId', 'type': 'str'},
'enabled': {'key': 'properties.enabled', 'type': 'bool'},
'retention_policy': {'key': 'properties.retentionPolicy', 'type': 'RetentionPolicyParameters'},
'format': {'key': 'properties.format', 'type': 'FlowLogFormatParameters'},
}
def __init__(
self,
**kwargs
):
super(FlowLogInformation, self).__init__(**kwargs)
self.target_resource_id = kwargs['target_resource_id']
self.flow_analytics_configuration = kwargs.get('flow_analytics_configuration', None)
self.storage_id = kwargs['storage_id']
self.enabled = kwargs['enabled']
self.retention_policy = kwargs.get('retention_policy', None)
self.format = kwargs.get('format', None)
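# Editor's note (illustrative sketch only): a FlowLogInformation payload with its required
# fields (target_resource_id, storage_id, enabled) and an optional format block. The
# resource IDs are placeholders; retention and traffic-analytics settings are omitted here.
def _example_flow_log_information():
    fmt = FlowLogFormatParameters(type="JSON", version=2)
    return FlowLogInformation(
        target_resource_id="/subscriptions/.../networkSecurityGroups/example-nsg",
        storage_id="/subscriptions/.../storageAccounts/examplestorage",
        enabled=True,
        format=fmt,
    )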
class FlowLogStatusParameters(msrest.serialization.Model):
"""Parameters that define a resource to query flow log and traffic analytics (optional) status.
All required parameters must be populated in order to send to Azure.
:param target_resource_id: Required. The target resource for which to get the flow log and
traffic analytics (optional) status.
:type target_resource_id: str
"""
_validation = {
'target_resource_id': {'required': True},
}
_attribute_map = {
'target_resource_id': {'key': 'targetResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(FlowLogStatusParameters, self).__init__(**kwargs)
self.target_resource_id = kwargs['target_resource_id']
class FrontendIPConfiguration(SubResource):
"""Frontend IP address of the load balancer.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param zones: A list of availability zones denoting the zone from which the IP allocated for
the resource must come.
:type zones: list[str]
:ivar inbound_nat_rules: Read only. Inbound rules URIs that use this frontend IP.
:vartype inbound_nat_rules: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:ivar inbound_nat_pools: Read only. Inbound pools URIs that use this frontend IP.
:vartype inbound_nat_pools: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:ivar outbound_rules: Read only. Outbound rules URIs that use this frontend IP.
:vartype outbound_rules: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:ivar load_balancing_rules: Read only. Load balancing rules URIs that use this frontend IP.
:vartype load_balancing_rules: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:param private_ip_address: The private IP address of the IP configuration.
:type private_ip_address: str
:param private_ip_allocation_method: The Private IP allocation method. Possible values include:
"Static", "Dynamic".
:type private_ip_allocation_method: str or
~azure.mgmt.network.v2019_04_01.models.IPAllocationMethod
:param private_ip_address_version: Whether the specific IP configuration is IPv4 or IPv6. The
default is IPv4. Possible values include: "IPv4", "IPv6".
:type private_ip_address_version: str or ~azure.mgmt.network.v2019_04_01.models.IPVersion
:param subnet: The reference of the subnet resource.
:type subnet: ~azure.mgmt.network.v2019_04_01.models.Subnet
:param public_ip_address: The reference of the Public IP resource.
:type public_ip_address: ~azure.mgmt.network.v2019_04_01.models.PublicIPAddress
:param public_ip_prefix: The reference of the Public IP Prefix resource.
:type public_ip_prefix: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param provisioning_state: The provisioning state of the frontend IP configuration resource.
Possible values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_validation = {
'inbound_nat_rules': {'readonly': True},
'inbound_nat_pools': {'readonly': True},
'outbound_rules': {'readonly': True},
'load_balancing_rules': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'zones': {'key': 'zones', 'type': '[str]'},
'inbound_nat_rules': {'key': 'properties.inboundNatRules', 'type': '[SubResource]'},
'inbound_nat_pools': {'key': 'properties.inboundNatPools', 'type': '[SubResource]'},
'outbound_rules': {'key': 'properties.outboundRules', 'type': '[SubResource]'},
'load_balancing_rules': {'key': 'properties.loadBalancingRules', 'type': '[SubResource]'},
'private_ip_address': {'key': 'properties.privateIPAddress', 'type': 'str'},
'private_ip_allocation_method': {'key': 'properties.privateIPAllocationMethod', 'type': 'str'},
'private_ip_address_version': {'key': 'properties.privateIPAddressVersion', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'Subnet'},
'public_ip_address': {'key': 'properties.publicIPAddress', 'type': 'PublicIPAddress'},
'public_ip_prefix': {'key': 'properties.publicIPPrefix', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(FrontendIPConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.zones = kwargs.get('zones', None)
self.inbound_nat_rules = None
self.inbound_nat_pools = None
self.outbound_rules = None
self.load_balancing_rules = None
self.private_ip_address = kwargs.get('private_ip_address', None)
self.private_ip_allocation_method = kwargs.get('private_ip_allocation_method', None)
self.private_ip_address_version = kwargs.get('private_ip_address_version', None)
self.subnet = kwargs.get('subnet', None)
self.public_ip_address = kwargs.get('public_ip_address', None)
self.public_ip_prefix = kwargs.get('public_ip_prefix', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
class GatewayRoute(msrest.serialization.Model):
"""Gateway routing details.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar local_address: The gateway's local address.
:vartype local_address: str
:ivar network: The route's network prefix.
:vartype network: str
:ivar next_hop: The route's next hop.
:vartype next_hop: str
:ivar source_peer: The peer this route was learned from.
:vartype source_peer: str
:ivar origin: The source this route was learned from.
:vartype origin: str
:ivar as_path: The route's AS path sequence.
:vartype as_path: str
:ivar weight: The route's weight.
:vartype weight: int
"""
_validation = {
'local_address': {'readonly': True},
'network': {'readonly': True},
'next_hop': {'readonly': True},
'source_peer': {'readonly': True},
'origin': {'readonly': True},
'as_path': {'readonly': True},
'weight': {'readonly': True},
}
_attribute_map = {
'local_address': {'key': 'localAddress', 'type': 'str'},
'network': {'key': 'network', 'type': 'str'},
'next_hop': {'key': 'nextHop', 'type': 'str'},
'source_peer': {'key': 'sourcePeer', 'type': 'str'},
'origin': {'key': 'origin', 'type': 'str'},
'as_path': {'key': 'asPath', 'type': 'str'},
'weight': {'key': 'weight', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(GatewayRoute, self).__init__(**kwargs)
self.local_address = None
self.network = None
self.next_hop = None
self.source_peer = None
self.origin = None
self.as_path = None
self.weight = None
class GatewayRouteListResult(msrest.serialization.Model):
"""List of virtual network gateway routes.
:param value: List of gateway routes.
:type value: list[~azure.mgmt.network.v2019_04_01.models.GatewayRoute]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[GatewayRoute]'},
}
def __init__(
self,
**kwargs
):
super(GatewayRouteListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
class GetVpnSitesConfigurationRequest(msrest.serialization.Model):
"""List of Vpn-Sites.
All required parameters must be populated in order to send to Azure.
:param vpn_sites: List of resource-ids of the vpn-sites for which config is to be downloaded.
:type vpn_sites: list[str]
:param output_blob_sas_url: Required. The sas-url to download the configurations for vpn-sites.
:type output_blob_sas_url: str
"""
_validation = {
'output_blob_sas_url': {'required': True},
}
_attribute_map = {
'vpn_sites': {'key': 'vpnSites', 'type': '[str]'},
'output_blob_sas_url': {'key': 'outputBlobSasUrl', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(GetVpnSitesConfigurationRequest, self).__init__(**kwargs)
self.vpn_sites = kwargs.get('vpn_sites', None)
self.output_blob_sas_url = kwargs['output_blob_sas_url']
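# Editor's note (illustrative sketch only): a GetVpnSitesConfigurationRequest with its
# required SAS URL. The site resource ID and the blob SAS URL are caller-supplied placeholders.
def _example_get_vpn_sites_configuration_request():
    return GetVpnSitesConfigurationRequest(
        vpn_sites=["/subscriptions/.../vpnSites/example-site"],
        output_blob_sas_url="https://examplestorage.blob.core.windows.net/config?sv=...",
    )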
class HTTPConfiguration(msrest.serialization.Model):
"""HTTP configuration of the connectivity check.
:param method: HTTP method. Possible values include: "Get".
:type method: str or ~azure.mgmt.network.v2019_04_01.models.HTTPMethod
:param headers: List of HTTP headers.
:type headers: list[~azure.mgmt.network.v2019_04_01.models.HTTPHeader]
:param valid_status_codes: Valid status codes.
:type valid_status_codes: list[int]
"""
_attribute_map = {
'method': {'key': 'method', 'type': 'str'},
'headers': {'key': 'headers', 'type': '[HTTPHeader]'},
'valid_status_codes': {'key': 'validStatusCodes', 'type': '[int]'},
}
def __init__(
self,
**kwargs
):
super(HTTPConfiguration, self).__init__(**kwargs)
self.method = kwargs.get('method', None)
self.headers = kwargs.get('headers', None)
self.valid_status_codes = kwargs.get('valid_status_codes', None)
class HTTPHeader(msrest.serialization.Model):
"""Describes the HTTP header.
:param name: The name in HTTP header.
:type name: str
:param value: The value in HTTP header.
:type value: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(HTTPHeader, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.value = kwargs.get('value', None)
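# Editor's note (illustrative sketch only): the HTTP settings of a connectivity check,
# combining HTTPConfiguration and HTTPHeader above. Header name/value and the accepted
# status codes are placeholders.
def _example_http_configuration():
    return HTTPConfiguration(
        method="Get",
        headers=[HTTPHeader(name="Accept", value="application/json")],
        valid_status_codes=[200, 202],
    )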
class HubVirtualNetworkConnection(SubResource):
"""HubVirtualNetworkConnection Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param remote_virtual_network: Reference to the remote virtual network.
:type remote_virtual_network: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param allow_hub_to_remote_vnet_transit: Whether VirtualHub to RemoteVnet transit is enabled.
:type allow_hub_to_remote_vnet_transit: bool
:param allow_remote_vnet_to_use_hub_vnet_gateways: Allow RemoteVnet to use Virtual Hub's
gateways.
:type allow_remote_vnet_to_use_hub_vnet_gateways: bool
:param enable_internet_security: Enable internet security.
:type enable_internet_security: bool
:ivar provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'remote_virtual_network': {'key': 'properties.remoteVirtualNetwork', 'type': 'SubResource'},
'allow_hub_to_remote_vnet_transit': {'key': 'properties.allowHubToRemoteVnetTransit', 'type': 'bool'},
'allow_remote_vnet_to_use_hub_vnet_gateways': {'key': 'properties.allowRemoteVnetToUseHubVnetGateways', 'type': 'bool'},
'enable_internet_security': {'key': 'properties.enableInternetSecurity', 'type': 'bool'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(HubVirtualNetworkConnection, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.remote_virtual_network = kwargs.get('remote_virtual_network', None)
self.allow_hub_to_remote_vnet_transit = kwargs.get('allow_hub_to_remote_vnet_transit', None)
self.allow_remote_vnet_to_use_hub_vnet_gateways = kwargs.get('allow_remote_vnet_to_use_hub_vnet_gateways', None)
self.enable_internet_security = kwargs.get('enable_internet_security', None)
self.provisioning_state = None
class InboundNatPool(SubResource):
"""Inbound NAT pool of the load balancer.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param frontend_ip_configuration: A reference to frontend IP addresses.
:type frontend_ip_configuration: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param protocol: The reference to the transport protocol used by the inbound NAT pool. Possible
values include: "Udp", "Tcp", "All".
:type protocol: str or ~azure.mgmt.network.v2019_04_01.models.TransportProtocol
:param frontend_port_range_start: The first port number in the range of external ports that
will be used to provide Inbound Nat to NICs associated with a load balancer. Acceptable values
range between 1 and 65534.
:type frontend_port_range_start: int
:param frontend_port_range_end: The last port number in the range of external ports that will
be used to provide Inbound Nat to NICs associated with a load balancer. Acceptable values range
between 1 and 65535.
:type frontend_port_range_end: int
:param backend_port: The port used for internal connections on the endpoint. Acceptable values
are between 1 and 65535.
:type backend_port: int
:param idle_timeout_in_minutes: The timeout for the TCP idle connection. The value can be set
between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the
protocol is set to TCP.
:type idle_timeout_in_minutes: int
:param enable_floating_ip: Configures a virtual machine's endpoint for the floating IP
capability required to configure a SQL AlwaysOn Availability Group. This setting is required
when using the SQL AlwaysOn Availability Groups in SQL server. This setting can't be changed
after you create the endpoint.
:type enable_floating_ip: bool
:param enable_tcp_reset: Receive bidirectional TCP Reset on TCP flow idle timeout or unexpected
connection termination. This element is only used when the protocol is set to TCP.
:type enable_tcp_reset: bool
:param provisioning_state: The provisioning state of the inbound NAT pool resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'frontend_ip_configuration': {'key': 'properties.frontendIPConfiguration', 'type': 'SubResource'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'frontend_port_range_start': {'key': 'properties.frontendPortRangeStart', 'type': 'int'},
'frontend_port_range_end': {'key': 'properties.frontendPortRangeEnd', 'type': 'int'},
'backend_port': {'key': 'properties.backendPort', 'type': 'int'},
'idle_timeout_in_minutes': {'key': 'properties.idleTimeoutInMinutes', 'type': 'int'},
'enable_floating_ip': {'key': 'properties.enableFloatingIP', 'type': 'bool'},
'enable_tcp_reset': {'key': 'properties.enableTcpReset', 'type': 'bool'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(InboundNatPool, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.frontend_ip_configuration = kwargs.get('frontend_ip_configuration', None)
self.protocol = kwargs.get('protocol', None)
self.frontend_port_range_start = kwargs.get('frontend_port_range_start', None)
self.frontend_port_range_end = kwargs.get('frontend_port_range_end', None)
self.backend_port = kwargs.get('backend_port', None)
self.idle_timeout_in_minutes = kwargs.get('idle_timeout_in_minutes', None)
self.enable_floating_ip = kwargs.get('enable_floating_ip', None)
self.enable_tcp_reset = kwargs.get('enable_tcp_reset', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
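# Editor's note (illustrative sketch only): an InboundNatPool mapping the external port
# range 50000-50119 to backend port 22 over TCP. The frontend IP configuration reference
# uses SubResource, which is defined earlier in this module; its resource ID is a placeholder.
def _example_inbound_nat_pool():
    return InboundNatPool(
        name="ssh-nat-pool",
        protocol="Tcp",
        frontend_ip_configuration=SubResource(id="/subscriptions/.../frontendIPConfigurations/example-fe"),
        frontend_port_range_start=50000,
        frontend_port_range_end=50119,
        backend_port=22,
        idle_timeout_in_minutes=4,
    )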
class InboundNatRule(SubResource):
"""Inbound NAT rule of the load balancer.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param frontend_ip_configuration: A reference to frontend IP addresses.
:type frontend_ip_configuration: ~azure.mgmt.network.v2019_04_01.models.SubResource
:ivar backend_ip_configuration: A reference to a private IP address defined on a network
interface of a VM. Traffic sent to the frontend port of each of the frontend IP configurations
is forwarded to the backend IP.
:vartype backend_ip_configuration:
~azure.mgmt.network.v2019_04_01.models.NetworkInterfaceIPConfiguration
:param protocol: The reference to the transport protocol used by the inbound NAT rule.
Possible values include: "Udp", "Tcp", "All".
:type protocol: str or ~azure.mgmt.network.v2019_04_01.models.TransportProtocol
:param frontend_port: The port for the external endpoint. Port numbers for each rule must be
unique within the Load Balancer. Acceptable values range from 1 to 65534.
:type frontend_port: int
:param backend_port: The port used for the internal endpoint. Acceptable values range from 1 to
65535.
:type backend_port: int
:param idle_timeout_in_minutes: The timeout for the TCP idle connection. The value can be set
between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the
protocol is set to TCP.
:type idle_timeout_in_minutes: int
:param enable_floating_ip: Configures a virtual machine's endpoint for the floating IP
capability required to configure a SQL AlwaysOn Availability Group. This setting is required
when using the SQL AlwaysOn Availability Groups in SQL server. This setting can't be changed
after you create the endpoint.
:type enable_floating_ip: bool
:param enable_tcp_reset: Receive bidirectional TCP Reset on TCP flow idle timeout or unexpected
connection termination. This element is only used when the protocol is set to TCP.
:type enable_tcp_reset: bool
:param provisioning_state: The provisioning state of the inbound NAT rule resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_validation = {
'backend_ip_configuration': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'frontend_ip_configuration': {'key': 'properties.frontendIPConfiguration', 'type': 'SubResource'},
'backend_ip_configuration': {'key': 'properties.backendIPConfiguration', 'type': 'NetworkInterfaceIPConfiguration'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'frontend_port': {'key': 'properties.frontendPort', 'type': 'int'},
'backend_port': {'key': 'properties.backendPort', 'type': 'int'},
'idle_timeout_in_minutes': {'key': 'properties.idleTimeoutInMinutes', 'type': 'int'},
'enable_floating_ip': {'key': 'properties.enableFloatingIP', 'type': 'bool'},
'enable_tcp_reset': {'key': 'properties.enableTcpReset', 'type': 'bool'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(InboundNatRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.frontend_ip_configuration = kwargs.get('frontend_ip_configuration', None)
self.backend_ip_configuration = None
self.protocol = kwargs.get('protocol', None)
self.frontend_port = kwargs.get('frontend_port', None)
self.backend_port = kwargs.get('backend_port', None)
self.idle_timeout_in_minutes = kwargs.get('idle_timeout_in_minutes', None)
self.enable_floating_ip = kwargs.get('enable_floating_ip', None)
self.enable_tcp_reset = kwargs.get('enable_tcp_reset', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
class InboundNatRuleListResult(msrest.serialization.Model):
"""Response for ListInboundNatRule API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of inbound nat rules in a load balancer.
:type value: list[~azure.mgmt.network.v2019_04_01.models.InboundNatRule]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[InboundNatRule]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(InboundNatRuleListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class IPAddressAvailabilityResult(msrest.serialization.Model):
"""Response for CheckIPAddressAvailability API service call.
:param available: Private IP address availability.
:type available: bool
:param available_ip_addresses: Other available private IP addresses if the requested address
is taken.
:type available_ip_addresses: list[str]
"""
_attribute_map = {
'available': {'key': 'available', 'type': 'bool'},
'available_ip_addresses': {'key': 'availableIPAddresses', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(IPAddressAvailabilityResult, self).__init__(**kwargs)
self.available = kwargs.get('available', None)
self.available_ip_addresses = kwargs.get('available_ip_addresses', None)
class IPConfiguration(SubResource):
"""IP configuration.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param private_ip_address: The private IP address of the IP configuration.
:type private_ip_address: str
:param private_ip_allocation_method: The private IP address allocation method. Possible values
include: "Static", "Dynamic".
:type private_ip_allocation_method: str or
~azure.mgmt.network.v2019_04_01.models.IPAllocationMethod
:param subnet: The reference of the subnet resource.
:type subnet: ~azure.mgmt.network.v2019_04_01.models.Subnet
:param public_ip_address: The reference of the public IP resource.
:type public_ip_address: ~azure.mgmt.network.v2019_04_01.models.PublicIPAddress
:param provisioning_state: The provisioning state of the IP configuration resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'private_ip_address': {'key': 'properties.privateIPAddress', 'type': 'str'},
'private_ip_allocation_method': {'key': 'properties.privateIPAllocationMethod', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'Subnet'},
'public_ip_address': {'key': 'properties.publicIPAddress', 'type': 'PublicIPAddress'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(IPConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.private_ip_address = kwargs.get('private_ip_address', None)
self.private_ip_allocation_method = kwargs.get('private_ip_allocation_method', None)
self.subnet = kwargs.get('subnet', None)
self.public_ip_address = kwargs.get('public_ip_address', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
class IPConfigurationProfile(SubResource):
"""IP configuration profile child resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource. This name can be used to access the resource.
:type name: str
:ivar type: Sub Resource type.
:vartype type: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param subnet: The reference of the subnet resource to create a container network interface ip
configuration.
:type subnet: ~azure.mgmt.network.v2019_04_01.models.Subnet
:ivar provisioning_state: The provisioning state of the resource.
:vartype provisioning_state: str
"""
_validation = {
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'Subnet'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(IPConfigurationProfile, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = None
self.etag = kwargs.get('etag', None)
self.subnet = kwargs.get('subnet', None)
self.provisioning_state = None
class IpsecPolicy(msrest.serialization.Model):
"""An IPSec Policy configuration for a virtual network gateway connection.
All required parameters must be populated in order to send to Azure.
:param sa_life_time_seconds: Required. The IPSec Security Association (also called Quick Mode
or Phase 2 SA) lifetime in seconds for a site to site VPN tunnel.
:type sa_life_time_seconds: int
:param sa_data_size_kilobytes: Required. The IPSec Security Association (also called Quick Mode
or Phase 2 SA) payload size in KB for a site to site VPN tunnel.
:type sa_data_size_kilobytes: int
:param ipsec_encryption: Required. The IPSec encryption algorithm (IKE phase 2). Possible
values include: "None", "DES", "DES3", "AES128", "AES192", "AES256", "GCMAES128", "GCMAES192",
"GCMAES256".
:type ipsec_encryption: str or ~azure.mgmt.network.v2019_04_01.models.IpsecEncryption
:param ipsec_integrity: Required. The IPSec integrity algorithm (IKE phase 2). Possible values
include: "MD5", "SHA1", "SHA256", "GCMAES128", "GCMAES192", "GCMAES256".
:type ipsec_integrity: str or ~azure.mgmt.network.v2019_04_01.models.IpsecIntegrity
:param ike_encryption: Required. The IKE encryption algorithm (IKE phase 1). Possible values
include: "DES", "DES3", "AES128", "AES192", "AES256", "GCMAES256", "GCMAES128".
:type ike_encryption: str or ~azure.mgmt.network.v2019_04_01.models.IkeEncryption
:param ike_integrity: Required. The IKE integrity algorithm (IKE phase 1). Possible values
include: "MD5", "SHA1", "SHA256", "SHA384", "GCMAES256", "GCMAES128".
:type ike_integrity: str or ~azure.mgmt.network.v2019_04_01.models.IkeIntegrity
:param dh_group: Required. The DH Group used in IKE Phase 1 for initial SA. Possible values
include: "None", "DHGroup1", "DHGroup2", "DHGroup14", "DHGroup2048", "ECP256", "ECP384",
"DHGroup24".
:type dh_group: str or ~azure.mgmt.network.v2019_04_01.models.DhGroup
:param pfs_group: Required. The Pfs Group used in IKE Phase 2 for new child SA. Possible values
include: "None", "PFS1", "PFS2", "PFS2048", "ECP256", "ECP384", "PFS24", "PFS14", "PFSMM".
:type pfs_group: str or ~azure.mgmt.network.v2019_04_01.models.PfsGroup
"""
_validation = {
'sa_life_time_seconds': {'required': True},
'sa_data_size_kilobytes': {'required': True},
'ipsec_encryption': {'required': True},
'ipsec_integrity': {'required': True},
'ike_encryption': {'required': True},
'ike_integrity': {'required': True},
'dh_group': {'required': True},
'pfs_group': {'required': True},
}
_attribute_map = {
'sa_life_time_seconds': {'key': 'saLifeTimeSeconds', 'type': 'int'},
'sa_data_size_kilobytes': {'key': 'saDataSizeKilobytes', 'type': 'int'},
'ipsec_encryption': {'key': 'ipsecEncryption', 'type': 'str'},
'ipsec_integrity': {'key': 'ipsecIntegrity', 'type': 'str'},
'ike_encryption': {'key': 'ikeEncryption', 'type': 'str'},
'ike_integrity': {'key': 'ikeIntegrity', 'type': 'str'},
'dh_group': {'key': 'dhGroup', 'type': 'str'},
'pfs_group': {'key': 'pfsGroup', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(IpsecPolicy, self).__init__(**kwargs)
self.sa_life_time_seconds = kwargs['sa_life_time_seconds']
self.sa_data_size_kilobytes = kwargs['sa_data_size_kilobytes']
self.ipsec_encryption = kwargs['ipsec_encryption']
self.ipsec_integrity = kwargs['ipsec_integrity']
self.ike_encryption = kwargs['ike_encryption']
self.ike_integrity = kwargs['ike_integrity']
self.dh_group = kwargs['dh_group']
self.pfs_group = kwargs['pfs_group']
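# Editor's note (illustrative sketch only): an IpsecPolicy with every required field
# populated from the allowed values documented above. The algorithm choices and lifetimes
# are placeholders, not configuration guidance.
def _example_ipsec_policy():
    return IpsecPolicy(
        sa_life_time_seconds=27000,
        sa_data_size_kilobytes=102400000,
        ipsec_encryption="AES256",
        ipsec_integrity="SHA256",
        ike_encryption="AES256",
        ike_integrity="SHA256",
        dh_group="DHGroup14",
        pfs_group="PFS14",
    )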
class IpTag(msrest.serialization.Model):
"""Contains the IpTag associated with the object.
:param ip_tag_type: The IP tag type. Example: FirstPartyUsage.
:type ip_tag_type: str
:param tag: The value of the IpTag associated with the public IP. Example: SQL, Storage, etc.
:type tag: str
"""
_attribute_map = {
'ip_tag_type': {'key': 'ipTagType', 'type': 'str'},
'tag': {'key': 'tag', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(IpTag, self).__init__(**kwargs)
self.ip_tag_type = kwargs.get('ip_tag_type', None)
self.tag = kwargs.get('tag', None)
class Ipv6ExpressRouteCircuitPeeringConfig(msrest.serialization.Model):
"""Contains IPv6 peering config.
:param primary_peer_address_prefix: The primary address prefix.
:type primary_peer_address_prefix: str
:param secondary_peer_address_prefix: The secondary address prefix.
:type secondary_peer_address_prefix: str
:param microsoft_peering_config: The Microsoft peering configuration.
:type microsoft_peering_config:
~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitPeeringConfig
:param route_filter: The reference of the RouteFilter resource.
:type route_filter: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param state: The state of peering. Possible values include: "Disabled", "Enabled".
:type state: str or ~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitPeeringState
"""
_attribute_map = {
'primary_peer_address_prefix': {'key': 'primaryPeerAddressPrefix', 'type': 'str'},
'secondary_peer_address_prefix': {'key': 'secondaryPeerAddressPrefix', 'type': 'str'},
'microsoft_peering_config': {'key': 'microsoftPeeringConfig', 'type': 'ExpressRouteCircuitPeeringConfig'},
'route_filter': {'key': 'routeFilter', 'type': 'SubResource'},
'state': {'key': 'state', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Ipv6ExpressRouteCircuitPeeringConfig, self).__init__(**kwargs)
self.primary_peer_address_prefix = kwargs.get('primary_peer_address_prefix', None)
self.secondary_peer_address_prefix = kwargs.get('secondary_peer_address_prefix', None)
self.microsoft_peering_config = kwargs.get('microsoft_peering_config', None)
self.route_filter = kwargs.get('route_filter', None)
self.state = kwargs.get('state', None)
class ListHubVirtualNetworkConnectionsResult(msrest.serialization.Model):
"""List of HubVirtualNetworkConnections and a URL nextLink to get the next set of results.
:param value: List of HubVirtualNetworkConnections.
:type value: list[~azure.mgmt.network.v2019_04_01.models.HubVirtualNetworkConnection]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[HubVirtualNetworkConnection]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListHubVirtualNetworkConnectionsResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ListP2SVpnGatewaysResult(msrest.serialization.Model):
"""Result of the request to list P2SVpnGateways. It contains a list of P2SVpnGateways and a URL nextLink to get the next set of results.
:param value: List of P2SVpnGateways.
:type value: list[~azure.mgmt.network.v2019_04_01.models.P2SVpnGateway]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[P2SVpnGateway]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListP2SVpnGatewaysResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ListP2SVpnServerConfigurationsResult(msrest.serialization.Model):
"""Result of the request to list all P2SVpnServerConfigurations associated to a VirtualWan. It contains a list of P2SVpnServerConfigurations and a URL nextLink to get the next set of results.
:param value: List of P2SVpnServerConfigurations.
:type value: list[~azure.mgmt.network.v2019_04_01.models.P2SVpnServerConfiguration]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[P2SVpnServerConfiguration]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListP2SVpnServerConfigurationsResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ListVirtualHubsResult(msrest.serialization.Model):
"""Result of the request to list VirtualHubs. It contains a list of VirtualHubs and a URL nextLink to get the next set of results.
:param value: List of VirtualHubs.
:type value: list[~azure.mgmt.network.v2019_04_01.models.VirtualHub]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualHub]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListVirtualHubsResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ListVirtualWANsResult(msrest.serialization.Model):
"""Result of the request to list VirtualWANs. It contains a list of VirtualWANs and a URL nextLink to get the next set of results.
:param value: List of VirtualWANs.
:type value: list[~azure.mgmt.network.v2019_04_01.models.VirtualWAN]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualWAN]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListVirtualWANsResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ListVpnConnectionsResult(msrest.serialization.Model):
"""Result of the request to list all vpn connections to a virtual wan vpn gateway. It contains a list of Vpn Connections and a URL nextLink to get the next set of results.
:param value: List of Vpn Connections.
:type value: list[~azure.mgmt.network.v2019_04_01.models.VpnConnection]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VpnConnection]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListVpnConnectionsResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ListVpnGatewaysResult(msrest.serialization.Model):
"""Result of the request to list VpnGateways. It contains a list of VpnGateways and a URL nextLink to get the next set of results.
:param value: List of VpnGateways.
:type value: list[~azure.mgmt.network.v2019_04_01.models.VpnGateway]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VpnGateway]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListVpnGatewaysResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ListVpnSitesResult(msrest.serialization.Model):
"""Result of the request to list VpnSites. It contains a list of VpnSites and a URL nextLink to get the next set of results.
:param value: List of VpnSites.
:type value: list[~azure.mgmt.network.v2019_04_01.models.VpnSite]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VpnSite]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListVpnSitesResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class LoadBalancer(Resource):
"""LoadBalancer resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param sku: The load balancer SKU.
:type sku: ~azure.mgmt.network.v2019_04_01.models.LoadBalancerSku
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param frontend_ip_configurations: Object representing the frontend IPs to be used for the load
balancer.
:type frontend_ip_configurations:
list[~azure.mgmt.network.v2019_04_01.models.FrontendIPConfiguration]
:param backend_address_pools: Collection of backend address pools used by a load balancer.
:type backend_address_pools: list[~azure.mgmt.network.v2019_04_01.models.BackendAddressPool]
:param load_balancing_rules: Object collection representing the load balancing rules used by the
load balancer.
:type load_balancing_rules: list[~azure.mgmt.network.v2019_04_01.models.LoadBalancingRule]
:param probes: Collection of probe objects used in the load balancer.
:type probes: list[~azure.mgmt.network.v2019_04_01.models.Probe]
:param inbound_nat_rules: Collection of inbound NAT Rules used by a load balancer. Defining
inbound NAT rules on your load balancer is mutually exclusive with defining an inbound NAT
pool. Inbound NAT pools are referenced from virtual machine scale sets. NICs that are
associated with individual virtual machines cannot reference an Inbound NAT pool. They have to
reference individual inbound NAT rules.
:type inbound_nat_rules: list[~azure.mgmt.network.v2019_04_01.models.InboundNatRule]
:param inbound_nat_pools: Defines an external port range for inbound NAT to a single backend
port on NICs associated with a load balancer. Inbound NAT rules are created automatically for
each NIC associated with the Load Balancer using an external port from this range. Defining an
Inbound NAT pool on your Load Balancer is mutually exclusive with defining inbound Nat rules.
Inbound NAT pools are referenced from virtual machine scale sets. NICs that are associated with
individual virtual machines cannot reference an inbound NAT pool. They have to reference
individual inbound NAT rules.
:type inbound_nat_pools: list[~azure.mgmt.network.v2019_04_01.models.InboundNatPool]
:param outbound_rules: The outbound rules.
:type outbound_rules: list[~azure.mgmt.network.v2019_04_01.models.OutboundRule]
:param resource_guid: The resource GUID property of the load balancer resource.
:type resource_guid: str
:param provisioning_state: Gets the provisioning state of the load balancer resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'sku': {'key': 'sku', 'type': 'LoadBalancerSku'},
'etag': {'key': 'etag', 'type': 'str'},
'frontend_ip_configurations': {'key': 'properties.frontendIPConfigurations', 'type': '[FrontendIPConfiguration]'},
'backend_address_pools': {'key': 'properties.backendAddressPools', 'type': '[BackendAddressPool]'},
'load_balancing_rules': {'key': 'properties.loadBalancingRules', 'type': '[LoadBalancingRule]'},
'probes': {'key': 'properties.probes', 'type': '[Probe]'},
'inbound_nat_rules': {'key': 'properties.inboundNatRules', 'type': '[InboundNatRule]'},
'inbound_nat_pools': {'key': 'properties.inboundNatPools', 'type': '[InboundNatPool]'},
'outbound_rules': {'key': 'properties.outboundRules', 'type': '[OutboundRule]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LoadBalancer, self).__init__(**kwargs)
self.sku = kwargs.get('sku', None)
self.etag = kwargs.get('etag', None)
self.frontend_ip_configurations = kwargs.get('frontend_ip_configurations', None)
self.backend_address_pools = kwargs.get('backend_address_pools', None)
self.load_balancing_rules = kwargs.get('load_balancing_rules', None)
self.probes = kwargs.get('probes', None)
self.inbound_nat_rules = kwargs.get('inbound_nat_rules', None)
self.inbound_nat_pools = kwargs.get('inbound_nat_pools', None)
self.outbound_rules = kwargs.get('outbound_rules', None)
self.resource_guid = kwargs.get('resource_guid', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
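# Illustrative sketch (assumed usage; FrontendIPConfiguration and Subnet are other models
# in this module, and the subnet ID is a placeholder): a minimal Standard load balancer
# payload might be assembled as
#
#     lb = LoadBalancer(
#         location='westus',
#         sku=LoadBalancerSku(name='Standard'),
#         frontend_ip_configurations=[
#             FrontendIPConfiguration(name='fe1', subnet=Subnet(id='<subnet-resource-id>')),
#         ],
#     )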
class LoadBalancerBackendAddressPoolListResult(msrest.serialization.Model):
"""Response for ListBackendAddressPool API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of backend address pools in a load balancer.
:type value: list[~azure.mgmt.network.v2019_04_01.models.BackendAddressPool]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[BackendAddressPool]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LoadBalancerBackendAddressPoolListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class LoadBalancerFrontendIPConfigurationListResult(msrest.serialization.Model):
"""Response for ListFrontendIPConfiguration API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of frontend IP configurations in a load balancer.
:type value: list[~azure.mgmt.network.v2019_04_01.models.FrontendIPConfiguration]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[FrontendIPConfiguration]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LoadBalancerFrontendIPConfigurationListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class LoadBalancerListResult(msrest.serialization.Model):
"""Response for ListLoadBalancers API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of load balancers in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.LoadBalancer]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[LoadBalancer]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LoadBalancerListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class LoadBalancerLoadBalancingRuleListResult(msrest.serialization.Model):
"""Response for ListLoadBalancingRule API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of load balancing rules in a load balancer.
:type value: list[~azure.mgmt.network.v2019_04_01.models.LoadBalancingRule]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[LoadBalancingRule]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LoadBalancerLoadBalancingRuleListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class LoadBalancerOutboundRuleListResult(msrest.serialization.Model):
"""Response for ListOutboundRule API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of outbound rules in a load balancer.
:type value: list[~azure.mgmt.network.v2019_04_01.models.OutboundRule]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[OutboundRule]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LoadBalancerOutboundRuleListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class LoadBalancerProbeListResult(msrest.serialization.Model):
"""Response for ListProbe API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of probes in a load balancer.
:type value: list[~azure.mgmt.network.v2019_04_01.models.Probe]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Probe]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LoadBalancerProbeListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class LoadBalancerSku(msrest.serialization.Model):
"""SKU of a load balancer.
:param name: Name of a load balancer SKU. Possible values include: "Basic", "Standard".
:type name: str or ~azure.mgmt.network.v2019_04_01.models.LoadBalancerSkuName
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LoadBalancerSku, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
class LoadBalancingRule(SubResource):
"""A load balancing rule for a load balancer.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param frontend_ip_configuration: A reference to frontend IP addresses.
:type frontend_ip_configuration: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param backend_address_pool: A reference to a pool of DIPs. Inbound traffic is randomly load
balanced across the IPs in the backend pool.
:type backend_address_pool: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param probe: The reference of the load balancer probe used by the load balancing rule.
:type probe: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param protocol: The reference to the transport protocol used by the load balancing rule.
Possible values include: "Udp", "Tcp", "All".
:type protocol: str or ~azure.mgmt.network.v2019_04_01.models.TransportProtocol
:param load_distribution: The load distribution policy for this rule. Possible values include:
"Default", "SourceIP", "SourceIPProtocol".
:type load_distribution: str or ~azure.mgmt.network.v2019_04_01.models.LoadDistribution
:param frontend_port: The port for the external endpoint. Port numbers for each rule must be
unique within the Load Balancer. Acceptable values are between 0 and 65534. Note that value 0
enables "Any Port".
:type frontend_port: int
:param backend_port: The port used for internal connections on the endpoint. Acceptable values
are between 0 and 65535. Note that value 0 enables "Any Port".
:type backend_port: int
:param idle_timeout_in_minutes: The timeout for the TCP idle connection. The value can be set
between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the
protocol is set to TCP.
:type idle_timeout_in_minutes: int
:param enable_floating_ip: Configures a virtual machine's endpoint for the floating IP
capability required to configure a SQL AlwaysOn Availability Group. This setting is required
when using the SQL AlwaysOn Availability Groups in SQL server. This setting can't be changed
after you create the endpoint.
:type enable_floating_ip: bool
:param enable_tcp_reset: Receive bidirectional TCP Reset on TCP flow idle timeout or unexpected
connection termination. This element is only used when the protocol is set to TCP.
:type enable_tcp_reset: bool
:param disable_outbound_snat: Configures SNAT for the VMs in the backend pool to use the
publicIP address specified in the frontend of the load balancing rule.
:type disable_outbound_snat: bool
:param provisioning_state: Gets the provisioning state of the load balancing rule. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'frontend_ip_configuration': {'key': 'properties.frontendIPConfiguration', 'type': 'SubResource'},
'backend_address_pool': {'key': 'properties.backendAddressPool', 'type': 'SubResource'},
'probe': {'key': 'properties.probe', 'type': 'SubResource'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'load_distribution': {'key': 'properties.loadDistribution', 'type': 'str'},
'frontend_port': {'key': 'properties.frontendPort', 'type': 'int'},
'backend_port': {'key': 'properties.backendPort', 'type': 'int'},
'idle_timeout_in_minutes': {'key': 'properties.idleTimeoutInMinutes', 'type': 'int'},
'enable_floating_ip': {'key': 'properties.enableFloatingIP', 'type': 'bool'},
'enable_tcp_reset': {'key': 'properties.enableTcpReset', 'type': 'bool'},
'disable_outbound_snat': {'key': 'properties.disableOutboundSnat', 'type': 'bool'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LoadBalancingRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.frontend_ip_configuration = kwargs.get('frontend_ip_configuration', None)
self.backend_address_pool = kwargs.get('backend_address_pool', None)
self.probe = kwargs.get('probe', None)
self.protocol = kwargs.get('protocol', None)
self.load_distribution = kwargs.get('load_distribution', None)
self.frontend_port = kwargs.get('frontend_port', None)
self.backend_port = kwargs.get('backend_port', None)
self.idle_timeout_in_minutes = kwargs.get('idle_timeout_in_minutes', None)
self.enable_floating_ip = kwargs.get('enable_floating_ip', None)
self.enable_tcp_reset = kwargs.get('enable_tcp_reset', None)
self.disable_outbound_snat = kwargs.get('disable_outbound_snat', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
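# Illustrative sketch (assumed usage; the SubResource IDs are placeholders): a TCP rule
# forwarding port 80 to the backend pool, referencing frontend, pool and probe by ID.
#
#     rule = LoadBalancingRule(
#         name='http-rule',
#         protocol='Tcp',
#         frontend_port=80,
#         backend_port=80,
#         frontend_ip_configuration=SubResource(id='<frontend-ip-config-id>'),
#         backend_address_pool=SubResource(id='<backend-pool-id>'),
#         probe=SubResource(id='<probe-id>'),
#     )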
class LocalNetworkGateway(Resource):
"""A common class for general resource information.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param local_network_address_space: Local network site address space.
:type local_network_address_space: ~azure.mgmt.network.v2019_04_01.models.AddressSpace
:param gateway_ip_address: IP address of local network gateway.
:type gateway_ip_address: str
:param bgp_settings: Local network gateway's BGP speaker settings.
:type bgp_settings: ~azure.mgmt.network.v2019_04_01.models.BgpSettings
:param resource_guid: The resource GUID property of the LocalNetworkGateway resource.
:type resource_guid: str
:ivar provisioning_state: The provisioning state of the LocalNetworkGateway resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'local_network_address_space': {'key': 'properties.localNetworkAddressSpace', 'type': 'AddressSpace'},
'gateway_ip_address': {'key': 'properties.gatewayIpAddress', 'type': 'str'},
'bgp_settings': {'key': 'properties.bgpSettings', 'type': 'BgpSettings'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LocalNetworkGateway, self).__init__(**kwargs)
self.etag = kwargs.get('etag', None)
self.local_network_address_space = kwargs.get('local_network_address_space', None)
self.gateway_ip_address = kwargs.get('gateway_ip_address', None)
self.bgp_settings = kwargs.get('bgp_settings', None)
self.resource_guid = kwargs.get('resource_guid', None)
self.provisioning_state = None
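# Illustrative sketch (assumed usage; AddressSpace is another model in this module and the
# IP address is example data): a local network gateway pairs the on-premises VPN device
# address with the address space reachable behind it.
#
#     lng = LocalNetworkGateway(
#         location='westus',
#         gateway_ip_address='203.0.113.10',
#         local_network_address_space=AddressSpace(address_prefixes=['10.1.0.0/16']),
#     )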
class LocalNetworkGatewayListResult(msrest.serialization.Model):
"""Response for ListLocalNetworkGateways API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of local network gateways that exists in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.LocalNetworkGateway]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[LocalNetworkGateway]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LocalNetworkGatewayListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class LogSpecification(msrest.serialization.Model):
"""Description of logging specification.
:param name: The name of the specification.
:type name: str
:param display_name: The display name of the specification.
:type display_name: str
:param blob_duration: Duration of the blob.
:type blob_duration: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'blob_duration': {'key': 'blobDuration', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LogSpecification, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.display_name = kwargs.get('display_name', None)
self.blob_duration = kwargs.get('blob_duration', None)
class ManagedServiceIdentity(msrest.serialization.Model):
"""Identity for the resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar principal_id: The principal id of the system assigned identity. This property will only
be provided for a system assigned identity.
:vartype principal_id: str
:ivar tenant_id: The tenant id of the system assigned identity. This property will only be
provided for a system assigned identity.
:vartype tenant_id: str
:param type: The type of identity used for the resource. The type 'SystemAssigned,
UserAssigned' includes both an implicitly created identity and a set of user assigned
identities. The type 'None' will remove any identities from the virtual machine. Possible
values include: "SystemAssigned", "UserAssigned", "SystemAssigned, UserAssigned", "None".
:type type: str or ~azure.mgmt.network.v2019_04_01.models.ResourceIdentityType
:param user_assigned_identities: The list of user identities associated with the resource. The user
identity dictionary key references will be ARM resource ids in the form:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
:type user_assigned_identities: dict[str,
~azure.mgmt.network.v2019_04_01.models.Components1Jq1T4ISchemasManagedserviceidentityPropertiesUserassignedidentitiesAdditionalproperties]
"""
_validation = {
'principal_id': {'readonly': True},
'tenant_id': {'readonly': True},
}
_attribute_map = {
'principal_id': {'key': 'principalId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{Components1Jq1T4ISchemasManagedserviceidentityPropertiesUserassignedidentitiesAdditionalproperties}'},
}
def __init__(
self,
**kwargs
):
super(ManagedServiceIdentity, self).__init__(**kwargs)
self.principal_id = None
self.tenant_id = None
self.type = kwargs.get('type', None)
self.user_assigned_identities = kwargs.get('user_assigned_identities', None)
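# Illustrative sketch (assumed usage): requesting a system-assigned identity only needs the
# identity type; principal_id and tenant_id are read-only and filled in by the service.
#
#     identity = ManagedServiceIdentity(type='SystemAssigned')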
class MatchCondition(msrest.serialization.Model):
"""Define match conditions.
All required parameters must be populated in order to send to Azure.
:param match_variables: Required. List of match variables.
:type match_variables: list[~azure.mgmt.network.v2019_04_01.models.MatchVariable]
:param operator: Required. Describes operator to be matched. Possible values include:
"IPMatch", "Equal", "Contains", "LessThan", "GreaterThan", "LessThanOrEqual",
"GreaterThanOrEqual", "BeginsWith", "EndsWith", "Regex".
:type operator: str or ~azure.mgmt.network.v2019_04_01.models.WebApplicationFirewallOperator
:param negation_conditon: Whether this is a negate condition or not.
:type negation_conditon: bool
:param match_values: Required. Match value.
:type match_values: list[str]
:param transforms: List of transforms.
:type transforms: list[str or
~azure.mgmt.network.v2019_04_01.models.WebApplicationFirewallTransform]
"""
_validation = {
'match_variables': {'required': True},
'operator': {'required': True},
'match_values': {'required': True},
}
_attribute_map = {
'match_variables': {'key': 'matchVariables', 'type': '[MatchVariable]'},
'operator': {'key': 'operator', 'type': 'str'},
'negation_conditon': {'key': 'negationConditon', 'type': 'bool'},
'match_values': {'key': 'matchValues', 'type': '[str]'},
'transforms': {'key': 'transforms', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(MatchCondition, self).__init__(**kwargs)
self.match_variables = kwargs['match_variables']
self.operator = kwargs['operator']
self.negation_conditon = kwargs.get('negation_conditon', None)
self.match_values = kwargs['match_values']
self.transforms = kwargs.get('transforms', None)
class MatchedRule(msrest.serialization.Model):
"""Matched rule.
:param rule_name: Name of the matched network security rule.
:type rule_name: str
:param action: The network traffic is allowed or denied. Possible values are 'Allow' and
'Deny'.
:type action: str
"""
_attribute_map = {
'rule_name': {'key': 'ruleName', 'type': 'str'},
'action': {'key': 'action', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MatchedRule, self).__init__(**kwargs)
self.rule_name = kwargs.get('rule_name', None)
self.action = kwargs.get('action', None)
class MatchVariable(msrest.serialization.Model):
"""Define match variables.
All required parameters must be populated in order to send to Azure.
:param variable_name: Required. Match Variable. Possible values include: "RemoteAddr",
"RequestMethod", "QueryString", "PostArgs", "RequestUri", "RequestHeaders", "RequestBody",
"RequestCookies".
:type variable_name: str or
~azure.mgmt.network.v2019_04_01.models.WebApplicationFirewallMatchVariable
:param selector: Describes field of the matchVariable collection.
:type selector: str
"""
_validation = {
'variable_name': {'required': True},
}
_attribute_map = {
'variable_name': {'key': 'variableName', 'type': 'str'},
'selector': {'key': 'selector', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MatchVariable, self).__init__(**kwargs)
self.variable_name = kwargs['variable_name']
self.selector = kwargs.get('selector', None)
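# Illustrative sketch (assumed usage, example values only): a WAF custom-rule match that
# inspects the User-Agent request header combines MatchVariable and MatchCondition.
#
#     condition = MatchCondition(
#         match_variables=[MatchVariable(variable_name='RequestHeaders', selector='User-Agent')],
#         operator='Contains',
#         match_values=['BadBot'],
#     )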
class MetricSpecification(msrest.serialization.Model):
"""Description of metrics specification.
:param name: The name of the metric.
:type name: str
:param display_name: The display name of the metric.
:type display_name: str
:param display_description: The description of the metric.
:type display_description: str
:param unit: Units the metric to be displayed in.
:type unit: str
:param aggregation_type: The aggregation type.
:type aggregation_type: str
:param availabilities: List of availability.
:type availabilities: list[~azure.mgmt.network.v2019_04_01.models.Availability]
:param enable_regional_mdm_account: Whether regional MDM account enabled.
:type enable_regional_mdm_account: bool
:param fill_gap_with_zero: Whether gaps would be filled with zeros.
:type fill_gap_with_zero: bool
:param metric_filter_pattern: Pattern for the filter of the metric.
:type metric_filter_pattern: str
:param dimensions: List of dimensions.
:type dimensions: list[~azure.mgmt.network.v2019_04_01.models.Dimension]
:param is_internal: Whether the metric is internal.
:type is_internal: bool
:param source_mdm_account: The source MDM account.
:type source_mdm_account: str
:param source_mdm_namespace: The source MDM namespace.
:type source_mdm_namespace: str
:param resource_id_dimension_name_override: The resource Id dimension name override.
:type resource_id_dimension_name_override: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'display_description': {'key': 'displayDescription', 'type': 'str'},
'unit': {'key': 'unit', 'type': 'str'},
'aggregation_type': {'key': 'aggregationType', 'type': 'str'},
'availabilities': {'key': 'availabilities', 'type': '[Availability]'},
'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'bool'},
'fill_gap_with_zero': {'key': 'fillGapWithZero', 'type': 'bool'},
'metric_filter_pattern': {'key': 'metricFilterPattern', 'type': 'str'},
'dimensions': {'key': 'dimensions', 'type': '[Dimension]'},
'is_internal': {'key': 'isInternal', 'type': 'bool'},
'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'},
'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'},
'resource_id_dimension_name_override': {'key': 'resourceIdDimensionNameOverride', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MetricSpecification, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.display_name = kwargs.get('display_name', None)
self.display_description = kwargs.get('display_description', None)
self.unit = kwargs.get('unit', None)
self.aggregation_type = kwargs.get('aggregation_type', None)
self.availabilities = kwargs.get('availabilities', None)
self.enable_regional_mdm_account = kwargs.get('enable_regional_mdm_account', None)
self.fill_gap_with_zero = kwargs.get('fill_gap_with_zero', None)
self.metric_filter_pattern = kwargs.get('metric_filter_pattern', None)
self.dimensions = kwargs.get('dimensions', None)
self.is_internal = kwargs.get('is_internal', None)
self.source_mdm_account = kwargs.get('source_mdm_account', None)
self.source_mdm_namespace = kwargs.get('source_mdm_namespace', None)
self.resource_id_dimension_name_override = kwargs.get('resource_id_dimension_name_override', None)
class NatGateway(Resource):
"""Nat Gateway resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param sku: The nat gateway SKU.
:type sku: ~azure.mgmt.network.v2019_04_01.models.NatGatewaySku
:param zones: A list of availability zones denoting the zone in which Nat Gateway should be
deployed.
:type zones: list[str]
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param idle_timeout_in_minutes: The idle timeout of the nat gateway.
:type idle_timeout_in_minutes: int
:param public_ip_addresses: An array of public ip addresses associated with the nat gateway
resource.
:type public_ip_addresses: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:param public_ip_prefixes: An array of public ip prefixes associated with the nat gateway
resource.
:type public_ip_prefixes: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:ivar subnets: An array of references to the subnets using this nat gateway resource.
:vartype subnets: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:param resource_guid: The resource GUID property of the nat gateway resource.
:type resource_guid: str
:param provisioning_state: The provisioning state of the NatGateway resource. Possible values
are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'subnets': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'sku': {'key': 'sku', 'type': 'NatGatewaySku'},
'zones': {'key': 'zones', 'type': '[str]'},
'etag': {'key': 'etag', 'type': 'str'},
'idle_timeout_in_minutes': {'key': 'properties.idleTimeoutInMinutes', 'type': 'int'},
'public_ip_addresses': {'key': 'properties.publicIpAddresses', 'type': '[SubResource]'},
'public_ip_prefixes': {'key': 'properties.publicIpPrefixes', 'type': '[SubResource]'},
'subnets': {'key': 'properties.subnets', 'type': '[SubResource]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NatGateway, self).__init__(**kwargs)
self.sku = kwargs.get('sku', None)
self.zones = kwargs.get('zones', None)
self.etag = kwargs.get('etag', None)
self.idle_timeout_in_minutes = kwargs.get('idle_timeout_in_minutes', None)
self.public_ip_addresses = kwargs.get('public_ip_addresses', None)
self.public_ip_prefixes = kwargs.get('public_ip_prefixes', None)
self.subnets = None
self.resource_guid = kwargs.get('resource_guid', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
class NatGatewayListResult(msrest.serialization.Model):
"""Response for ListNatGateways API service call.
:param value: A list of Nat Gateways that exists in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.NatGateway]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[NatGateway]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NatGatewayListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class NatGatewaySku(msrest.serialization.Model):
"""SKU of nat gateway.
:param name: Name of Nat Gateway SKU. Possible values include: "Standard".
:type name: str or ~azure.mgmt.network.v2019_04_01.models.NatGatewaySkuName
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NatGatewaySku, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
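# Illustrative sketch (assumed usage; the public IP resource ID is a placeholder): a NAT
# gateway references its SKU and the public IP addresses it should SNAT through.
#
#     nat_gw = NatGateway(
#         location='eastus',
#         sku=NatGatewaySku(name='Standard'),
#         idle_timeout_in_minutes=10,
#         public_ip_addresses=[SubResource(id='<public-ip-resource-id>')],
#     )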
class NetworkConfigurationDiagnosticParameters(msrest.serialization.Model):
"""Parameters to get network configuration diagnostic.
All required parameters must be populated in order to send to Azure.
:param target_resource_id: Required. The ID of the target resource to perform network
configuration diagnostic. Valid options are VM, NetworkInterface, VMSS/NetworkInterface and
Application Gateway.
:type target_resource_id: str
:param verbosity_level: Verbosity level. Possible values include: "Normal", "Minimum", "Full".
:type verbosity_level: str or ~azure.mgmt.network.v2019_04_01.models.VerbosityLevel
:param profiles: Required. List of network configuration diagnostic profiles.
:type profiles:
list[~azure.mgmt.network.v2019_04_01.models.NetworkConfigurationDiagnosticProfile]
"""
_validation = {
'target_resource_id': {'required': True},
'profiles': {'required': True},
}
_attribute_map = {
'target_resource_id': {'key': 'targetResourceId', 'type': 'str'},
'verbosity_level': {'key': 'verbosityLevel', 'type': 'str'},
'profiles': {'key': 'profiles', 'type': '[NetworkConfigurationDiagnosticProfile]'},
}
def __init__(
self,
**kwargs
):
super(NetworkConfigurationDiagnosticParameters, self).__init__(**kwargs)
self.target_resource_id = kwargs['target_resource_id']
self.verbosity_level = kwargs.get('verbosity_level', None)
self.profiles = kwargs['profiles']
class NetworkConfigurationDiagnosticProfile(msrest.serialization.Model):
"""Parameters to compare with network configuration.
All required parameters must be populated in order to send to Azure.
:param direction: Required. The direction of the traffic. Possible values include: "Inbound",
"Outbound".
:type direction: str or ~azure.mgmt.network.v2019_04_01.models.Direction
:param protocol: Required. Protocol to be verified. Accepted values are '*', TCP, UDP.
:type protocol: str
:param source: Required. Traffic source. Accepted values are '*', IP Address/CIDR, Service Tag.
:type source: str
:param destination: Required. Traffic destination. Accepted values are: '*', IP Address/CIDR,
Service Tag.
:type destination: str
:param destination_port: Required. Traffic destination port. Accepted values are '*', port (for
example, 3389) and port range (for example, 80-100).
:type destination_port: str
"""
_validation = {
'direction': {'required': True},
'protocol': {'required': True},
'source': {'required': True},
'destination': {'required': True},
'destination_port': {'required': True},
}
_attribute_map = {
'direction': {'key': 'direction', 'type': 'str'},
'protocol': {'key': 'protocol', 'type': 'str'},
'source': {'key': 'source', 'type': 'str'},
'destination': {'key': 'destination', 'type': 'str'},
'destination_port': {'key': 'destinationPort', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkConfigurationDiagnosticProfile, self).__init__(**kwargs)
self.direction = kwargs['direction']
self.protocol = kwargs['protocol']
self.source = kwargs['source']
self.destination = kwargs['destination']
self.destination_port = kwargs['destination_port']
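# Illustrative sketch (assumed usage; the target resource ID is a placeholder): the
# diagnostic parameters wrap one or more traffic profiles to be evaluated against the
# target resource's effective network configuration.
#
#     profile = NetworkConfigurationDiagnosticProfile(
#         direction='Inbound',
#         protocol='TCP',
#         source='*',
#         destination='10.0.0.4',
#         destination_port='3389',
#     )
#     params = NetworkConfigurationDiagnosticParameters(
#         target_resource_id='<vm-or-nic-resource-id>',
#         profiles=[profile],
#     )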
class NetworkConfigurationDiagnosticResponse(msrest.serialization.Model):
"""Results of network configuration diagnostic on the target resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar results: List of network configuration diagnostic results.
:vartype results:
list[~azure.mgmt.network.v2019_04_01.models.NetworkConfigurationDiagnosticResult]
"""
_validation = {
'results': {'readonly': True},
}
_attribute_map = {
'results': {'key': 'results', 'type': '[NetworkConfigurationDiagnosticResult]'},
}
def __init__(
self,
**kwargs
):
super(NetworkConfigurationDiagnosticResponse, self).__init__(**kwargs)
self.results = None
class NetworkConfigurationDiagnosticResult(msrest.serialization.Model):
"""Network configuration diagnostic result corresponded to provided traffic query.
:param profile: Network configuration diagnostic profile.
:type profile: ~azure.mgmt.network.v2019_04_01.models.NetworkConfigurationDiagnosticProfile
:param network_security_group_result: Network security group result.
:type network_security_group_result:
~azure.mgmt.network.v2019_04_01.models.NetworkSecurityGroupResult
"""
_attribute_map = {
'profile': {'key': 'profile', 'type': 'NetworkConfigurationDiagnosticProfile'},
'network_security_group_result': {'key': 'networkSecurityGroupResult', 'type': 'NetworkSecurityGroupResult'},
}
def __init__(
self,
**kwargs
):
super(NetworkConfigurationDiagnosticResult, self).__init__(**kwargs)
self.profile = kwargs.get('profile', None)
self.network_security_group_result = kwargs.get('network_security_group_result', None)
class NetworkIntentPolicy(Resource):
"""Network Intent Policy resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param etag: Gets a unique read-only string that changes whenever the resource is updated.
:type etag: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkIntentPolicy, self).__init__(**kwargs)
self.etag = kwargs.get('etag', None)
class NetworkIntentPolicyConfiguration(msrest.serialization.Model):
"""Details of NetworkIntentPolicyConfiguration for PrepareNetworkPoliciesRequest.
:param network_intent_policy_name: The name of the Network Intent Policy for storing in target
subscription.
:type network_intent_policy_name: str
:param source_network_intent_policy: Source network intent policy.
:type source_network_intent_policy: ~azure.mgmt.network.v2019_04_01.models.NetworkIntentPolicy
"""
_attribute_map = {
'network_intent_policy_name': {'key': 'networkIntentPolicyName', 'type': 'str'},
'source_network_intent_policy': {'key': 'sourceNetworkIntentPolicy', 'type': 'NetworkIntentPolicy'},
}
def __init__(
self,
**kwargs
):
super(NetworkIntentPolicyConfiguration, self).__init__(**kwargs)
self.network_intent_policy_name = kwargs.get('network_intent_policy_name', None)
self.source_network_intent_policy = kwargs.get('source_network_intent_policy', None)
class NetworkInterface(Resource):
"""A network interface in a resource group.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:ivar virtual_machine: The reference of a virtual machine.
:vartype virtual_machine: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param network_security_group: The reference of the NetworkSecurityGroup resource.
:type network_security_group: ~azure.mgmt.network.v2019_04_01.models.NetworkSecurityGroup
:ivar private_endpoint: A reference to the private endpoint to which the network interface is
linked.
:vartype private_endpoint: ~azure.mgmt.network.v2019_04_01.models.PrivateEndpoint
:param ip_configurations: A list of IPConfigurations of the network interface.
:type ip_configurations:
list[~azure.mgmt.network.v2019_04_01.models.NetworkInterfaceIPConfiguration]
:param tap_configurations: A list of TapConfigurations of the network interface.
:type tap_configurations:
list[~azure.mgmt.network.v2019_04_01.models.NetworkInterfaceTapConfiguration]
:param dns_settings: The DNS settings in network interface.
:type dns_settings: ~azure.mgmt.network.v2019_04_01.models.NetworkInterfaceDnsSettings
:param mac_address: The MAC address of the network interface.
:type mac_address: str
:param primary: Gets whether this is a primary network interface on a virtual machine.
:type primary: bool
:param enable_accelerated_networking: If the network interface is accelerated networking
enabled.
:type enable_accelerated_networking: bool
:param enable_ip_forwarding: Indicates whether IP forwarding is enabled on this network
interface.
:type enable_ip_forwarding: bool
:ivar hosted_workloads: A list of references to linked BareMetal resources.
:vartype hosted_workloads: list[str]
:param resource_guid: The resource GUID property of the network interface resource.
:type resource_guid: str
:param provisioning_state: The provisioning state of the network interface resource. Possible values
are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'virtual_machine': {'readonly': True},
'private_endpoint': {'readonly': True},
'hosted_workloads': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'virtual_machine': {'key': 'properties.virtualMachine', 'type': 'SubResource'},
'network_security_group': {'key': 'properties.networkSecurityGroup', 'type': 'NetworkSecurityGroup'},
'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'},
'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[NetworkInterfaceIPConfiguration]'},
'tap_configurations': {'key': 'properties.tapConfigurations', 'type': '[NetworkInterfaceTapConfiguration]'},
'dns_settings': {'key': 'properties.dnsSettings', 'type': 'NetworkInterfaceDnsSettings'},
'mac_address': {'key': 'properties.macAddress', 'type': 'str'},
'primary': {'key': 'properties.primary', 'type': 'bool'},
'enable_accelerated_networking': {'key': 'properties.enableAcceleratedNetworking', 'type': 'bool'},
'enable_ip_forwarding': {'key': 'properties.enableIPForwarding', 'type': 'bool'},
'hosted_workloads': {'key': 'properties.hostedWorkloads', 'type': '[str]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkInterface, self).__init__(**kwargs)
self.etag = kwargs.get('etag', None)
self.virtual_machine = None
self.network_security_group = kwargs.get('network_security_group', None)
self.private_endpoint = None
self.ip_configurations = kwargs.get('ip_configurations', None)
self.tap_configurations = kwargs.get('tap_configurations', None)
self.dns_settings = kwargs.get('dns_settings', None)
self.mac_address = kwargs.get('mac_address', None)
self.primary = kwargs.get('primary', None)
self.enable_accelerated_networking = kwargs.get('enable_accelerated_networking', None)
self.enable_ip_forwarding = kwargs.get('enable_ip_forwarding', None)
self.hosted_workloads = None
self.resource_guid = kwargs.get('resource_guid', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
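# Illustrative sketch (assumed usage; the subnet ID is a placeholder): a network interface
# is defined mainly by its IP configurations, with optional flags such as accelerated
# networking.
#
#     nic = NetworkInterface(
#         location='westus',
#         enable_accelerated_networking=True,
#         ip_configurations=[
#             NetworkInterfaceIPConfiguration(
#                 name='ipconfig1',
#                 subnet=Subnet(id='<subnet-resource-id>'),
#                 private_ip_allocation_method='Dynamic',
#             ),
#         ],
#     )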
class NetworkInterfaceAssociation(msrest.serialization.Model):
"""Network interface and its custom security rules.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Network interface ID.
:vartype id: str
:param security_rules: Collection of custom security rules.
:type security_rules: list[~azure.mgmt.network.v2019_04_01.models.SecurityRule]
"""
_validation = {
'id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'security_rules': {'key': 'securityRules', 'type': '[SecurityRule]'},
}
def __init__(
self,
**kwargs
):
super(NetworkInterfaceAssociation, self).__init__(**kwargs)
self.id = None
self.security_rules = kwargs.get('security_rules', None)
class NetworkInterfaceDnsSettings(msrest.serialization.Model):
"""DNS settings of a network interface.
:param dns_servers: List of DNS server IP addresses. Use 'AzureProvidedDNS' to switch to
Azure-provided DNS resolution. The 'AzureProvidedDNS' value cannot be combined with other IPs; it must be
the only value in dnsServers collection.
:type dns_servers: list[str]
:param applied_dns_servers: If the VM that uses this NIC is part of an Availability Set, then
this list will have the union of all DNS servers from all NICs that are part of the
Availability Set. This property is what is configured on each of those VMs.
:type applied_dns_servers: list[str]
:param internal_dns_name_label: Relative DNS name for this NIC used for internal communications
between VMs in the same virtual network.
:type internal_dns_name_label: str
:param internal_fqdn: Fully qualified DNS name supporting internal communications between VMs
in the same virtual network.
:type internal_fqdn: str
:param internal_domain_name_suffix: Even if internalDnsNameLabel is not specified, a DNS entry
is created for the primary NIC of the VM. This DNS name can be constructed by concatenating the
VM name with the value of internalDomainNameSuffix.
:type internal_domain_name_suffix: str
"""
_attribute_map = {
'dns_servers': {'key': 'dnsServers', 'type': '[str]'},
'applied_dns_servers': {'key': 'appliedDnsServers', 'type': '[str]'},
'internal_dns_name_label': {'key': 'internalDnsNameLabel', 'type': 'str'},
'internal_fqdn': {'key': 'internalFqdn', 'type': 'str'},
'internal_domain_name_suffix': {'key': 'internalDomainNameSuffix', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkInterfaceDnsSettings, self).__init__(**kwargs)
self.dns_servers = kwargs.get('dns_servers', None)
self.applied_dns_servers = kwargs.get('applied_dns_servers', None)
self.internal_dns_name_label = kwargs.get('internal_dns_name_label', None)
self.internal_fqdn = kwargs.get('internal_fqdn', None)
self.internal_domain_name_suffix = kwargs.get('internal_domain_name_suffix', None)
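# Illustrative sketch (assumed usage, example addresses only): custom DNS servers are
# supplied as a plain list; 'AzureProvidedDNS' must be the only entry when used.
#
#     dns = NetworkInterfaceDnsSettings(dns_servers=['10.0.0.4', '10.0.0.5'])
#     azure_dns = NetworkInterfaceDnsSettings(dns_servers=['AzureProvidedDNS'])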
class NetworkInterfaceIPConfiguration(SubResource):
"""IPConfiguration in a network interface.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param virtual_network_taps: The reference to Virtual Network Taps.
:type virtual_network_taps: list[~azure.mgmt.network.v2019_04_01.models.VirtualNetworkTap]
:param application_gateway_backend_address_pools: The reference of
ApplicationGatewayBackendAddressPool resource.
:type application_gateway_backend_address_pools:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationGatewayBackendAddressPool]
:param load_balancer_backend_address_pools: The reference of LoadBalancerBackendAddressPool
resource.
:type load_balancer_backend_address_pools:
list[~azure.mgmt.network.v2019_04_01.models.BackendAddressPool]
:param load_balancer_inbound_nat_rules: A list of references of LoadBalancerInboundNatRules.
:type load_balancer_inbound_nat_rules:
list[~azure.mgmt.network.v2019_04_01.models.InboundNatRule]
:param private_ip_address: Private IP address of the IP configuration.
:type private_ip_address: str
:param private_ip_allocation_method: The private IP address allocation method. Possible values
include: "Static", "Dynamic".
:type private_ip_allocation_method: str or
~azure.mgmt.network.v2019_04_01.models.IPAllocationMethod
:param private_ip_address_version: Available from Api-Version 2016-03-30 onwards, it represents
whether the specific ipconfiguration is IPv4 or IPv6. Default is taken as IPv4. Possible values
include: "IPv4", "IPv6".
:type private_ip_address_version: str or ~azure.mgmt.network.v2019_04_01.models.IPVersion
:param subnet: Subnet bound to the IP configuration.
:type subnet: ~azure.mgmt.network.v2019_04_01.models.Subnet
:param primary: Gets whether this is a primary customer address on the network interface.
:type primary: bool
:param public_ip_address: Public IP address bound to the IP configuration.
:type public_ip_address: ~azure.mgmt.network.v2019_04_01.models.PublicIPAddress
:param application_security_groups: Application security groups in which the IP configuration
is included.
:type application_security_groups:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationSecurityGroup]
:param provisioning_state: The provisioning state of the network interface IP configuration.
Possible values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'virtual_network_taps': {'key': 'properties.virtualNetworkTaps', 'type': '[VirtualNetworkTap]'},
'application_gateway_backend_address_pools': {'key': 'properties.applicationGatewayBackendAddressPools', 'type': '[ApplicationGatewayBackendAddressPool]'},
'load_balancer_backend_address_pools': {'key': 'properties.loadBalancerBackendAddressPools', 'type': '[BackendAddressPool]'},
'load_balancer_inbound_nat_rules': {'key': 'properties.loadBalancerInboundNatRules', 'type': '[InboundNatRule]'},
'private_ip_address': {'key': 'properties.privateIPAddress', 'type': 'str'},
'private_ip_allocation_method': {'key': 'properties.privateIPAllocationMethod', 'type': 'str'},
'private_ip_address_version': {'key': 'properties.privateIPAddressVersion', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'Subnet'},
'primary': {'key': 'properties.primary', 'type': 'bool'},
'public_ip_address': {'key': 'properties.publicIPAddress', 'type': 'PublicIPAddress'},
'application_security_groups': {'key': 'properties.applicationSecurityGroups', 'type': '[ApplicationSecurityGroup]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkInterfaceIPConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.virtual_network_taps = kwargs.get('virtual_network_taps', None)
self.application_gateway_backend_address_pools = kwargs.get('application_gateway_backend_address_pools', None)
self.load_balancer_backend_address_pools = kwargs.get('load_balancer_backend_address_pools', None)
self.load_balancer_inbound_nat_rules = kwargs.get('load_balancer_inbound_nat_rules', None)
self.private_ip_address = kwargs.get('private_ip_address', None)
self.private_ip_allocation_method = kwargs.get('private_ip_allocation_method', None)
self.private_ip_address_version = kwargs.get('private_ip_address_version', None)
self.subnet = kwargs.get('subnet', None)
self.primary = kwargs.get('primary', None)
self.public_ip_address = kwargs.get('public_ip_address', None)
self.application_security_groups = kwargs.get('application_security_groups', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
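# Illustrative sketch (assumed usage; the subnet ID is a placeholder): an IP configuration
# with a statically allocated private address on a given subnet.
#
#     static_ipconfig = NetworkInterfaceIPConfiguration(
#         name='ipconfig-static',
#         subnet=Subnet(id='<subnet-resource-id>'),
#         private_ip_allocation_method='Static',
#         private_ip_address='10.0.0.10',
#     )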
class NetworkInterfaceIPConfigurationListResult(msrest.serialization.Model):
"""Response for list ip configurations API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of ip configurations.
:type value: list[~azure.mgmt.network.v2019_04_01.models.NetworkInterfaceIPConfiguration]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[NetworkInterfaceIPConfiguration]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkInterfaceIPConfigurationListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class NetworkInterfaceListResult(msrest.serialization.Model):
"""Response for the ListNetworkInterface API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of network interfaces in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.NetworkInterface]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[NetworkInterface]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkInterfaceListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class NetworkInterfaceLoadBalancerListResult(msrest.serialization.Model):
"""Response for list ip configurations API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of load balancers.
:type value: list[~azure.mgmt.network.v2019_04_01.models.LoadBalancer]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[LoadBalancer]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkInterfaceLoadBalancerListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class NetworkInterfaceTapConfiguration(SubResource):
"""Tap configuration in a Network Interface.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:ivar type: Sub Resource type.
:vartype type: str
:param virtual_network_tap: The reference of the Virtual Network Tap resource.
:type virtual_network_tap: ~azure.mgmt.network.v2019_04_01.models.VirtualNetworkTap
:ivar provisioning_state: The provisioning state of the network interface tap configuration.
Possible values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'virtual_network_tap': {'key': 'properties.virtualNetworkTap', 'type': 'VirtualNetworkTap'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkInterfaceTapConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.type = None
self.virtual_network_tap = kwargs.get('virtual_network_tap', None)
self.provisioning_state = None
class NetworkInterfaceTapConfigurationListResult(msrest.serialization.Model):
"""Response for list tap configurations API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of tap configurations.
:type value: list[~azure.mgmt.network.v2019_04_01.models.NetworkInterfaceTapConfiguration]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[NetworkInterfaceTapConfiguration]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkInterfaceTapConfigurationListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class NetworkProfile(Resource):
"""Network profile resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param container_network_interfaces: List of child container network interfaces.
:type container_network_interfaces:
list[~azure.mgmt.network.v2019_04_01.models.ContainerNetworkInterface]
    :param container_network_interface_configurations: List of child container network interface
configurations.
:type container_network_interface_configurations:
list[~azure.mgmt.network.v2019_04_01.models.ContainerNetworkInterfaceConfiguration]
    :ivar resource_guid: The resource GUID property of the network profile resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the resource.
:vartype provisioning_state: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'container_network_interfaces': {'key': 'properties.containerNetworkInterfaces', 'type': '[ContainerNetworkInterface]'},
'container_network_interface_configurations': {'key': 'properties.containerNetworkInterfaceConfigurations', 'type': '[ContainerNetworkInterfaceConfiguration]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkProfile, self).__init__(**kwargs)
self.etag = kwargs.get('etag', None)
self.container_network_interfaces = kwargs.get('container_network_interfaces', None)
self.container_network_interface_configurations = kwargs.get('container_network_interface_configurations', None)
self.resource_guid = None
self.provisioning_state = None
class NetworkProfileListResult(msrest.serialization.Model):
"""Response for ListNetworkProfiles API service call.
:param value: A list of network profiles that exist in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.NetworkProfile]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[NetworkProfile]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkProfileListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class NetworkSecurityGroup(Resource):
"""NetworkSecurityGroup resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param security_rules: A collection of security rules of the network security group.
:type security_rules: list[~azure.mgmt.network.v2019_04_01.models.SecurityRule]
:param default_security_rules: The default security rules of network security group.
:type default_security_rules: list[~azure.mgmt.network.v2019_04_01.models.SecurityRule]
:ivar network_interfaces: A collection of references to network interfaces.
:vartype network_interfaces: list[~azure.mgmt.network.v2019_04_01.models.NetworkInterface]
:ivar subnets: A collection of references to subnets.
:vartype subnets: list[~azure.mgmt.network.v2019_04_01.models.Subnet]
:param resource_guid: The resource GUID property of the network security group resource.
:type resource_guid: str
    :param provisioning_state: The provisioning state of the network security group resource. Possible values
are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'network_interfaces': {'readonly': True},
'subnets': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'security_rules': {'key': 'properties.securityRules', 'type': '[SecurityRule]'},
'default_security_rules': {'key': 'properties.defaultSecurityRules', 'type': '[SecurityRule]'},
'network_interfaces': {'key': 'properties.networkInterfaces', 'type': '[NetworkInterface]'},
'subnets': {'key': 'properties.subnets', 'type': '[Subnet]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkSecurityGroup, self).__init__(**kwargs)
self.etag = kwargs.get('etag', None)
self.security_rules = kwargs.get('security_rules', None)
self.default_security_rules = kwargs.get('default_security_rules', None)
self.network_interfaces = None
self.subnets = None
self.resource_guid = kwargs.get('resource_guid', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
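# Editorial note: an illustrative sketch (not generated code) of a NetworkSecurityGroup body
# for a create/update request. Only location and tags are set here; security_rules would take
# a list of SecurityRule models defined elsewhere in this module. The values are placeholders.
def _example_network_security_group():
    return NetworkSecurityGroup(
        location='westus',
        tags={'env': 'dev'},
    )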
class NetworkSecurityGroupListResult(msrest.serialization.Model):
"""Response for ListNetworkSecurityGroups API service call.
:param value: A list of NetworkSecurityGroup resources.
:type value: list[~azure.mgmt.network.v2019_04_01.models.NetworkSecurityGroup]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[NetworkSecurityGroup]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkSecurityGroupListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class NetworkSecurityGroupResult(msrest.serialization.Model):
"""Network configuration diagnostic result corresponded provided traffic query.
Variables are only populated by the server, and will be ignored when sending a request.
:param security_rule_access_result: The network traffic is allowed or denied. Possible values
include: "Allow", "Deny".
:type security_rule_access_result: str or
~azure.mgmt.network.v2019_04_01.models.SecurityRuleAccess
    :ivar evaluated_network_security_groups: List of network security group diagnostic results.
:vartype evaluated_network_security_groups:
list[~azure.mgmt.network.v2019_04_01.models.EvaluatedNetworkSecurityGroup]
"""
_validation = {
'evaluated_network_security_groups': {'readonly': True},
}
_attribute_map = {
'security_rule_access_result': {'key': 'securityRuleAccessResult', 'type': 'str'},
'evaluated_network_security_groups': {'key': 'evaluatedNetworkSecurityGroups', 'type': '[EvaluatedNetworkSecurityGroup]'},
}
def __init__(
self,
**kwargs
):
super(NetworkSecurityGroupResult, self).__init__(**kwargs)
self.security_rule_access_result = kwargs.get('security_rule_access_result', None)
self.evaluated_network_security_groups = None
class NetworkSecurityRulesEvaluationResult(msrest.serialization.Model):
"""Network security rules evaluation result.
:param name: Name of the network security rule.
:type name: str
:param protocol_matched: Value indicating whether protocol is matched.
:type protocol_matched: bool
:param source_matched: Value indicating whether source is matched.
:type source_matched: bool
:param source_port_matched: Value indicating whether source port is matched.
:type source_port_matched: bool
:param destination_matched: Value indicating whether destination is matched.
:type destination_matched: bool
:param destination_port_matched: Value indicating whether destination port is matched.
:type destination_port_matched: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'protocol_matched': {'key': 'protocolMatched', 'type': 'bool'},
'source_matched': {'key': 'sourceMatched', 'type': 'bool'},
'source_port_matched': {'key': 'sourcePortMatched', 'type': 'bool'},
'destination_matched': {'key': 'destinationMatched', 'type': 'bool'},
'destination_port_matched': {'key': 'destinationPortMatched', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(NetworkSecurityRulesEvaluationResult, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.protocol_matched = kwargs.get('protocol_matched', None)
self.source_matched = kwargs.get('source_matched', None)
self.source_port_matched = kwargs.get('source_port_matched', None)
self.destination_matched = kwargs.get('destination_matched', None)
self.destination_port_matched = kwargs.get('destination_port_matched', None)
class NetworkWatcher(Resource):
"""Network watcher in a resource group.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:ivar provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkWatcher, self).__init__(**kwargs)
self.etag = kwargs.get('etag', None)
self.provisioning_state = None
class NetworkWatcherListResult(msrest.serialization.Model):
"""Response for ListNetworkWatchers API service call.
:param value: List of network watcher resources.
:type value: list[~azure.mgmt.network.v2019_04_01.models.NetworkWatcher]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[NetworkWatcher]'},
}
def __init__(
self,
**kwargs
):
super(NetworkWatcherListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
class NextHopParameters(msrest.serialization.Model):
"""Parameters that define the source and destination endpoint.
All required parameters must be populated in order to send to Azure.
:param target_resource_id: Required. The resource identifier of the target resource against
which the action is to be performed.
:type target_resource_id: str
:param source_ip_address: Required. The source IP address.
:type source_ip_address: str
:param destination_ip_address: Required. The destination IP address.
:type destination_ip_address: str
    :param target_nic_resource_id: The NIC ID. (If the VM has multiple NICs and IP forwarding is
    enabled on any of them, this parameter must be specified; otherwise it is optional.)
:type target_nic_resource_id: str
"""
_validation = {
'target_resource_id': {'required': True},
'source_ip_address': {'required': True},
'destination_ip_address': {'required': True},
}
_attribute_map = {
'target_resource_id': {'key': 'targetResourceId', 'type': 'str'},
'source_ip_address': {'key': 'sourceIPAddress', 'type': 'str'},
'destination_ip_address': {'key': 'destinationIPAddress', 'type': 'str'},
'target_nic_resource_id': {'key': 'targetNicResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NextHopParameters, self).__init__(**kwargs)
self.target_resource_id = kwargs['target_resource_id']
self.source_ip_address = kwargs['source_ip_address']
self.destination_ip_address = kwargs['destination_ip_address']
self.target_nic_resource_id = kwargs.get('target_nic_resource_id', None)
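# Editorial note: an illustrative sketch (not generated code) of NextHopParameters.
# target_resource_id, source_ip_address and destination_ip_address are required; because
# __init__ reads them with kwargs['...'], omitting any of them raises KeyError at construction
# time. The resource ID below is a hypothetical placeholder.
def _example_next_hop_parameters():
    return NextHopParameters(
        target_resource_id='/subscriptions/<sub>/resourceGroups/<rg>/providers/'
                           'Microsoft.Compute/virtualMachines/<vm>',
        source_ip_address='10.0.0.4',
        destination_ip_address='10.0.0.5',
    )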
class NextHopResult(msrest.serialization.Model):
"""The information about next hop from the specified VM.
:param next_hop_type: Next hop type. Possible values include: "Internet", "VirtualAppliance",
"VirtualNetworkGateway", "VnetLocal", "HyperNetGateway", "None".
:type next_hop_type: str or ~azure.mgmt.network.v2019_04_01.models.NextHopType
:param next_hop_ip_address: Next hop IP Address.
:type next_hop_ip_address: str
:param route_table_id: The resource identifier for the route table associated with the route
being returned. If the route being returned does not correspond to any user created routes then
this field will be the string 'System Route'.
:type route_table_id: str
"""
_attribute_map = {
'next_hop_type': {'key': 'nextHopType', 'type': 'str'},
'next_hop_ip_address': {'key': 'nextHopIpAddress', 'type': 'str'},
'route_table_id': {'key': 'routeTableId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NextHopResult, self).__init__(**kwargs)
self.next_hop_type = kwargs.get('next_hop_type', None)
self.next_hop_ip_address = kwargs.get('next_hop_ip_address', None)
self.route_table_id = kwargs.get('route_table_id', None)
class Operation(msrest.serialization.Model):
"""Network REST API operation definition.
:param name: Operation name: {provider}/{resource}/{operation}.
:type name: str
:param display: Display metadata associated with the operation.
:type display: ~azure.mgmt.network.v2019_04_01.models.OperationDisplay
:param origin: Origin of the operation.
:type origin: str
:param service_specification: Specification of the service.
:type service_specification:
~azure.mgmt.network.v2019_04_01.models.OperationPropertiesFormatServiceSpecification
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display': {'key': 'display', 'type': 'OperationDisplay'},
'origin': {'key': 'origin', 'type': 'str'},
'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationPropertiesFormatServiceSpecification'},
}
def __init__(
self,
**kwargs
):
super(Operation, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.display = kwargs.get('display', None)
self.origin = kwargs.get('origin', None)
self.service_specification = kwargs.get('service_specification', None)
class OperationDisplay(msrest.serialization.Model):
"""Display metadata associated with the operation.
:param provider: Service provider: Microsoft Network.
:type provider: str
:param resource: Resource on which the operation is performed.
:type resource: str
:param operation: Type of the operation: get, read, delete, etc.
:type operation: str
:param description: Description of the operation.
:type description: str
"""
_attribute_map = {
'provider': {'key': 'provider', 'type': 'str'},
'resource': {'key': 'resource', 'type': 'str'},
'operation': {'key': 'operation', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(OperationDisplay, self).__init__(**kwargs)
self.provider = kwargs.get('provider', None)
self.resource = kwargs.get('resource', None)
self.operation = kwargs.get('operation', None)
self.description = kwargs.get('description', None)
class OperationListResult(msrest.serialization.Model):
"""Result of the request to list Network operations. It contains a list of operations and a URL link to get the next set of results.
:param value: List of Network operations supported by the Network resource provider.
:type value: list[~azure.mgmt.network.v2019_04_01.models.Operation]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[Operation]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(OperationListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class OperationPropertiesFormatServiceSpecification(msrest.serialization.Model):
"""Specification of the service.
:param metric_specifications: Operation service specification.
:type metric_specifications: list[~azure.mgmt.network.v2019_04_01.models.MetricSpecification]
:param log_specifications: Operation log specification.
:type log_specifications: list[~azure.mgmt.network.v2019_04_01.models.LogSpecification]
"""
_attribute_map = {
'metric_specifications': {'key': 'metricSpecifications', 'type': '[MetricSpecification]'},
'log_specifications': {'key': 'logSpecifications', 'type': '[LogSpecification]'},
}
def __init__(
self,
**kwargs
):
super(OperationPropertiesFormatServiceSpecification, self).__init__(**kwargs)
self.metric_specifications = kwargs.get('metric_specifications', None)
self.log_specifications = kwargs.get('log_specifications', None)
class OutboundRule(SubResource):
"""Outbound rule of the load balancer.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param allocated_outbound_ports: The number of outbound ports to be used for NAT.
:type allocated_outbound_ports: int
:param frontend_ip_configurations: The Frontend IP addresses of the load balancer.
:type frontend_ip_configurations: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:param backend_address_pool: A reference to a pool of DIPs. Outbound traffic is randomly load
    balanced across IPs in the backend pool.
:type backend_address_pool: ~azure.mgmt.network.v2019_04_01.models.SubResource
    :param provisioning_state: The provisioning state of the outbound rule resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
:param protocol: The protocol for the outbound rule in load balancer. Possible values include:
"Tcp", "Udp", "All".
:type protocol: str or ~azure.mgmt.network.v2019_04_01.models.LoadBalancerOutboundRuleProtocol
:param enable_tcp_reset: Receive bidirectional TCP Reset on TCP flow idle timeout or unexpected
connection termination. This element is only used when the protocol is set to TCP.
:type enable_tcp_reset: bool
:param idle_timeout_in_minutes: The timeout for the TCP idle connection.
:type idle_timeout_in_minutes: int
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'allocated_outbound_ports': {'key': 'properties.allocatedOutboundPorts', 'type': 'int'},
'frontend_ip_configurations': {'key': 'properties.frontendIPConfigurations', 'type': '[SubResource]'},
'backend_address_pool': {'key': 'properties.backendAddressPool', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'enable_tcp_reset': {'key': 'properties.enableTcpReset', 'type': 'bool'},
'idle_timeout_in_minutes': {'key': 'properties.idleTimeoutInMinutes', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(OutboundRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.allocated_outbound_ports = kwargs.get('allocated_outbound_ports', None)
self.frontend_ip_configurations = kwargs.get('frontend_ip_configurations', None)
self.backend_address_pool = kwargs.get('backend_address_pool', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.protocol = kwargs.get('protocol', None)
self.enable_tcp_reset = kwargs.get('enable_tcp_reset', None)
self.idle_timeout_in_minutes = kwargs.get('idle_timeout_in_minutes', None)
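# Editorial note: an illustrative sketch (not generated code) of an OutboundRule that
# references existing frontend and backend sub-resources by ID. SubResource is assumed to
# accept an ``id`` keyword, as the attribute maps above suggest; the IDs and names are
# hypothetical placeholders.
def _example_outbound_rule():
    frontend_id = ('/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Network/'
                   'loadBalancers/<lb>/frontendIPConfigurations/<frontend>')
    backend_id = ('/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Network/'
                  'loadBalancers/<lb>/backendAddressPools/<pool>')
    return OutboundRule(
        name='outbound-rule-1',
        protocol='Tcp',
        allocated_outbound_ports=1024,
        idle_timeout_in_minutes=4,
        enable_tcp_reset=True,
        frontend_ip_configurations=[SubResource(id=frontend_id)],
        backend_address_pool=SubResource(id=backend_id),
    )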
class P2SVpnGateway(Resource):
"""P2SVpnGateway Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param virtual_hub: The VirtualHub to which the gateway belongs.
:type virtual_hub: ~azure.mgmt.network.v2019_04_01.models.SubResource
:ivar provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
:param vpn_gateway_scale_unit: The scale unit for this p2s vpn gateway.
:type vpn_gateway_scale_unit: int
:param p2_s_vpn_server_configuration: The P2SVpnServerConfiguration to which the p2sVpnGateway
    is attached.
:type p2_s_vpn_server_configuration: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param vpn_client_address_pool: The reference of the address space resource which represents
Address space for P2S VpnClient.
:type vpn_client_address_pool: ~azure.mgmt.network.v2019_04_01.models.AddressSpace
:param custom_routes: The reference of the address space resource which represents the custom
routes specified by the customer for P2SVpnGateway and P2S VpnClient.
:type custom_routes: ~azure.mgmt.network.v2019_04_01.models.AddressSpace
:ivar vpn_client_connection_health: All P2S VPN clients' connection health status.
:vartype vpn_client_connection_health:
~azure.mgmt.network.v2019_04_01.models.VpnClientConnectionHealth
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
'vpn_client_connection_health': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'virtual_hub': {'key': 'properties.virtualHub', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'vpn_gateway_scale_unit': {'key': 'properties.vpnGatewayScaleUnit', 'type': 'int'},
'p2_s_vpn_server_configuration': {'key': 'properties.p2SVpnServerConfiguration', 'type': 'SubResource'},
'vpn_client_address_pool': {'key': 'properties.vpnClientAddressPool', 'type': 'AddressSpace'},
'custom_routes': {'key': 'properties.customRoutes', 'type': 'AddressSpace'},
'vpn_client_connection_health': {'key': 'properties.vpnClientConnectionHealth', 'type': 'VpnClientConnectionHealth'},
}
def __init__(
self,
**kwargs
):
super(P2SVpnGateway, self).__init__(**kwargs)
self.etag = None
self.virtual_hub = kwargs.get('virtual_hub', None)
self.provisioning_state = None
self.vpn_gateway_scale_unit = kwargs.get('vpn_gateway_scale_unit', None)
self.p2_s_vpn_server_configuration = kwargs.get('p2_s_vpn_server_configuration', None)
self.vpn_client_address_pool = kwargs.get('vpn_client_address_pool', None)
self.custom_routes = kwargs.get('custom_routes', None)
self.vpn_client_connection_health = None
class P2SVpnProfileParameters(msrest.serialization.Model):
"""Vpn Client Parameters for package generation.
:param authentication_method: VPN client authentication method. Possible values include:
"EAPTLS", "EAPMSCHAPv2".
:type authentication_method: str or ~azure.mgmt.network.v2019_04_01.models.AuthenticationMethod
"""
_attribute_map = {
'authentication_method': {'key': 'authenticationMethod', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(P2SVpnProfileParameters, self).__init__(**kwargs)
self.authentication_method = kwargs.get('authentication_method', None)
class P2SVpnServerConfigRadiusClientRootCertificate(SubResource):
"""Radius client root certificate of P2SVpnServerConfiguration.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param thumbprint: The Radius client root certificate thumbprint.
:type thumbprint: str
:ivar provisioning_state: The provisioning state of the Radius client root certificate
resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'thumbprint': {'key': 'properties.thumbprint', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(P2SVpnServerConfigRadiusClientRootCertificate, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.thumbprint = kwargs.get('thumbprint', None)
self.provisioning_state = None
class P2SVpnServerConfigRadiusServerRootCertificate(SubResource):
"""Radius Server root certificate of P2SVpnServerConfiguration.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param public_cert_data: Required. The certificate public data.
:type public_cert_data: str
:ivar provisioning_state: The provisioning state of the P2SVpnServerConfiguration Radius Server
root certificate resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'public_cert_data': {'required': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'public_cert_data': {'key': 'properties.publicCertData', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(P2SVpnServerConfigRadiusServerRootCertificate, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.public_cert_data = kwargs['public_cert_data']
self.provisioning_state = None
class P2SVpnServerConfiguration(SubResource):
"""P2SVpnServerConfiguration Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param name_properties_name: The name of the P2SVpnServerConfiguration that is unique within a
    VirtualWan in a resource group. This name can be used to access the resource along with the parent
VirtualWan resource name.
:type name_properties_name: str
:param vpn_protocols: VPN protocols for the P2SVpnServerConfiguration.
:type vpn_protocols: list[str or
~azure.mgmt.network.v2019_04_01.models.VpnGatewayTunnelingProtocol]
:param p2_s_vpn_server_config_vpn_client_root_certificates: VPN client root certificate of
P2SVpnServerConfiguration.
:type p2_s_vpn_server_config_vpn_client_root_certificates:
list[~azure.mgmt.network.v2019_04_01.models.P2SVpnServerConfigVpnClientRootCertificate]
:param p2_s_vpn_server_config_vpn_client_revoked_certificates: VPN client revoked certificate
of P2SVpnServerConfiguration.
:type p2_s_vpn_server_config_vpn_client_revoked_certificates:
list[~azure.mgmt.network.v2019_04_01.models.P2SVpnServerConfigVpnClientRevokedCertificate]
:param p2_s_vpn_server_config_radius_server_root_certificates: Radius Server root certificate
of P2SVpnServerConfiguration.
:type p2_s_vpn_server_config_radius_server_root_certificates:
list[~azure.mgmt.network.v2019_04_01.models.P2SVpnServerConfigRadiusServerRootCertificate]
:param p2_s_vpn_server_config_radius_client_root_certificates: Radius client root certificate
of P2SVpnServerConfiguration.
:type p2_s_vpn_server_config_radius_client_root_certificates:
list[~azure.mgmt.network.v2019_04_01.models.P2SVpnServerConfigRadiusClientRootCertificate]
:param vpn_client_ipsec_policies: VpnClientIpsecPolicies for P2SVpnServerConfiguration.
:type vpn_client_ipsec_policies: list[~azure.mgmt.network.v2019_04_01.models.IpsecPolicy]
:param radius_server_address: The radius server address property of the
P2SVpnServerConfiguration resource for point to site client connection.
:type radius_server_address: str
:param radius_server_secret: The radius secret property of the P2SVpnServerConfiguration
resource for point to site client connection.
:type radius_server_secret: str
:ivar provisioning_state: The provisioning state of the P2SVpnServerConfiguration resource.
Possible values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
:ivar p2_s_vpn_gateways: List of references to P2SVpnGateways.
:vartype p2_s_vpn_gateways: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:param etag_properties_etag: A unique read-only string that changes whenever the resource is
updated.
:type etag_properties_etag: str
"""
_validation = {
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
'p2_s_vpn_gateways': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'name_properties_name': {'key': 'properties.name', 'type': 'str'},
'vpn_protocols': {'key': 'properties.vpnProtocols', 'type': '[str]'},
'p2_s_vpn_server_config_vpn_client_root_certificates': {'key': 'properties.p2SVpnServerConfigVpnClientRootCertificates', 'type': '[P2SVpnServerConfigVpnClientRootCertificate]'},
'p2_s_vpn_server_config_vpn_client_revoked_certificates': {'key': 'properties.p2SVpnServerConfigVpnClientRevokedCertificates', 'type': '[P2SVpnServerConfigVpnClientRevokedCertificate]'},
'p2_s_vpn_server_config_radius_server_root_certificates': {'key': 'properties.p2SVpnServerConfigRadiusServerRootCertificates', 'type': '[P2SVpnServerConfigRadiusServerRootCertificate]'},
'p2_s_vpn_server_config_radius_client_root_certificates': {'key': 'properties.p2SVpnServerConfigRadiusClientRootCertificates', 'type': '[P2SVpnServerConfigRadiusClientRootCertificate]'},
'vpn_client_ipsec_policies': {'key': 'properties.vpnClientIpsecPolicies', 'type': '[IpsecPolicy]'},
'radius_server_address': {'key': 'properties.radiusServerAddress', 'type': 'str'},
'radius_server_secret': {'key': 'properties.radiusServerSecret', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'p2_s_vpn_gateways': {'key': 'properties.p2SVpnGateways', 'type': '[SubResource]'},
'etag_properties_etag': {'key': 'properties.etag', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(P2SVpnServerConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.name_properties_name = kwargs.get('name_properties_name', None)
self.vpn_protocols = kwargs.get('vpn_protocols', None)
self.p2_s_vpn_server_config_vpn_client_root_certificates = kwargs.get('p2_s_vpn_server_config_vpn_client_root_certificates', None)
self.p2_s_vpn_server_config_vpn_client_revoked_certificates = kwargs.get('p2_s_vpn_server_config_vpn_client_revoked_certificates', None)
self.p2_s_vpn_server_config_radius_server_root_certificates = kwargs.get('p2_s_vpn_server_config_radius_server_root_certificates', None)
self.p2_s_vpn_server_config_radius_client_root_certificates = kwargs.get('p2_s_vpn_server_config_radius_client_root_certificates', None)
self.vpn_client_ipsec_policies = kwargs.get('vpn_client_ipsec_policies', None)
self.radius_server_address = kwargs.get('radius_server_address', None)
self.radius_server_secret = kwargs.get('radius_server_secret', None)
self.provisioning_state = None
self.p2_s_vpn_gateways = None
self.etag_properties_etag = kwargs.get('etag_properties_etag', None)
class P2SVpnServerConfigVpnClientRevokedCertificate(SubResource):
"""VPN client revoked certificate of P2SVpnServerConfiguration.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param thumbprint: The revoked VPN client certificate thumbprint.
:type thumbprint: str
:ivar provisioning_state: The provisioning state of the VPN client revoked certificate
resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'thumbprint': {'key': 'properties.thumbprint', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(P2SVpnServerConfigVpnClientRevokedCertificate, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.thumbprint = kwargs.get('thumbprint', None)
self.provisioning_state = None
class P2SVpnServerConfigVpnClientRootCertificate(SubResource):
"""VPN client root certificate of P2SVpnServerConfiguration.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param public_cert_data: Required. The certificate public data.
:type public_cert_data: str
:ivar provisioning_state: The provisioning state of the P2SVpnServerConfiguration VPN client
root certificate resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'public_cert_data': {'required': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'public_cert_data': {'key': 'properties.publicCertData', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(P2SVpnServerConfigVpnClientRootCertificate, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.public_cert_data = kwargs['public_cert_data']
self.provisioning_state = None
class PacketCapture(msrest.serialization.Model):
"""Parameters that define the create packet capture operation.
All required parameters must be populated in order to send to Azure.
:param target: Required. The ID of the targeted resource, only VM is currently supported.
:type target: str
:param bytes_to_capture_per_packet: Number of bytes captured per packet, the remaining bytes
are truncated.
:type bytes_to_capture_per_packet: int
:param total_bytes_per_session: Maximum size of the capture output.
:type total_bytes_per_session: int
:param time_limit_in_seconds: Maximum duration of the capture session in seconds.
:type time_limit_in_seconds: int
:param storage_location: Required. Describes the storage location for a packet capture session.
:type storage_location: ~azure.mgmt.network.v2019_04_01.models.PacketCaptureStorageLocation
:param filters: A list of packet capture filters.
:type filters: list[~azure.mgmt.network.v2019_04_01.models.PacketCaptureFilter]
"""
_validation = {
'target': {'required': True},
'storage_location': {'required': True},
}
_attribute_map = {
'target': {'key': 'properties.target', 'type': 'str'},
'bytes_to_capture_per_packet': {'key': 'properties.bytesToCapturePerPacket', 'type': 'int'},
'total_bytes_per_session': {'key': 'properties.totalBytesPerSession', 'type': 'int'},
'time_limit_in_seconds': {'key': 'properties.timeLimitInSeconds', 'type': 'int'},
'storage_location': {'key': 'properties.storageLocation', 'type': 'PacketCaptureStorageLocation'},
'filters': {'key': 'properties.filters', 'type': '[PacketCaptureFilter]'},
}
def __init__(
self,
**kwargs
):
super(PacketCapture, self).__init__(**kwargs)
self.target = kwargs['target']
self.bytes_to_capture_per_packet = kwargs.get('bytes_to_capture_per_packet', 0)
self.total_bytes_per_session = kwargs.get('total_bytes_per_session', 1073741824)
self.time_limit_in_seconds = kwargs.get('time_limit_in_seconds', 18000)
self.storage_location = kwargs['storage_location']
self.filters = kwargs.get('filters', None)
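# Editorial note: an illustrative sketch (not generated code) of a PacketCapture request.
# target and storage_location are required (omitting either raises KeyError, since __init__
# reads them with kwargs['...']); bytes_to_capture_per_packet, total_bytes_per_session and
# time_limit_in_seconds fall back to the defaults set in __init__ (0, 1073741824 and 18000).
# The IDs and URLs below are hypothetical placeholders.
def _example_packet_capture():
    return PacketCapture(
        target='/subscriptions/<sub>/resourceGroups/<rg>/providers/'
               'Microsoft.Compute/virtualMachines/<vm>',
        storage_location=PacketCaptureStorageLocation(
            storage_id='/subscriptions/<sub>/resourceGroups/<rg>/providers/'
                       'Microsoft.Storage/storageAccounts/<account>',
            storage_path='https://<account>.blob.core.windows.net/captures/vm1.cap',
        ),
        filters=[PacketCaptureFilter(protocol='TCP', remote_port='443')],
    )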
class PacketCaptureFilter(msrest.serialization.Model):
"""Filter that is applied to packet capture request. Multiple filters can be applied.
:param protocol: Protocol to be filtered on. Possible values include: "TCP", "UDP", "Any".
Default value: "Any".
:type protocol: str or ~azure.mgmt.network.v2019_04_01.models.PcProtocol
:param local_ip_address: Local IP Address to be filtered on. Notation: "127.0.0.1" for single
address entry. "127.0.0.1-127.0.0.255" for range. "127.0.0.1;127.0.0.5"? for multiple entries.
Multiple ranges not currently supported. Mixing ranges with multiple entries not currently
supported. Default = null.
:type local_ip_address: str
    :param remote_ip_address: Remote IP address to be filtered on. Notation: "127.0.0.1" for single
address entry. "127.0.0.1-127.0.0.255" for range. "127.0.0.1;127.0.0.5;" for multiple entries.
Multiple ranges not currently supported. Mixing ranges with multiple entries not currently
supported. Default = null.
:type remote_ip_address: str
:param local_port: Local port to be filtered on. Notation: "80" for single port entry."80-85"
for range. "80;443;" for multiple entries. Multiple ranges not currently supported. Mixing
ranges with multiple entries not currently supported. Default = null.
:type local_port: str
:param remote_port: Remote port to be filtered on. Notation: "80" for single port entry."80-85"
for range. "80;443;" for multiple entries. Multiple ranges not currently supported. Mixing
ranges with multiple entries not currently supported. Default = null.
:type remote_port: str
"""
_attribute_map = {
'protocol': {'key': 'protocol', 'type': 'str'},
'local_ip_address': {'key': 'localIPAddress', 'type': 'str'},
'remote_ip_address': {'key': 'remoteIPAddress', 'type': 'str'},
'local_port': {'key': 'localPort', 'type': 'str'},
'remote_port': {'key': 'remotePort', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PacketCaptureFilter, self).__init__(**kwargs)
self.protocol = kwargs.get('protocol', "Any")
self.local_ip_address = kwargs.get('local_ip_address', None)
self.remote_ip_address = kwargs.get('remote_ip_address', None)
self.local_port = kwargs.get('local_port', None)
self.remote_port = kwargs.get('remote_port', None)
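# Editorial note: an illustrative sketch (not generated code) of the filter notation documented
# above: single values, ranges ("a-b"), or semicolon-separated lists; ranges and lists cannot
# be mixed. The addresses and ports are hypothetical placeholders.
def _example_packet_capture_filter():
    return PacketCaptureFilter(
        protocol='TCP',
        local_ip_address='10.0.0.4',
        remote_ip_address='10.0.0.10-10.0.0.20',
        local_port='80;443;',
    )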
class PacketCaptureListResult(msrest.serialization.Model):
"""List of packet capture sessions.
:param value: Information about packet capture sessions.
:type value: list[~azure.mgmt.network.v2019_04_01.models.PacketCaptureResult]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PacketCaptureResult]'},
}
def __init__(
self,
**kwargs
):
super(PacketCaptureListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
class PacketCaptureParameters(msrest.serialization.Model):
"""Parameters that define the create packet capture operation.
All required parameters must be populated in order to send to Azure.
:param target: Required. The ID of the targeted resource, only VM is currently supported.
:type target: str
:param bytes_to_capture_per_packet: Number of bytes captured per packet, the remaining bytes
are truncated.
:type bytes_to_capture_per_packet: int
:param total_bytes_per_session: Maximum size of the capture output.
:type total_bytes_per_session: int
:param time_limit_in_seconds: Maximum duration of the capture session in seconds.
:type time_limit_in_seconds: int
:param storage_location: Required. Describes the storage location for a packet capture session.
:type storage_location: ~azure.mgmt.network.v2019_04_01.models.PacketCaptureStorageLocation
:param filters: A list of packet capture filters.
:type filters: list[~azure.mgmt.network.v2019_04_01.models.PacketCaptureFilter]
"""
_validation = {
'target': {'required': True},
'storage_location': {'required': True},
}
_attribute_map = {
'target': {'key': 'target', 'type': 'str'},
'bytes_to_capture_per_packet': {'key': 'bytesToCapturePerPacket', 'type': 'int'},
'total_bytes_per_session': {'key': 'totalBytesPerSession', 'type': 'int'},
'time_limit_in_seconds': {'key': 'timeLimitInSeconds', 'type': 'int'},
'storage_location': {'key': 'storageLocation', 'type': 'PacketCaptureStorageLocation'},
'filters': {'key': 'filters', 'type': '[PacketCaptureFilter]'},
}
def __init__(
self,
**kwargs
):
super(PacketCaptureParameters, self).__init__(**kwargs)
self.target = kwargs['target']
self.bytes_to_capture_per_packet = kwargs.get('bytes_to_capture_per_packet', 0)
self.total_bytes_per_session = kwargs.get('total_bytes_per_session', 1073741824)
self.time_limit_in_seconds = kwargs.get('time_limit_in_seconds', 18000)
self.storage_location = kwargs['storage_location']
self.filters = kwargs.get('filters', None)
class PacketCaptureQueryStatusResult(msrest.serialization.Model):
"""Status of packet capture session.
:param name: The name of the packet capture resource.
:type name: str
:param id: The ID of the packet capture resource.
:type id: str
:param capture_start_time: The start time of the packet capture session.
:type capture_start_time: ~datetime.datetime
:param packet_capture_status: The status of the packet capture session. Possible values
include: "NotStarted", "Running", "Stopped", "Error", "Unknown".
:type packet_capture_status: str or ~azure.mgmt.network.v2019_04_01.models.PcStatus
:param stop_reason: The reason the current packet capture session was stopped.
:type stop_reason: str
:param packet_capture_error: List of errors of packet capture session.
:type packet_capture_error: list[str or ~azure.mgmt.network.v2019_04_01.models.PcError]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'capture_start_time': {'key': 'captureStartTime', 'type': 'iso-8601'},
'packet_capture_status': {'key': 'packetCaptureStatus', 'type': 'str'},
'stop_reason': {'key': 'stopReason', 'type': 'str'},
'packet_capture_error': {'key': 'packetCaptureError', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(PacketCaptureQueryStatusResult, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.id = kwargs.get('id', None)
self.capture_start_time = kwargs.get('capture_start_time', None)
self.packet_capture_status = kwargs.get('packet_capture_status', None)
self.stop_reason = kwargs.get('stop_reason', None)
self.packet_capture_error = kwargs.get('packet_capture_error', None)
class PacketCaptureResult(msrest.serialization.Model):
"""Information about packet capture session.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar name: Name of the packet capture session.
:vartype name: str
:ivar id: ID of the packet capture operation.
:vartype id: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param target: The ID of the targeted resource, only VM is currently supported.
:type target: str
:param bytes_to_capture_per_packet: Number of bytes captured per packet, the remaining bytes
are truncated.
:type bytes_to_capture_per_packet: int
:param total_bytes_per_session: Maximum size of the capture output.
:type total_bytes_per_session: int
:param time_limit_in_seconds: Maximum duration of the capture session in seconds.
:type time_limit_in_seconds: int
:param storage_location: Describes the storage location for a packet capture session.
:type storage_location: ~azure.mgmt.network.v2019_04_01.models.PacketCaptureStorageLocation
:param filters: A list of packet capture filters.
:type filters: list[~azure.mgmt.network.v2019_04_01.models.PacketCaptureFilter]
:ivar provisioning_state: The provisioning state of the packet capture session. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'id': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'target': {'key': 'properties.target', 'type': 'str'},
'bytes_to_capture_per_packet': {'key': 'properties.bytesToCapturePerPacket', 'type': 'int'},
'total_bytes_per_session': {'key': 'properties.totalBytesPerSession', 'type': 'int'},
'time_limit_in_seconds': {'key': 'properties.timeLimitInSeconds', 'type': 'int'},
'storage_location': {'key': 'properties.storageLocation', 'type': 'PacketCaptureStorageLocation'},
'filters': {'key': 'properties.filters', 'type': '[PacketCaptureFilter]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PacketCaptureResult, self).__init__(**kwargs)
self.name = None
self.id = None
        self.etag = kwargs.get('etag', None)
self.target = kwargs.get('target', None)
self.bytes_to_capture_per_packet = kwargs.get('bytes_to_capture_per_packet', 0)
self.total_bytes_per_session = kwargs.get('total_bytes_per_session', 1073741824)
self.time_limit_in_seconds = kwargs.get('time_limit_in_seconds', 18000)
self.storage_location = kwargs.get('storage_location', None)
self.filters = kwargs.get('filters', None)
self.provisioning_state = None
class PacketCaptureResultProperties(PacketCaptureParameters):
"""Describes the properties of a packet capture session.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param target: Required. The ID of the targeted resource, only VM is currently supported.
:type target: str
:param bytes_to_capture_per_packet: Number of bytes captured per packet, the remaining bytes
are truncated.
:type bytes_to_capture_per_packet: int
:param total_bytes_per_session: Maximum size of the capture output.
:type total_bytes_per_session: int
:param time_limit_in_seconds: Maximum duration of the capture session in seconds.
:type time_limit_in_seconds: int
:param storage_location: Required. Describes the storage location for a packet capture session.
:type storage_location: ~azure.mgmt.network.v2019_04_01.models.PacketCaptureStorageLocation
:param filters: A list of packet capture filters.
:type filters: list[~azure.mgmt.network.v2019_04_01.models.PacketCaptureFilter]
:ivar provisioning_state: The provisioning state of the packet capture session. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
"""
_validation = {
'target': {'required': True},
'storage_location': {'required': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'target': {'key': 'target', 'type': 'str'},
'bytes_to_capture_per_packet': {'key': 'bytesToCapturePerPacket', 'type': 'int'},
'total_bytes_per_session': {'key': 'totalBytesPerSession', 'type': 'int'},
'time_limit_in_seconds': {'key': 'timeLimitInSeconds', 'type': 'int'},
'storage_location': {'key': 'storageLocation', 'type': 'PacketCaptureStorageLocation'},
'filters': {'key': 'filters', 'type': '[PacketCaptureFilter]'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PacketCaptureResultProperties, self).__init__(**kwargs)
self.provisioning_state = None
class PacketCaptureStorageLocation(msrest.serialization.Model):
"""Describes the storage location for a packet capture session.
:param storage_id: The ID of the storage account to save the packet capture session. Required
if no local file path is provided.
:type storage_id: str
:param storage_path: The URI of the storage path to save the packet capture. Must be a
well-formed URI describing the location to save the packet capture.
:type storage_path: str
    :param file_path: A valid local path on the target VM. Must include the name of the capture
    file (*.cap). For Linux virtual machines it must start with /var/captures. Required if no
    storage ID is provided, otherwise optional.
:type file_path: str
"""
_attribute_map = {
'storage_id': {'key': 'storageId', 'type': 'str'},
'storage_path': {'key': 'storagePath', 'type': 'str'},
'file_path': {'key': 'filePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PacketCaptureStorageLocation, self).__init__(**kwargs)
self.storage_id = kwargs.get('storage_id', None)
self.storage_path = kwargs.get('storage_path', None)
self.file_path = kwargs.get('file_path', None)
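# Editorial note: an illustrative sketch (not generated code) of PacketCaptureStorageLocation.
# A storage account target (storage_id plus storage_path) and/or a local file path on the VM
# can be supplied; on Linux the file path must start with /var/captures. The IDs and URLs
# below are hypothetical placeholders.
def _example_packet_capture_storage_location():
    return PacketCaptureStorageLocation(
        storage_id='/subscriptions/<sub>/resourceGroups/<rg>/providers/'
                   'Microsoft.Storage/storageAccounts/<account>',
        storage_path='https://<account>.blob.core.windows.net/captures/vm1.cap',
        file_path='/var/captures/vm1.cap',
    )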
class PatchRouteFilter(SubResource):
"""Route Filter Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:vartype name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Resource type.
:vartype type: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param rules: Collection of RouteFilterRules contained within a route filter.
:type rules: list[~azure.mgmt.network.v2019_04_01.models.RouteFilterRule]
:param peerings: A collection of references to express route circuit peerings.
:type peerings: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitPeering]
:param ipv6_peerings: A collection of references to express route circuit ipv6 peerings.
:type ipv6_peerings: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitPeering]
:ivar provisioning_state: The provisioning state of the resource. Possible values are:
'Updating', 'Deleting', 'Succeeded' and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'name': {'readonly': True},
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'rules': {'key': 'properties.rules', 'type': '[RouteFilterRule]'},
'peerings': {'key': 'properties.peerings', 'type': '[ExpressRouteCircuitPeering]'},
'ipv6_peerings': {'key': 'properties.ipv6Peerings', 'type': '[ExpressRouteCircuitPeering]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PatchRouteFilter, self).__init__(**kwargs)
self.name = None
self.etag = None
self.type = None
self.tags = kwargs.get('tags', None)
self.rules = kwargs.get('rules', None)
self.peerings = kwargs.get('peerings', None)
self.ipv6_peerings = kwargs.get('ipv6_peerings', None)
self.provisioning_state = None
class PatchRouteFilterRule(SubResource):
"""Route Filter Rule Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:vartype name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param access: The access type of the rule. Possible values include: "Allow", "Deny".
:type access: str or ~azure.mgmt.network.v2019_04_01.models.Access
:param route_filter_rule_type: The rule type of the rule. Possible values include: "Community".
:type route_filter_rule_type: str or ~azure.mgmt.network.v2019_04_01.models.RouteFilterRuleType
    :param communities: The collection of BGP community values to filter on, e.g.
['12076:5010','12076:5020'].
:type communities: list[str]
:ivar provisioning_state: The provisioning state of the resource. Possible values are:
'Updating', 'Deleting', 'Succeeded' and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'name': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'access': {'key': 'properties.access', 'type': 'str'},
'route_filter_rule_type': {'key': 'properties.routeFilterRuleType', 'type': 'str'},
'communities': {'key': 'properties.communities', 'type': '[str]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PatchRouteFilterRule, self).__init__(**kwargs)
self.name = None
self.etag = None
self.access = kwargs.get('access', None)
self.route_filter_rule_type = kwargs.get('route_filter_rule_type', None)
self.communities = kwargs.get('communities', None)
self.provisioning_state = None
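# Editorial note: an illustrative sketch (not generated code) of a PatchRouteFilterRule that
# allows two BGP communities, using the community notation shown in the docstring above.
def _example_patch_route_filter_rule():
    return PatchRouteFilterRule(
        access='Allow',
        route_filter_rule_type='Community',
        communities=['12076:5010', '12076:5020'],
    )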
class PeerExpressRouteCircuitConnection(SubResource):
"""Peer Express Route Circuit Connection in an ExpressRouteCircuitPeering resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Gets the name of the resource that is unique within a resource group. This name can
be used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param express_route_circuit_peering: Reference to Express Route Circuit Private Peering
Resource of the circuit.
:type express_route_circuit_peering: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param peer_express_route_circuit_peering: Reference to Express Route Circuit Private Peering
Resource of the peered circuit.
:type peer_express_route_circuit_peering: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param address_prefix: /29 IP address space to carve out Customer addresses for tunnels.
:type address_prefix: str
:ivar circuit_connection_status: Express Route Circuit connection state. Possible values
include: "Connected", "Connecting", "Disconnected".
:vartype circuit_connection_status: str or
~azure.mgmt.network.v2019_04_01.models.CircuitConnectionStatus
:param connection_name: The name of the express route circuit connection resource.
:type connection_name: str
:param auth_resource_guid: The resource guid of the authorization used for the express route
circuit connection.
:type auth_resource_guid: str
:ivar provisioning_state: Provisioning state of the peer express route circuit connection
resource. Possible values are: 'Succeeded', 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'circuit_connection_status': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'express_route_circuit_peering': {'key': 'properties.expressRouteCircuitPeering', 'type': 'SubResource'},
'peer_express_route_circuit_peering': {'key': 'properties.peerExpressRouteCircuitPeering', 'type': 'SubResource'},
'address_prefix': {'key': 'properties.addressPrefix', 'type': 'str'},
'circuit_connection_status': {'key': 'properties.circuitConnectionStatus', 'type': 'str'},
'connection_name': {'key': 'properties.connectionName', 'type': 'str'},
'auth_resource_guid': {'key': 'properties.authResourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PeerExpressRouteCircuitConnection, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.express_route_circuit_peering = kwargs.get('express_route_circuit_peering', None)
self.peer_express_route_circuit_peering = kwargs.get('peer_express_route_circuit_peering', None)
self.address_prefix = kwargs.get('address_prefix', None)
self.circuit_connection_status = None
self.connection_name = kwargs.get('connection_name', None)
self.auth_resource_guid = kwargs.get('auth_resource_guid', None)
self.provisioning_state = None
class PeerExpressRouteCircuitConnectionListResult(msrest.serialization.Model):
"""Response for ListPeeredConnections API service call retrieves all global reach peer circuit connections that belongs to a Private Peering for an ExpressRouteCircuit.
:param value: The global reach peer circuit connection associated with Private Peering in an
ExpressRoute Circuit.
:type value: list[~azure.mgmt.network.v2019_04_01.models.PeerExpressRouteCircuitConnection]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PeerExpressRouteCircuitConnection]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PeerExpressRouteCircuitConnectionListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class PolicySettings(msrest.serialization.Model):
"""Defines contents of a web application firewall global configuration.
:param enabled_state: Describes whether the policy is in an enabled or disabled state. Possible
values include: "Disabled", "Enabled".
:type enabled_state: str or
~azure.mgmt.network.v2019_04_01.models.WebApplicationFirewallEnabledState
:param mode: Describes whether the policy is in detection or prevention mode. Possible
values include: "Prevention", "Detection".
:type mode: str or ~azure.mgmt.network.v2019_04_01.models.WebApplicationFirewallMode
"""
_attribute_map = {
'enabled_state': {'key': 'enabledState', 'type': 'str'},
'mode': {'key': 'mode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PolicySettings, self).__init__(**kwargs)
self.enabled_state = kwargs.get('enabled_state', None)
self.mode = kwargs.get('mode', None)
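# Illustrative sketch (editor addition, not part of the generated SDK surface): a minimal
# example of populating PolicySettings via keyword arguments, as accepted by __init__ above.
# The chosen values ("Enabled", "Prevention") are assumptions picked from the documented enums.
def _example_policy_settings():
    """Build a sample WAF PolicySettings in prevention mode (illustrative only)."""
    return PolicySettings(enabled_state="Enabled", mode="Prevention")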
class PrepareNetworkPoliciesRequest(msrest.serialization.Model):
"""Details of PrepareNetworkPolicies for Subnet.
:param service_name: The name of the service for which the subnet is being prepared.
:type service_name: str
:param resource_group_name: The name of the resource group where the Network Intent Policy will
be stored.
:type resource_group_name: str
:param network_intent_policy_configurations: A list of NetworkIntentPolicyConfiguration.
:type network_intent_policy_configurations:
list[~azure.mgmt.network.v2019_04_01.models.NetworkIntentPolicyConfiguration]
"""
_attribute_map = {
'service_name': {'key': 'serviceName', 'type': 'str'},
'resource_group_name': {'key': 'resourceGroupName', 'type': 'str'},
'network_intent_policy_configurations': {'key': 'networkIntentPolicyConfigurations', 'type': '[NetworkIntentPolicyConfiguration]'},
}
def __init__(
self,
**kwargs
):
super(PrepareNetworkPoliciesRequest, self).__init__(**kwargs)
self.service_name = kwargs.get('service_name', None)
self.resource_group_name = kwargs.get('resource_group_name', None)
self.network_intent_policy_configurations = kwargs.get('network_intent_policy_configurations', None)
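# Illustrative sketch (editor addition): one plausible way to construct a
# PrepareNetworkPoliciesRequest. The service name and resource group name below are
# hypothetical placeholders, not values mandated by the API.
def _example_prepare_network_policies_request():
    """Build a sample PrepareNetworkPoliciesRequest (illustrative only)."""
    return PrepareNetworkPoliciesRequest(
        service_name="Microsoft.Sql/managedInstances",  # assumed service name
        resource_group_name="example-rg",  # placeholder resource group
    )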
class PrivateEndpoint(Resource):
"""Private endpoint resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param subnet: The ID of the subnet from which the private IP will be allocated.
:type subnet: ~azure.mgmt.network.v2019_04_01.models.Subnet
:ivar network_interfaces: Gets an array of references to the network interfaces created for
this private endpoint.
:vartype network_interfaces: list[~azure.mgmt.network.v2019_04_01.models.NetworkInterface]
:ivar provisioning_state: The provisioning state of the private endpoint. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
:param private_link_service_connections: A grouping of information about the connection to the
remote resource.
:type private_link_service_connections:
list[~azure.mgmt.network.v2019_04_01.models.PrivateLinkServiceConnection]
:param manual_private_link_service_connections: A grouping of information about the connection
to the remote resource. Used when the network admin does not have access to approve connections
to the remote resource.
:type manual_private_link_service_connections:
list[~azure.mgmt.network.v2019_04_01.models.PrivateLinkServiceConnection]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'network_interfaces': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'Subnet'},
'network_interfaces': {'key': 'properties.networkInterfaces', 'type': '[NetworkInterface]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'private_link_service_connections': {'key': 'properties.privateLinkServiceConnections', 'type': '[PrivateLinkServiceConnection]'},
'manual_private_link_service_connections': {'key': 'properties.manualPrivateLinkServiceConnections', 'type': '[PrivateLinkServiceConnection]'},
}
def __init__(
self,
**kwargs
):
super(PrivateEndpoint, self).__init__(**kwargs)
self.etag = kwargs.get('etag', None)
self.subnet = kwargs.get('subnet', None)
self.network_interfaces = None
self.provisioning_state = None
self.private_link_service_connections = kwargs.get('private_link_service_connections', None)
self.manual_private_link_service_connections = kwargs.get('manual_private_link_service_connections', None)
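# Illustrative sketch (editor addition): assembling a PrivateEndpoint request body with a
# subnet reference and a single PrivateLinkServiceConnection (defined later in this module).
# All resource IDs and names are placeholders, not real resources.
def _example_private_endpoint():
    """Build a sample PrivateEndpoint (illustrative only)."""
    subnet_id = ("/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/"
                 "example-rg/providers/Microsoft.Network/virtualNetworks/example-vnet/"
                 "subnets/default")
    pls_id = ("/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/"
              "example-rg/providers/Microsoft.Network/privateLinkServices/example-pls")
    return PrivateEndpoint(
        location="westus",
        subnet=Subnet(id=subnet_id),  # only the ID is needed for the reference
        private_link_service_connections=[
            PrivateLinkServiceConnection(
                name="example-connection",
                private_link_service_id=pls_id,
            )
        ],
    )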
class PrivateEndpointConnection(SubResource):
"""PrivateEndpointConnection resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar type: The resource type.
:vartype type: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param private_endpoint: The resource of private end point.
:type private_endpoint: ~azure.mgmt.network.v2019_04_01.models.PrivateEndpoint
:param private_link_service_connection_state: A collection of information about the state of
the connection between service consumer and provider.
:type private_link_service_connection_state:
~azure.mgmt.network.v2019_04_01.models.PrivateLinkServiceConnectionState
:ivar provisioning_state: The provisioning state of the private endpoint connection. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
"""
_validation = {
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'},
'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PrivateEndpointConnection, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = None
self.etag = None
self.private_endpoint = kwargs.get('private_endpoint', None)
self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None)
self.provisioning_state = None
class PrivateEndpointListResult(msrest.serialization.Model):
"""Response for the ListPrivateEndpoints API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: Gets a list of private endpoint resources in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.PrivateEndpoint]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[PrivateEndpoint]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PrivateEndpointListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class PrivateLinkService(Resource):
"""Private link service resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param load_balancer_frontend_ip_configurations: An array of references to the load balancer IP
configurations.
:type load_balancer_frontend_ip_configurations:
list[~azure.mgmt.network.v2019_04_01.models.FrontendIPConfiguration]
:param ip_configurations: An array of references to the private link service IP configuration.
:type ip_configurations:
list[~azure.mgmt.network.v2019_04_01.models.PrivateLinkServiceIpConfiguration]
:ivar network_interfaces: Gets an array of references to the network interfaces created for
this private link service.
:vartype network_interfaces: list[~azure.mgmt.network.v2019_04_01.models.NetworkInterface]
:ivar provisioning_state: The provisioning state of the private link service. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
:param private_endpoint_connections: An array of information about the connections to the private
endpoint.
:type private_endpoint_connections:
list[~azure.mgmt.network.v2019_04_01.models.PrivateEndpointConnection]
:param visibility: The visibility list of the private link service.
:type visibility: ~azure.mgmt.network.v2019_04_01.models.PrivateLinkServicePropertiesVisibility
:param auto_approval: The auto-approval list of the private link service.
:type auto_approval:
~azure.mgmt.network.v2019_04_01.models.PrivateLinkServicePropertiesAutoApproval
:param fqdns: The list of FQDNs.
:type fqdns: list[str]
:ivar alias: The alias of the private link service.
:vartype alias: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'network_interfaces': {'readonly': True},
'provisioning_state': {'readonly': True},
'alias': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'load_balancer_frontend_ip_configurations': {'key': 'properties.loadBalancerFrontendIpConfigurations', 'type': '[FrontendIPConfiguration]'},
'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[PrivateLinkServiceIpConfiguration]'},
'network_interfaces': {'key': 'properties.networkInterfaces', 'type': '[NetworkInterface]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'},
'visibility': {'key': 'properties.visibility', 'type': 'PrivateLinkServicePropertiesVisibility'},
'auto_approval': {'key': 'properties.autoApproval', 'type': 'PrivateLinkServicePropertiesAutoApproval'},
'fqdns': {'key': 'properties.fqdns', 'type': '[str]'},
'alias': {'key': 'properties.alias', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PrivateLinkService, self).__init__(**kwargs)
self.etag = kwargs.get('etag', None)
self.load_balancer_frontend_ip_configurations = kwargs.get('load_balancer_frontend_ip_configurations', None)
self.ip_configurations = kwargs.get('ip_configurations', None)
self.network_interfaces = None
self.provisioning_state = None
self.private_endpoint_connections = kwargs.get('private_endpoint_connections', None)
self.visibility = kwargs.get('visibility', None)
self.auto_approval = kwargs.get('auto_approval', None)
self.fqdns = kwargs.get('fqdns', None)
self.alias = None
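# Illustrative sketch (editor addition): a PrivateLinkService payload that exposes a load
# balancer frontend IP configuration by ID and restricts visibility to a single subscription.
# The frontend ID and the subscription GUID are placeholders.
def _example_private_link_service():
    """Build a sample PrivateLinkService (illustrative only)."""
    frontend_id = ("/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/"
                   "example-rg/providers/Microsoft.Network/loadBalancers/example-lb/"
                   "frontendIPConfigurations/example-fe")
    return PrivateLinkService(
        location="westus",
        load_balancer_frontend_ip_configurations=[
            FrontendIPConfiguration(id=frontend_id)  # reference by ID only
        ],
        visibility=PrivateLinkServicePropertiesVisibility(
            subscriptions=["00000000-0000-0000-0000-000000000000"]
        ),
    )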
class PrivateLinkServiceConnection(SubResource):
"""PrivateLinkServiceConnection resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar type: The resource type.
:vartype type: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar provisioning_state: The provisioning state of the private link service connection.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
:param private_link_service_id: The resource ID of the private link service.
:type private_link_service_id: str
:param group_ids: The ID(s) of the group(s) obtained from the remote resource that this private
endpoint should connect to.
:type group_ids: list[str]
:param request_message: A message passed to the owner of the remote resource with this
connection request. Restricted to 140 chars.
:type request_message: str
:param private_link_service_connection_state: A collection of read-only information about the
state of the connection to the remote resource.
:type private_link_service_connection_state:
~azure.mgmt.network.v2019_04_01.models.PrivateLinkServiceConnectionState
"""
_validation = {
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'private_link_service_id': {'key': 'properties.privateLinkServiceId', 'type': 'str'},
'group_ids': {'key': 'properties.groupIds', 'type': '[str]'},
'request_message': {'key': 'properties.requestMessage', 'type': 'str'},
'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'},
}
def __init__(
self,
**kwargs
):
super(PrivateLinkServiceConnection, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = None
self.etag = None
self.provisioning_state = None
self.private_link_service_id = kwargs.get('private_link_service_id', None)
self.group_ids = kwargs.get('group_ids', None)
self.request_message = kwargs.get('request_message', None)
self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None)
class PrivateLinkServiceConnectionState(msrest.serialization.Model):
"""A collection of information about the state of the connection between service consumer and provider.
:param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
of the service.
:type status: str
:param description: The reason for approval/rejection of the connection.
:type description: str
:param actions_required: A message indicating if changes on the service provider require any
updates on the consumer.
:type actions_required: str
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'actions_required': {'key': 'actionsRequired', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PrivateLinkServiceConnectionState, self).__init__(**kwargs)
self.status = kwargs.get('status', None)
self.description = kwargs.get('description', None)
self.actions_required = kwargs.get('actions_required', None)
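# Illustrative sketch (editor addition): approving a pending connection is typically expressed
# by sending a PrivateLinkServiceConnectionState with status "Approved". The description text
# below is an assumption.
def _example_connection_state_approved():
    """Build a sample 'Approved' PrivateLinkServiceConnectionState (illustrative only)."""
    return PrivateLinkServiceConnectionState(
        status="Approved",
        description="Approved by the network admin",
    )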
class PrivateLinkServiceIpConfiguration(SubResource):
"""The private link service ip configuration.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the private link service IP configuration.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: The resource type.
:vartype type: str
:param private_ip_address: The private IP address of the IP configuration.
:type private_ip_address: str
:param private_ip_allocation_method: The private IP address allocation method. Possible values
include: "Static", "Dynamic".
:type private_ip_allocation_method: str or
~azure.mgmt.network.v2019_04_01.models.IPAllocationMethod
:param subnet: The reference of the subnet resource.
:type subnet: ~azure.mgmt.network.v2019_04_01.models.Subnet
:param primary: Whether the ip configuration is primary or not.
:type primary: bool
:ivar provisioning_state: The provisioning state of the private link service ip configuration.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
:param private_ip_address_version: Available from API version 2016-03-30 onwards, this represents
whether the specific IP configuration is IPv4 or IPv6. The default is IPv4. Possible values
include: "IPv4", "IPv6".
:type private_ip_address_version: str or ~azure.mgmt.network.v2019_04_01.models.IPVersion
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'private_ip_address': {'key': 'properties.privateIPAddress', 'type': 'str'},
'private_ip_allocation_method': {'key': 'properties.privateIPAllocationMethod', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'Subnet'},
'primary': {'key': 'properties.primary', 'type': 'bool'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'private_ip_address_version': {'key': 'properties.privateIPAddressVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PrivateLinkServiceIpConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.private_ip_address = kwargs.get('private_ip_address', None)
self.private_ip_allocation_method = kwargs.get('private_ip_allocation_method', None)
self.subnet = kwargs.get('subnet', None)
self.primary = kwargs.get('primary', None)
self.provisioning_state = None
self.private_ip_address_version = kwargs.get('private_ip_address_version', None)
class PrivateLinkServiceListResult(msrest.serialization.Model):
"""Response for the ListPrivateLinkService API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: Gets a list of PrivateLinkService resources in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.PrivateLinkService]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[PrivateLinkService]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PrivateLinkServiceListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class ResourceSet(msrest.serialization.Model):
"""The base resource set for visibility and auto-approval.
:param subscriptions: The list of subscriptions.
:type subscriptions: list[str]
"""
_attribute_map = {
'subscriptions': {'key': 'subscriptions', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(ResourceSet, self).__init__(**kwargs)
self.subscriptions = kwargs.get('subscriptions', None)
class PrivateLinkServicePropertiesAutoApproval(ResourceSet):
"""The auto-approval list of the private link service.
:param subscriptions: The list of subscriptions.
:type subscriptions: list[str]
"""
_attribute_map = {
'subscriptions': {'key': 'subscriptions', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(PrivateLinkServicePropertiesAutoApproval, self).__init__(**kwargs)
class PrivateLinkServicePropertiesVisibility(ResourceSet):
"""The visibility list of the private link service.
:param subscriptions: The list of subscriptions.
:type subscriptions: list[str]
"""
_attribute_map = {
'subscriptions': {'key': 'subscriptions', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(PrivateLinkServicePropertiesVisibility, self).__init__(**kwargs)
class PrivateLinkServiceVisibility(msrest.serialization.Model):
"""Response for the CheckPrivateLinkServiceVisibility API service call.
:param visible: Private Link Service Visibility (True/False).
:type visible: bool
"""
_attribute_map = {
'visible': {'key': 'visible', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(PrivateLinkServiceVisibility, self).__init__(**kwargs)
self.visible = kwargs.get('visible', None)
class Probe(SubResource):
"""A load balancer probe.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Gets the name of the resource that is unique within a resource group. This name can
be used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:ivar load_balancing_rules: The load balancer rules that use this probe.
:vartype load_balancing_rules: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:param protocol: The protocol of the end point. If 'Tcp' is specified, a received ACK is
required for the probe to be successful. If 'Http' or 'Https' is specified, a 200 OK response
from the specified URI is required for the probe to be successful. Possible values include:
"Http", "Tcp", "Https".
:type protocol: str or ~azure.mgmt.network.v2019_04_01.models.ProbeProtocol
:param port: The port for communicating the probe. Possible values range from 1 to 65535,
inclusive.
:type port: int
:param interval_in_seconds: The interval, in seconds, for how frequently to probe the endpoint
for health status. Typically, the interval is slightly less than half the allocated timeout
period (in seconds), which allows two full probes before taking the instance out of rotation.
The default value is 15; the minimum value is 5.
:type interval_in_seconds: int
:param number_of_probes: The number of probes with no response that will result in stopping
further traffic from being delivered to the endpoint. This value allows endpoints to be taken
out of rotation faster or slower than the typical times used in Azure.
:type number_of_probes: int
:param request_path: The URI used for requesting health status from the VM. Path is required if
the protocol is set to Http; otherwise, it is not allowed. There is no default value.
:type request_path: str
:param provisioning_state: Gets the provisioning state of the probe resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_validation = {
'load_balancing_rules': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'load_balancing_rules': {'key': 'properties.loadBalancingRules', 'type': '[SubResource]'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'port': {'key': 'properties.port', 'type': 'int'},
'interval_in_seconds': {'key': 'properties.intervalInSeconds', 'type': 'int'},
'number_of_probes': {'key': 'properties.numberOfProbes', 'type': 'int'},
'request_path': {'key': 'properties.requestPath', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Probe, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.load_balancing_rules = None
self.protocol = kwargs.get('protocol', None)
self.port = kwargs.get('port', None)
self.interval_in_seconds = kwargs.get('interval_in_seconds', None)
self.number_of_probes = kwargs.get('number_of_probes', None)
self.request_path = kwargs.get('request_path', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
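# Illustrative sketch (editor addition): an HTTP health probe on port 80 that polls /healthz
# every 15 seconds and tolerates two missed probes, per the field descriptions above. The probe
# name and request path are placeholders.
def _example_probe():
    """Build a sample load balancer Probe (illustrative only)."""
    return Probe(
        name="http-probe",
        protocol="Http",
        port=80,
        request_path="/healthz",
        interval_in_seconds=15,
        number_of_probes=2,
    )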
class ProtocolConfiguration(msrest.serialization.Model):
"""Configuration of the protocol.
:param http_configuration: HTTP configuration of the connectivity check.
:type http_configuration: ~azure.mgmt.network.v2019_04_01.models.HTTPConfiguration
"""
_attribute_map = {
'http_configuration': {'key': 'HTTPConfiguration', 'type': 'HTTPConfiguration'},
}
def __init__(
self,
**kwargs
):
super(ProtocolConfiguration, self).__init__(**kwargs)
self.http_configuration = kwargs.get('http_configuration', None)
class ProtocolCustomSettingsFormat(msrest.serialization.Model):
"""DDoS custom policy properties.
:param protocol: The protocol for which the DDoS protection policy is being customized.
Possible values include: "Tcp", "Udp", "Syn".
:type protocol: str or ~azure.mgmt.network.v2019_04_01.models.DdosCustomPolicyProtocol
:param trigger_rate_override: The customized DDoS protection trigger rate.
:type trigger_rate_override: str
:param source_rate_override: The customized DDoS protection source rate.
:type source_rate_override: str
:param trigger_sensitivity_override: The customized DDoS protection trigger rate sensitivity
degrees. High: Trigger rate set with most sensitivity w.r.t. normal traffic. Default: Trigger
rate set with moderate sensitivity w.r.t. normal traffic. Low: Trigger rate set with less
sensitivity w.r.t. normal traffic. Relaxed: Trigger rate set with least sensitivity w.r.t.
normal traffic. Possible values include: "Relaxed", "Low", "Default", "High".
:type trigger_sensitivity_override: str or
~azure.mgmt.network.v2019_04_01.models.DdosCustomPolicyTriggerSensitivityOverride
"""
_attribute_map = {
'protocol': {'key': 'protocol', 'type': 'str'},
'trigger_rate_override': {'key': 'triggerRateOverride', 'type': 'str'},
'source_rate_override': {'key': 'sourceRateOverride', 'type': 'str'},
'trigger_sensitivity_override': {'key': 'triggerSensitivityOverride', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ProtocolCustomSettingsFormat, self).__init__(**kwargs)
self.protocol = kwargs.get('protocol', None)
self.trigger_rate_override = kwargs.get('trigger_rate_override', None)
self.source_rate_override = kwargs.get('source_rate_override', None)
self.trigger_sensitivity_override = kwargs.get('trigger_sensitivity_override', None)
class PublicIPAddress(Resource):
"""Public IP address resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param sku: The public IP address SKU.
:type sku: ~azure.mgmt.network.v2019_04_01.models.PublicIPAddressSku
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param zones: A list of availability zones denoting the zones from which the IP allocated for
the resource must come.
:type zones: list[str]
:param public_ip_allocation_method: The public IP address allocation method. Possible values
include: "Static", "Dynamic".
:type public_ip_allocation_method: str or
~azure.mgmt.network.v2019_04_01.models.IPAllocationMethod
:param public_ip_address_version: The public IP address version. Possible values include:
"IPv4", "IPv6".
:type public_ip_address_version: str or ~azure.mgmt.network.v2019_04_01.models.IPVersion
:ivar ip_configuration: The IP configuration associated with the public IP address.
:vartype ip_configuration: ~azure.mgmt.network.v2019_04_01.models.IPConfiguration
:param dns_settings: The FQDN of the DNS record associated with the public IP address.
:type dns_settings: ~azure.mgmt.network.v2019_04_01.models.PublicIPAddressDnsSettings
:param ddos_settings: The DDoS protection custom policy associated with the public IP address.
:type ddos_settings: ~azure.mgmt.network.v2019_04_01.models.DdosSettings
:param ip_tags: The list of tags associated with the public IP address.
:type ip_tags: list[~azure.mgmt.network.v2019_04_01.models.IpTag]
:param ip_address: The IP address associated with the public IP address resource.
:type ip_address: str
:param public_ip_prefix: The Public IP Prefix this Public IP Address should be allocated from.
:type public_ip_prefix: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param idle_timeout_in_minutes: The idle timeout of the public IP address.
:type idle_timeout_in_minutes: int
:param resource_guid: The resource GUID property of the public IP resource.
:type resource_guid: str
:param provisioning_state: The provisioning state of the PublicIP resource. Possible values
are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'ip_configuration': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'sku': {'key': 'sku', 'type': 'PublicIPAddressSku'},
'etag': {'key': 'etag', 'type': 'str'},
'zones': {'key': 'zones', 'type': '[str]'},
'public_ip_allocation_method': {'key': 'properties.publicIPAllocationMethod', 'type': 'str'},
'public_ip_address_version': {'key': 'properties.publicIPAddressVersion', 'type': 'str'},
'ip_configuration': {'key': 'properties.ipConfiguration', 'type': 'IPConfiguration'},
'dns_settings': {'key': 'properties.dnsSettings', 'type': 'PublicIPAddressDnsSettings'},
'ddos_settings': {'key': 'properties.ddosSettings', 'type': 'DdosSettings'},
'ip_tags': {'key': 'properties.ipTags', 'type': '[IpTag]'},
'ip_address': {'key': 'properties.ipAddress', 'type': 'str'},
'public_ip_prefix': {'key': 'properties.publicIPPrefix', 'type': 'SubResource'},
'idle_timeout_in_minutes': {'key': 'properties.idleTimeoutInMinutes', 'type': 'int'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PublicIPAddress, self).__init__(**kwargs)
self.sku = kwargs.get('sku', None)
self.etag = kwargs.get('etag', None)
self.zones = kwargs.get('zones', None)
self.public_ip_allocation_method = kwargs.get('public_ip_allocation_method', None)
self.public_ip_address_version = kwargs.get('public_ip_address_version', None)
self.ip_configuration = None
self.dns_settings = kwargs.get('dns_settings', None)
self.ddos_settings = kwargs.get('ddos_settings', None)
self.ip_tags = kwargs.get('ip_tags', None)
self.ip_address = kwargs.get('ip_address', None)
self.public_ip_prefix = kwargs.get('public_ip_prefix', None)
self.idle_timeout_in_minutes = kwargs.get('idle_timeout_in_minutes', None)
self.resource_guid = kwargs.get('resource_guid', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
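# Illustrative sketch (editor addition): a Standard, static, zonal public IP with a DNS label,
# combining PublicIPAddressSku and PublicIPAddressDnsSettings defined nearby. The location,
# zone, and DNS label are placeholders.
def _example_public_ip_address():
    """Build a sample PublicIPAddress (illustrative only)."""
    return PublicIPAddress(
        location="westus",
        sku=PublicIPAddressSku(name="Standard"),
        public_ip_allocation_method="Static",
        public_ip_address_version="IPv4",
        zones=["1"],
        dns_settings=PublicIPAddressDnsSettings(domain_name_label="example-app"),
    )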
class PublicIPAddressDnsSettings(msrest.serialization.Model):
"""Contains FQDN of the DNS record associated with the public IP address.
:param domain_name_label: Gets or sets the domain name label. The concatenation of the domain
name label and the regionalized DNS zone makes up the fully qualified domain name associated
with the public IP address. If a domain name label is specified, an A DNS record is created for
the public IP in the Microsoft Azure DNS system.
:type domain_name_label: str
:param fqdn: Gets the FQDN, the fully qualified domain name of the A DNS record associated with
the public IP. This is the concatenation of the domainNameLabel and the regionalized DNS zone.
:type fqdn: str
:param reverse_fqdn: Gets or sets the reverse FQDN. A user-visible, fully qualified domain name
that resolves to this public IP address. If the reverseFqdn is specified, then a PTR DNS record
is created pointing from the IP address in the in-addr.arpa domain to the reverse FQDN.
:type reverse_fqdn: str
"""
_attribute_map = {
'domain_name_label': {'key': 'domainNameLabel', 'type': 'str'},
'fqdn': {'key': 'fqdn', 'type': 'str'},
'reverse_fqdn': {'key': 'reverseFqdn', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PublicIPAddressDnsSettings, self).__init__(**kwargs)
self.domain_name_label = kwargs.get('domain_name_label', None)
self.fqdn = kwargs.get('fqdn', None)
self.reverse_fqdn = kwargs.get('reverse_fqdn', None)
class PublicIPAddressListResult(msrest.serialization.Model):
"""Response for ListPublicIpAddresses API service call.
:param value: A list of public IP addresses that exist in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.PublicIPAddress]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PublicIPAddress]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PublicIPAddressListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class PublicIPAddressSku(msrest.serialization.Model):
"""SKU of a public IP address.
:param name: Name of a public IP address SKU. Possible values include: "Basic", "Standard".
:type name: str or ~azure.mgmt.network.v2019_04_01.models.PublicIPAddressSkuName
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PublicIPAddressSku, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
class PublicIPPrefix(Resource):
"""Public IP prefix resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param sku: The public IP prefix SKU.
:type sku: ~azure.mgmt.network.v2019_04_01.models.PublicIPPrefixSku
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param zones: A list of availability zones denoting the zones from which the IP allocated for
the resource must come.
:type zones: list[str]
:param public_ip_address_version: The public IP address version. Possible values include:
"IPv4", "IPv6".
:type public_ip_address_version: str or ~azure.mgmt.network.v2019_04_01.models.IPVersion
:param ip_tags: The list of tags associated with the public IP prefix.
:type ip_tags: list[~azure.mgmt.network.v2019_04_01.models.IpTag]
:param prefix_length: The Length of the Public IP Prefix.
:type prefix_length: int
:param ip_prefix: The allocated Prefix.
:type ip_prefix: str
:param public_ip_addresses: The list of all referenced PublicIPAddresses.
:type public_ip_addresses:
list[~azure.mgmt.network.v2019_04_01.models.ReferencedPublicIpAddress]
:ivar load_balancer_frontend_ip_configuration: The reference to load balancer frontend IP
configuration associated with the public IP prefix.
:vartype load_balancer_frontend_ip_configuration:
~azure.mgmt.network.v2019_04_01.models.SubResource
:param resource_guid: The resource GUID property of the public IP prefix resource.
:type resource_guid: str
:param provisioning_state: The provisioning state of the Public IP prefix resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'load_balancer_frontend_ip_configuration': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'sku': {'key': 'sku', 'type': 'PublicIPPrefixSku'},
'etag': {'key': 'etag', 'type': 'str'},
'zones': {'key': 'zones', 'type': '[str]'},
'public_ip_address_version': {'key': 'properties.publicIPAddressVersion', 'type': 'str'},
'ip_tags': {'key': 'properties.ipTags', 'type': '[IpTag]'},
'prefix_length': {'key': 'properties.prefixLength', 'type': 'int'},
'ip_prefix': {'key': 'properties.ipPrefix', 'type': 'str'},
'public_ip_addresses': {'key': 'properties.publicIPAddresses', 'type': '[ReferencedPublicIpAddress]'},
'load_balancer_frontend_ip_configuration': {'key': 'properties.loadBalancerFrontendIpConfiguration', 'type': 'SubResource'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PublicIPPrefix, self).__init__(**kwargs)
self.sku = kwargs.get('sku', None)
self.etag = kwargs.get('etag', None)
self.zones = kwargs.get('zones', None)
self.public_ip_address_version = kwargs.get('public_ip_address_version', None)
self.ip_tags = kwargs.get('ip_tags', None)
self.prefix_length = kwargs.get('prefix_length', None)
self.ip_prefix = kwargs.get('ip_prefix', None)
self.public_ip_addresses = kwargs.get('public_ip_addresses', None)
self.load_balancer_frontend_ip_configuration = None
self.resource_guid = kwargs.get('resource_guid', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
class PublicIPPrefixListResult(msrest.serialization.Model):
"""Response for ListPublicIpPrefixes API service call.
:param value: A list of public IP prefixes that exist in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.PublicIPPrefix]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PublicIPPrefix]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PublicIPPrefixListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class PublicIPPrefixSku(msrest.serialization.Model):
"""SKU of a public IP prefix.
:param name: Name of a public IP prefix SKU. Possible values include: "Standard".
:type name: str or ~azure.mgmt.network.v2019_04_01.models.PublicIPPrefixSkuName
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PublicIPPrefixSku, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
class QueryTroubleshootingParameters(msrest.serialization.Model):
"""Parameters that define the resource to query the troubleshooting result.
All required parameters must be populated in order to send to Azure.
:param target_resource_id: Required. The target resource ID to query the troubleshooting
result.
:type target_resource_id: str
"""
_validation = {
'target_resource_id': {'required': True},
}
_attribute_map = {
'target_resource_id': {'key': 'targetResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(QueryTroubleshootingParameters, self).__init__(**kwargs)
self.target_resource_id = kwargs['target_resource_id']
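# Illustrative sketch (editor addition): target_resource_id is required (note the
# kwargs['target_resource_id'] lookup above, which raises KeyError when omitted). The target
# resource ID below is a placeholder.
def _example_query_troubleshooting_parameters():
    """Build a sample QueryTroubleshootingParameters (illustrative only)."""
    target_id = ("/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/"
                 "example-rg/providers/Microsoft.Network/virtualNetworkGateways/example-gw")
    return QueryTroubleshootingParameters(target_resource_id=target_id)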
class ReferencedPublicIpAddress(msrest.serialization.Model):
"""Reference to a public IP address.
:param id: The PublicIPAddress Reference.
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ReferencedPublicIpAddress, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
class ResourceNavigationLink(SubResource):
"""ResourceNavigationLink resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the resource that is unique within a resource group. This name can be used
to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Resource type.
:vartype type: str
:param linked_resource_type: Resource type of the linked resource.
:type linked_resource_type: str
:param link: Link to the external resource.
:type link: str
:ivar provisioning_state: Provisioning state of the ResourceNavigationLink resource.
:vartype provisioning_state: str
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'linked_resource_type': {'key': 'properties.linkedResourceType', 'type': 'str'},
'link': {'key': 'properties.link', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ResourceNavigationLink, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.linked_resource_type = kwargs.get('linked_resource_type', None)
self.link = kwargs.get('link', None)
self.provisioning_state = None
class ResourceNavigationLinksListResult(msrest.serialization.Model):
"""Response for ResourceNavigationLinks_List operation.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: The resource navigation links in a subnet.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ResourceNavigationLink]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ResourceNavigationLink]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ResourceNavigationLinksListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class RetentionPolicyParameters(msrest.serialization.Model):
"""Parameters that define the retention policy for flow log.
:param days: Number of days to retain flow log records.
:type days: int
:param enabled: Flag to enable/disable retention.
:type enabled: bool
"""
_attribute_map = {
'days': {'key': 'days', 'type': 'int'},
'enabled': {'key': 'enabled', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(RetentionPolicyParameters, self).__init__(**kwargs)
self.days = kwargs.get('days', 0)
self.enabled = kwargs.get('enabled', False)
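# Illustrative sketch (editor addition): enabling flow log retention for 30 days. Note that,
# unlike most models in this file, the defaults above are 0 and False rather than None.
def _example_retention_policy():
    """Build a sample RetentionPolicyParameters (illustrative only)."""
    return RetentionPolicyParameters(days=30, enabled=True)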
class Route(SubResource):
"""Route resource.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param address_prefix: The destination CIDR to which the route applies.
:type address_prefix: str
:param next_hop_type: The type of Azure hop the packet should be sent to. Possible values
include: "VirtualNetworkGateway", "VnetLocal", "Internet", "VirtualAppliance", "None".
:type next_hop_type: str or ~azure.mgmt.network.v2019_04_01.models.RouteNextHopType
:param next_hop_ip_address: The IP address packets should be forwarded to. Next hop values are
only allowed in routes where the next hop type is VirtualAppliance.
:type next_hop_ip_address: str
:param provisioning_state: The provisioning state of the resource. Possible values are:
'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'address_prefix': {'key': 'properties.addressPrefix', 'type': 'str'},
'next_hop_type': {'key': 'properties.nextHopType', 'type': 'str'},
'next_hop_ip_address': {'key': 'properties.nextHopIpAddress', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Route, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.address_prefix = kwargs.get('address_prefix', None)
self.next_hop_type = kwargs.get('next_hop_type', None)
self.next_hop_ip_address = kwargs.get('next_hop_ip_address', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
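# Illustrative sketch (editor addition): a user-defined route that forces traffic for
# 0.0.0.0/0 through a virtual appliance, so next_hop_ip_address is set as the field
# description above requires. The route name and appliance IP are placeholders.
def _example_route():
    """Build a sample Route (illustrative only)."""
    return Route(
        name="default-via-nva",
        address_prefix="0.0.0.0/0",
        next_hop_type="VirtualAppliance",
        next_hop_ip_address="10.0.1.4",
    )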
class RouteFilter(Resource):
"""Route Filter Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param rules: Collection of RouteFilterRules contained within a route filter.
:type rules: list[~azure.mgmt.network.v2019_04_01.models.RouteFilterRule]
:param peerings: A collection of references to express route circuit peerings.
:type peerings: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitPeering]
:param ipv6_peerings: A collection of references to express route circuit ipv6 peerings.
:type ipv6_peerings: list[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitPeering]
:ivar provisioning_state: The provisioning state of the resource. Possible values are:
'Updating', 'Deleting', 'Succeeded' and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'rules': {'key': 'properties.rules', 'type': '[RouteFilterRule]'},
'peerings': {'key': 'properties.peerings', 'type': '[ExpressRouteCircuitPeering]'},
'ipv6_peerings': {'key': 'properties.ipv6Peerings', 'type': '[ExpressRouteCircuitPeering]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RouteFilter, self).__init__(**kwargs)
self.etag = None
self.rules = kwargs.get('rules', None)
self.peerings = kwargs.get('peerings', None)
self.ipv6_peerings = kwargs.get('ipv6_peerings', None)
self.provisioning_state = None
class RouteFilterListResult(msrest.serialization.Model):
"""Response for the ListRouteFilters API service call.
:param value: Gets a list of route filters in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.RouteFilter]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[RouteFilter]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RouteFilterListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class RouteFilterRule(SubResource):
"""Route Filter Rule Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param location: Resource location.
:type location: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param access: The access type of the rule. Possible values include: "Allow", "Deny".
:type access: str or ~azure.mgmt.network.v2019_04_01.models.Access
:param route_filter_rule_type: The rule type of the rule. Possible values include: "Community".
:type route_filter_rule_type: str or ~azure.mgmt.network.v2019_04_01.models.RouteFilterRuleType
:param communities: The collection of BGP community values to filter on, e.g.
['12076:5010', '12076:5020'].
:type communities: list[str]
:ivar provisioning_state: The provisioning state of the resource. Possible values are:
'Updating', 'Deleting', 'Succeeded' and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'access': {'key': 'properties.access', 'type': 'str'},
'route_filter_rule_type': {'key': 'properties.routeFilterRuleType', 'type': 'str'},
'communities': {'key': 'properties.communities', 'type': '[str]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RouteFilterRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.location = kwargs.get('location', None)
self.etag = None
self.access = kwargs.get('access', None)
self.route_filter_rule_type = kwargs.get('route_filter_rule_type', None)
self.communities = kwargs.get('communities', None)
self.provisioning_state = None
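# Illustrative sketch (editor addition): an 'Allow' community rule matching the example values
# quoted in the communities description above. The rule name is a placeholder.
def _example_route_filter_rule():
    """Build a sample RouteFilterRule (illustrative only)."""
    return RouteFilterRule(
        name="allow-exchange-communities",
        access="Allow",
        route_filter_rule_type="Community",
        communities=["12076:5010", "12076:5020"],
    )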
class RouteFilterRuleListResult(msrest.serialization.Model):
"""Response for the ListRouteFilterRules API service call.
:param value: Gets a list of RouteFilterRules in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.RouteFilterRule]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[RouteFilterRule]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RouteFilterRuleListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class RouteListResult(msrest.serialization.Model):
"""Response for the ListRoute API service call.
:param value: Gets a list of routes in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.Route]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[Route]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RouteListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class RouteTable(Resource):
"""Route table resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param etag: Gets a unique read-only string that changes whenever the resource is updated.
:type etag: str
:param routes: Collection of routes contained within a route table.
:type routes: list[~azure.mgmt.network.v2019_04_01.models.Route]
:ivar subnets: A collection of references to subnets.
:vartype subnets: list[~azure.mgmt.network.v2019_04_01.models.Subnet]
:param disable_bgp_route_propagation: Gets or sets whether to disable the routes learned by BGP
on that route table. True means disable.
:type disable_bgp_route_propagation: bool
:param provisioning_state: The provisioning state of the resource. Possible values are:
'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'subnets': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'routes': {'key': 'properties.routes', 'type': '[Route]'},
'subnets': {'key': 'properties.subnets', 'type': '[Subnet]'},
'disable_bgp_route_propagation': {'key': 'properties.disableBgpRoutePropagation', 'type': 'bool'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RouteTable, self).__init__(**kwargs)
self.etag = kwargs.get('etag', None)
self.routes = kwargs.get('routes', None)
self.subnets = None
self.disable_bgp_route_propagation = kwargs.get('disable_bgp_route_propagation', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
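# Illustrative sketch (editor addition): a RouteTable that embeds the Route sketch defined
# earlier in this module and disables BGP route propagation on associated subnets. The location
# is a placeholder.
def _example_route_table():
    """Build a sample RouteTable (illustrative only)."""
    return RouteTable(
        location="westus",
        routes=[_example_route()],
        disable_bgp_route_propagation=True,
    )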
class RouteTableListResult(msrest.serialization.Model):
"""Response for the ListRouteTable API service call.
:param value: Gets a list of route tables in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.RouteTable]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[RouteTable]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RouteTableListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class SecurityGroupNetworkInterface(msrest.serialization.Model):
"""Network interface and all its associated security rules.
:param id: ID of the network interface.
:type id: str
:param security_rule_associations: All security rules associated with the network interface.
:type security_rule_associations:
~azure.mgmt.network.v2019_04_01.models.SecurityRuleAssociations
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'security_rule_associations': {'key': 'securityRuleAssociations', 'type': 'SecurityRuleAssociations'},
}
def __init__(
self,
**kwargs
):
super(SecurityGroupNetworkInterface, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.security_rule_associations = kwargs.get('security_rule_associations', None)
class SecurityGroupViewParameters(msrest.serialization.Model):
"""Parameters that define the VM to check security groups for.
All required parameters must be populated in order to send to Azure.
:param target_resource_id: Required. ID of the target VM.
:type target_resource_id: str
"""
_validation = {
'target_resource_id': {'required': True},
}
_attribute_map = {
'target_resource_id': {'key': 'targetResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SecurityGroupViewParameters, self).__init__(**kwargs)
self.target_resource_id = kwargs['target_resource_id']
class SecurityGroupViewResult(msrest.serialization.Model):
"""The information about security rules applied to the specified VM.
:param network_interfaces: List of network interfaces on the specified VM.
:type network_interfaces:
list[~azure.mgmt.network.v2019_04_01.models.SecurityGroupNetworkInterface]
"""
_attribute_map = {
'network_interfaces': {'key': 'networkInterfaces', 'type': '[SecurityGroupNetworkInterface]'},
}
def __init__(
self,
**kwargs
):
super(SecurityGroupViewResult, self).__init__(**kwargs)
self.network_interfaces = kwargs.get('network_interfaces', None)
class SecurityRule(SubResource):
"""Network security rule.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param description: A description for this rule. Restricted to 140 chars.
:type description: str
:param protocol: Network protocol this rule applies to. Possible values include: "Tcp", "Udp",
"Icmp", "Esp", "*".
:type protocol: str or ~azure.mgmt.network.v2019_04_01.models.SecurityRuleProtocol
:param source_port_range: The source port or range. Integer or range between 0 and 65535.
Asterisk '*' can also be used to match all ports.
:type source_port_range: str
:param destination_port_range: The destination port or range. Integer or range between 0 and
65535. Asterisk '*' can also be used to match all ports.
:type destination_port_range: str
:param source_address_prefix: The CIDR or source IP range. Asterisk '*' can also be used to
match all source IPs. Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and 'Internet'
can also be used. If this is an ingress rule, specifies where network traffic originates from.
:type source_address_prefix: str
:param source_address_prefixes: The CIDR or source IP ranges.
:type source_address_prefixes: list[str]
:param source_application_security_groups: The application security group specified as source.
:type source_application_security_groups:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationSecurityGroup]
:param destination_address_prefix: The destination address prefix. CIDR or destination IP
range. Asterisk '*' can also be used to match all destination IPs. Default tags such as
'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used.
:type destination_address_prefix: str
:param destination_address_prefixes: The destination address prefixes. CIDR or destination IP
ranges.
:type destination_address_prefixes: list[str]
:param destination_application_security_groups: The application security group specified as
destination.
:type destination_application_security_groups:
list[~azure.mgmt.network.v2019_04_01.models.ApplicationSecurityGroup]
:param source_port_ranges: The source port ranges.
:type source_port_ranges: list[str]
:param destination_port_ranges: The destination port ranges.
:type destination_port_ranges: list[str]
:param access: The network traffic is allowed or denied. Possible values include: "Allow",
"Deny".
:type access: str or ~azure.mgmt.network.v2019_04_01.models.SecurityRuleAccess
:param priority: The priority of the rule. The value can be between 100 and 4096. The priority
number must be unique for each rule in the collection. The lower the priority number, the
higher the priority of the rule.
:type priority: int
:param direction: The direction of the rule. The direction specifies if rule will be evaluated
on incoming or outgoing traffic. Possible values include: "Inbound", "Outbound".
:type direction: str or ~azure.mgmt.network.v2019_04_01.models.SecurityRuleDirection
:param provisioning_state: The provisioning state of the security rule resource. Possible
 values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'description': {'key': 'properties.description', 'type': 'str'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'source_port_range': {'key': 'properties.sourcePortRange', 'type': 'str'},
'destination_port_range': {'key': 'properties.destinationPortRange', 'type': 'str'},
'source_address_prefix': {'key': 'properties.sourceAddressPrefix', 'type': 'str'},
'source_address_prefixes': {'key': 'properties.sourceAddressPrefixes', 'type': '[str]'},
'source_application_security_groups': {'key': 'properties.sourceApplicationSecurityGroups', 'type': '[ApplicationSecurityGroup]'},
'destination_address_prefix': {'key': 'properties.destinationAddressPrefix', 'type': 'str'},
'destination_address_prefixes': {'key': 'properties.destinationAddressPrefixes', 'type': '[str]'},
'destination_application_security_groups': {'key': 'properties.destinationApplicationSecurityGroups', 'type': '[ApplicationSecurityGroup]'},
'source_port_ranges': {'key': 'properties.sourcePortRanges', 'type': '[str]'},
'destination_port_ranges': {'key': 'properties.destinationPortRanges', 'type': '[str]'},
'access': {'key': 'properties.access', 'type': 'str'},
'priority': {'key': 'properties.priority', 'type': 'int'},
'direction': {'key': 'properties.direction', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SecurityRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.description = kwargs.get('description', None)
self.protocol = kwargs.get('protocol', None)
self.source_port_range = kwargs.get('source_port_range', None)
self.destination_port_range = kwargs.get('destination_port_range', None)
self.source_address_prefix = kwargs.get('source_address_prefix', None)
self.source_address_prefixes = kwargs.get('source_address_prefixes', None)
self.source_application_security_groups = kwargs.get('source_application_security_groups', None)
self.destination_address_prefix = kwargs.get('destination_address_prefix', None)
self.destination_address_prefixes = kwargs.get('destination_address_prefixes', None)
self.destination_application_security_groups = kwargs.get('destination_application_security_groups', None)
self.source_port_ranges = kwargs.get('source_port_ranges', None)
self.destination_port_ranges = kwargs.get('destination_port_ranges', None)
self.access = kwargs.get('access', None)
self.priority = kwargs.get('priority', None)
self.direction = kwargs.get('direction', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
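# Illustrative sketch (not part of the generated SDK surface): shows how a SecurityRule could be
# built from the keyword arguments accepted by the __init__ above. The concrete values
# ('allow-ssh', port 22, priority 100) are placeholders, not service defaults.
def _example_security_rule():
    # An inbound rule allowing TCP/22 from any source; the string values for 'protocol',
    # 'access' and 'direction' mirror the possible values listed in the docstring above.
    return SecurityRule(
        name='allow-ssh',
        protocol='Tcp',
        source_port_range='*',
        destination_port_range='22',
        source_address_prefix='*',
        destination_address_prefix='*',
        access='Allow',
        priority=100,
        direction='Inbound',
    )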
class SecurityRuleAssociations(msrest.serialization.Model):
"""All security rules associated with the network interface.
:param network_interface_association: Network interface and its custom security rules.
:type network_interface_association:
~azure.mgmt.network.v2019_04_01.models.NetworkInterfaceAssociation
:param subnet_association: Subnet and its custom security rules.
:type subnet_association: ~azure.mgmt.network.v2019_04_01.models.SubnetAssociation
:param default_security_rules: Collection of default security rules of the network security
group.
:type default_security_rules: list[~azure.mgmt.network.v2019_04_01.models.SecurityRule]
:param effective_security_rules: Collection of effective security rules.
:type effective_security_rules:
list[~azure.mgmt.network.v2019_04_01.models.EffectiveNetworkSecurityRule]
"""
_attribute_map = {
'network_interface_association': {'key': 'networkInterfaceAssociation', 'type': 'NetworkInterfaceAssociation'},
'subnet_association': {'key': 'subnetAssociation', 'type': 'SubnetAssociation'},
'default_security_rules': {'key': 'defaultSecurityRules', 'type': '[SecurityRule]'},
'effective_security_rules': {'key': 'effectiveSecurityRules', 'type': '[EffectiveNetworkSecurityRule]'},
}
def __init__(
self,
**kwargs
):
super(SecurityRuleAssociations, self).__init__(**kwargs)
self.network_interface_association = kwargs.get('network_interface_association', None)
self.subnet_association = kwargs.get('subnet_association', None)
self.default_security_rules = kwargs.get('default_security_rules', None)
self.effective_security_rules = kwargs.get('effective_security_rules', None)
class SecurityRuleListResult(msrest.serialization.Model):
"""Response for ListSecurityRule API service call. Retrieves all security rules that belongs to a network security group.
:param value: The security rules in a network security group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.SecurityRule]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[SecurityRule]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SecurityRuleListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ServiceAssociationLink(SubResource):
"""ServiceAssociationLink resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the resource that is unique within a resource group. This name can be used
to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param type: Resource type.
:type type: str
:param linked_resource_type: Resource type of the linked resource.
:type linked_resource_type: str
:param link: Link to the external resource.
:type link: str
:ivar provisioning_state: Provisioning state of the ServiceAssociationLink resource.
:vartype provisioning_state: str
:param allow_delete: If true, the resource can be deleted.
:type allow_delete: bool
:param locations: A list of locations.
:type locations: list[str]
"""
_validation = {
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'linked_resource_type': {'key': 'properties.linkedResourceType', 'type': 'str'},
'link': {'key': 'properties.link', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'allow_delete': {'key': 'properties.allowDelete', 'type': 'bool'},
'locations': {'key': 'properties.locations', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(ServiceAssociationLink, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = kwargs.get('type', None)
self.linked_resource_type = kwargs.get('linked_resource_type', None)
self.link = kwargs.get('link', None)
self.provisioning_state = None
self.allow_delete = kwargs.get('allow_delete', None)
self.locations = kwargs.get('locations', None)
class ServiceAssociationLinksListResult(msrest.serialization.Model):
"""Response for ServiceAssociationLinks_List operation.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: The service association links in a subnet.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ServiceAssociationLink]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ServiceAssociationLink]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ServiceAssociationLinksListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class ServiceEndpointPolicy(Resource):
"""Service End point policy resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param service_endpoint_policy_definitions: A collection of service endpoint policy definitions
of the service endpoint policy.
:type service_endpoint_policy_definitions:
list[~azure.mgmt.network.v2019_04_01.models.ServiceEndpointPolicyDefinition]
:ivar subnets: A collection of references to subnets.
:vartype subnets: list[~azure.mgmt.network.v2019_04_01.models.Subnet]
:ivar resource_guid: The resource GUID property of the service endpoint policy resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the service endpoint policy. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'subnets': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'service_endpoint_policy_definitions': {'key': 'properties.serviceEndpointPolicyDefinitions', 'type': '[ServiceEndpointPolicyDefinition]'},
'subnets': {'key': 'properties.subnets', 'type': '[Subnet]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ServiceEndpointPolicy, self).__init__(**kwargs)
self.etag = kwargs.get('etag', None)
self.service_endpoint_policy_definitions = kwargs.get('service_endpoint_policy_definitions', None)
self.subnets = None
self.resource_guid = None
self.provisioning_state = None
class ServiceEndpointPolicyDefinition(SubResource):
"""Service Endpoint policy definitions.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param description: A description for this rule. Restricted to 140 chars.
:type description: str
:param service: Service endpoint name.
:type service: str
:param service_resources: A list of service resources.
:type service_resources: list[str]
:ivar provisioning_state: The provisioning state of the service endpoint policy definition.
 Possible values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'description': {'key': 'properties.description', 'type': 'str'},
'service': {'key': 'properties.service', 'type': 'str'},
'service_resources': {'key': 'properties.serviceResources', 'type': '[str]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ServiceEndpointPolicyDefinition, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.description = kwargs.get('description', None)
self.service = kwargs.get('service', None)
self.service_resources = kwargs.get('service_resources', None)
self.provisioning_state = None
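# Illustrative sketch (not part of the generated SDK surface): nests a
# ServiceEndpointPolicyDefinition inside a ServiceEndpointPolicy, using only keyword arguments
# accepted by the constructors above. The service name 'Microsoft.Storage', the location and the
# resource ID are placeholder values.
def _example_service_endpoint_policy():
    definition = ServiceEndpointPolicyDefinition(
        name='allow-storage-account',
        service='Microsoft.Storage',
        service_resources=[
            '/subscriptions/<subscription-id>/resourceGroups/<rg>/providers/Microsoft.Storage/storageAccounts/<account>',
        ],
    )
    return ServiceEndpointPolicy(
        location='westus',
        service_endpoint_policy_definitions=[definition],
    )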
class ServiceEndpointPolicyDefinitionListResult(msrest.serialization.Model):
"""Response for ListServiceEndpointPolicyDefinition API service call. Retrieves all service endpoint policy definition that belongs to a service endpoint policy.
:param value: The service endpoint policy definition in a service endpoint policy.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ServiceEndpointPolicyDefinition]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ServiceEndpointPolicyDefinition]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ServiceEndpointPolicyDefinitionListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ServiceEndpointPolicyListResult(msrest.serialization.Model):
"""Response for ListServiceEndpointPolicies API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of ServiceEndpointPolicy resources.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ServiceEndpointPolicy]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ServiceEndpointPolicy]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ServiceEndpointPolicyListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class ServiceEndpointPropertiesFormat(msrest.serialization.Model):
"""The service endpoint properties.
:param service: The type of the endpoint service.
:type service: str
:param locations: A list of locations.
:type locations: list[str]
:param provisioning_state: The provisioning state of the resource.
:type provisioning_state: str
"""
_attribute_map = {
'service': {'key': 'service', 'type': 'str'},
'locations': {'key': 'locations', 'type': '[str]'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ServiceEndpointPropertiesFormat, self).__init__(**kwargs)
self.service = kwargs.get('service', None)
self.locations = kwargs.get('locations', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
class ServiceTagInformation(msrest.serialization.Model):
"""The service tag information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar properties: Properties of the service tag information.
:vartype properties:
~azure.mgmt.network.v2019_04_01.models.ServiceTagInformationPropertiesFormat
:ivar name: The name of the service tag.
:vartype name: str
:ivar id: The ID of the service tag.
:vartype id: str
"""
_validation = {
'properties': {'readonly': True},
'name': {'readonly': True},
'id': {'readonly': True},
}
_attribute_map = {
'properties': {'key': 'properties', 'type': 'ServiceTagInformationPropertiesFormat'},
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ServiceTagInformation, self).__init__(**kwargs)
self.properties = None
self.name = None
self.id = None
class ServiceTagInformationPropertiesFormat(msrest.serialization.Model):
"""Properties of the service tag information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar change_number: The iteration number of the service tag.
:vartype change_number: str
:ivar region: The region of the service tag.
:vartype region: str
:ivar system_service: The name of the system service.
:vartype system_service: str
:ivar address_prefixes: The list of IP address prefixes.
:vartype address_prefixes: list[str]
"""
_validation = {
'change_number': {'readonly': True},
'region': {'readonly': True},
'system_service': {'readonly': True},
'address_prefixes': {'readonly': True},
}
_attribute_map = {
'change_number': {'key': 'changeNumber', 'type': 'str'},
'region': {'key': 'region', 'type': 'str'},
'system_service': {'key': 'systemService', 'type': 'str'},
'address_prefixes': {'key': 'addressPrefixes', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(ServiceTagInformationPropertiesFormat, self).__init__(**kwargs)
self.change_number = None
self.region = None
self.system_service = None
self.address_prefixes = None
class ServiceTagsListResult(msrest.serialization.Model):
"""Response for the ListServiceTags API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar name: The name of the cloud.
:vartype name: str
:ivar id: The ID of the cloud.
:vartype id: str
:ivar type: The azure resource type.
:vartype type: str
:ivar change_number: The iteration number.
:vartype change_number: str
:ivar cloud: The name of the cloud.
:vartype cloud: str
:ivar values: The list of service tag information resources.
:vartype values: list[~azure.mgmt.network.v2019_04_01.models.ServiceTagInformation]
"""
_validation = {
'name': {'readonly': True},
'id': {'readonly': True},
'type': {'readonly': True},
'change_number': {'readonly': True},
'cloud': {'readonly': True},
'values': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'change_number': {'key': 'changeNumber', 'type': 'str'},
'cloud': {'key': 'cloud', 'type': 'str'},
'values': {'key': 'values', 'type': '[ServiceTagInformation]'},
}
def __init__(
self,
**kwargs
):
super(ServiceTagsListResult, self).__init__(**kwargs)
self.name = None
self.id = None
self.type = None
self.change_number = None
self.cloud = None
self.values = None
class Subnet(SubResource):
"""Subnet in a virtual network resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param address_prefix: The address prefix for the subnet.
:type address_prefix: str
:param address_prefixes: List of address prefixes for the subnet.
:type address_prefixes: list[str]
:param network_security_group: The reference of the NetworkSecurityGroup resource.
:type network_security_group: ~azure.mgmt.network.v2019_04_01.models.NetworkSecurityGroup
:param route_table: The reference of the RouteTable resource.
:type route_table: ~azure.mgmt.network.v2019_04_01.models.RouteTable
:param nat_gateway: Nat gateway associated with this subnet.
:type nat_gateway: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param service_endpoints: An array of service endpoints.
:type service_endpoints:
list[~azure.mgmt.network.v2019_04_01.models.ServiceEndpointPropertiesFormat]
:param service_endpoint_policies: An array of service endpoint policies.
:type service_endpoint_policies:
list[~azure.mgmt.network.v2019_04_01.models.ServiceEndpointPolicy]
:ivar private_endpoints: An array of references to private endpoints.
:vartype private_endpoints: list[~azure.mgmt.network.v2019_04_01.models.PrivateEndpoint]
:ivar ip_configurations: Gets an array of references to the network interface IP configurations
 using this subnet.
:vartype ip_configurations: list[~azure.mgmt.network.v2019_04_01.models.IPConfiguration]
:ivar ip_configuration_profiles: Array of IP configuration profiles which reference this
subnet.
:vartype ip_configuration_profiles:
list[~azure.mgmt.network.v2019_04_01.models.IPConfigurationProfile]
:param resource_navigation_links: Gets an array of references to the external resources using
 this subnet.
:type resource_navigation_links:
list[~azure.mgmt.network.v2019_04_01.models.ResourceNavigationLink]
:param service_association_links: Gets an array of references to services injecting into this
subnet.
:type service_association_links:
list[~azure.mgmt.network.v2019_04_01.models.ServiceAssociationLink]
:param delegations: Gets an array of references to the delegations on the subnet.
:type delegations: list[~azure.mgmt.network.v2019_04_01.models.Delegation]
:ivar purpose: A read-only string identifying the intention of use for this subnet based on
delegations and other user-defined properties.
:vartype purpose: str
:param provisioning_state: The provisioning state of the resource.
:type provisioning_state: str
:param private_endpoint_network_policies: Enable or disable applying network policies on the
 private endpoints in the subnet.
:type private_endpoint_network_policies: str
:param private_link_service_network_policies: Enable or disable applying network policies on
 the private link service in the subnet.
:type private_link_service_network_policies: str
"""
_validation = {
'private_endpoints': {'readonly': True},
'ip_configurations': {'readonly': True},
'ip_configuration_profiles': {'readonly': True},
'purpose': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'address_prefix': {'key': 'properties.addressPrefix', 'type': 'str'},
'address_prefixes': {'key': 'properties.addressPrefixes', 'type': '[str]'},
'network_security_group': {'key': 'properties.networkSecurityGroup', 'type': 'NetworkSecurityGroup'},
'route_table': {'key': 'properties.routeTable', 'type': 'RouteTable'},
'nat_gateway': {'key': 'properties.natGateway', 'type': 'SubResource'},
'service_endpoints': {'key': 'properties.serviceEndpoints', 'type': '[ServiceEndpointPropertiesFormat]'},
'service_endpoint_policies': {'key': 'properties.serviceEndpointPolicies', 'type': '[ServiceEndpointPolicy]'},
'private_endpoints': {'key': 'properties.privateEndpoints', 'type': '[PrivateEndpoint]'},
'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[IPConfiguration]'},
'ip_configuration_profiles': {'key': 'properties.ipConfigurationProfiles', 'type': '[IPConfigurationProfile]'},
'resource_navigation_links': {'key': 'properties.resourceNavigationLinks', 'type': '[ResourceNavigationLink]'},
'service_association_links': {'key': 'properties.serviceAssociationLinks', 'type': '[ServiceAssociationLink]'},
'delegations': {'key': 'properties.delegations', 'type': '[Delegation]'},
'purpose': {'key': 'properties.purpose', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'private_endpoint_network_policies': {'key': 'properties.privateEndpointNetworkPolicies', 'type': 'str'},
'private_link_service_network_policies': {'key': 'properties.privateLinkServiceNetworkPolicies', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Subnet, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.address_prefix = kwargs.get('address_prefix', None)
self.address_prefixes = kwargs.get('address_prefixes', None)
self.network_security_group = kwargs.get('network_security_group', None)
self.route_table = kwargs.get('route_table', None)
self.nat_gateway = kwargs.get('nat_gateway', None)
self.service_endpoints = kwargs.get('service_endpoints', None)
self.service_endpoint_policies = kwargs.get('service_endpoint_policies', None)
self.private_endpoints = None
self.ip_configurations = None
self.ip_configuration_profiles = None
self.resource_navigation_links = kwargs.get('resource_navigation_links', None)
self.service_association_links = kwargs.get('service_association_links', None)
self.delegations = kwargs.get('delegations', None)
self.purpose = None
self.provisioning_state = kwargs.get('provisioning_state', None)
self.private_endpoint_network_policies = kwargs.get('private_endpoint_network_policies', None)
self.private_link_service_network_policies = kwargs.get('private_link_service_network_policies', None)
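# Illustrative sketch (not part of the generated SDK surface): a Subnet with an address prefix and
# a storage service endpoint, composed from models defined earlier in this module. The prefix,
# service name and location values are placeholders.
def _example_subnet():
    return Subnet(
        name='default',
        address_prefix='10.0.0.0/24',
        service_endpoints=[
            ServiceEndpointPropertiesFormat(service='Microsoft.Storage', locations=['westus']),
        ],
    )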
class SubnetAssociation(msrest.serialization.Model):
"""Subnet and it's custom security rules.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Subnet ID.
:vartype id: str
:param security_rules: Collection of custom security rules.
:type security_rules: list[~azure.mgmt.network.v2019_04_01.models.SecurityRule]
"""
_validation = {
'id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'security_rules': {'key': 'securityRules', 'type': '[SecurityRule]'},
}
def __init__(
self,
**kwargs
):
super(SubnetAssociation, self).__init__(**kwargs)
self.id = None
self.security_rules = kwargs.get('security_rules', None)
class SubnetListResult(msrest.serialization.Model):
"""Response for ListSubnets API service callRetrieves all subnet that belongs to a virtual network.
:param value: The subnets in a virtual network.
:type value: list[~azure.mgmt.network.v2019_04_01.models.Subnet]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[Subnet]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SubnetListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class TagsObject(msrest.serialization.Model):
"""Tags object for patch operations.
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
"""
_attribute_map = {
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(TagsObject, self).__init__(**kwargs)
self.tags = kwargs.get('tags', None)
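# Illustrative sketch (not part of the generated SDK surface): a TagsObject as it might be used
# for a PATCH (update tags) operation; the tag keys and values are placeholders.
def _example_tags_object():
    return TagsObject(tags={'environment': 'dev', 'costCenter': '1234'})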
class Topology(msrest.serialization.Model):
"""Topology of the specified resource group.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: GUID representing the operation id.
:vartype id: str
:ivar created_date_time: The datetime when the topology was initially created for the resource
group.
:vartype created_date_time: ~datetime.datetime
:ivar last_modified: The datetime when the topology was last modified.
:vartype last_modified: ~datetime.datetime
:param resources: A list of topology resources.
:type resources: list[~azure.mgmt.network.v2019_04_01.models.TopologyResource]
"""
_validation = {
'id': {'readonly': True},
'created_date_time': {'readonly': True},
'last_modified': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
'last_modified': {'key': 'lastModified', 'type': 'iso-8601'},
'resources': {'key': 'resources', 'type': '[TopologyResource]'},
}
def __init__(
self,
**kwargs
):
super(Topology, self).__init__(**kwargs)
self.id = None
self.created_date_time = None
self.last_modified = None
self.resources = kwargs.get('resources', None)
class TopologyAssociation(msrest.serialization.Model):
"""Resources that have an association with the parent resource.
:param name: The name of the resource that is associated with the parent resource.
:type name: str
:param resource_id: The ID of the resource that is associated with the parent resource.
:type resource_id: str
:param association_type: The association type of the child resource to the parent resource.
Possible values include: "Associated", "Contains".
:type association_type: str or ~azure.mgmt.network.v2019_04_01.models.AssociationType
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
'association_type': {'key': 'associationType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(TopologyAssociation, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.resource_id = kwargs.get('resource_id', None)
self.association_type = kwargs.get('association_type', None)
class TopologyParameters(msrest.serialization.Model):
"""Parameters that define the representation of topology.
:param target_resource_group_name: The name of the target resource group on which to perform
 the topology operation.
:type target_resource_group_name: str
:param target_virtual_network: The reference of the Virtual Network resource.
:type target_virtual_network: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param target_subnet: The reference of the Subnet resource.
:type target_subnet: ~azure.mgmt.network.v2019_04_01.models.SubResource
"""
_attribute_map = {
'target_resource_group_name': {'key': 'targetResourceGroupName', 'type': 'str'},
'target_virtual_network': {'key': 'targetVirtualNetwork', 'type': 'SubResource'},
'target_subnet': {'key': 'targetSubnet', 'type': 'SubResource'},
}
def __init__(
self,
**kwargs
):
super(TopologyParameters, self).__init__(**kwargs)
self.target_resource_group_name = kwargs.get('target_resource_group_name', None)
self.target_virtual_network = kwargs.get('target_virtual_network', None)
self.target_subnet = kwargs.get('target_subnet', None)
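# Illustrative sketch (not part of the generated SDK surface): TopologyParameters scoped to a
# resource group and, optionally, to a specific virtual network. It assumes the SubResource model
# defined earlier in this module accepts an 'id' keyword, as its subclasses here do; the resource
# group name and virtual network ID are placeholders.
def _example_topology_parameters():
    return TopologyParameters(
        target_resource_group_name='my-resource-group',
        target_virtual_network=SubResource(
            id='/subscriptions/<subscription-id>/resourceGroups/my-resource-group/providers/Microsoft.Network/virtualNetworks/my-vnet',
        ),
    )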
class TopologyResource(msrest.serialization.Model):
"""The network resource topology information for the given resource group.
:param name: Name of the resource.
:type name: str
:param id: ID of the resource.
:type id: str
:param location: Resource location.
:type location: str
:param associations: Holds the associations the resource has with other resources in the
resource group.
:type associations: list[~azure.mgmt.network.v2019_04_01.models.TopologyAssociation]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'associations': {'key': 'associations', 'type': '[TopologyAssociation]'},
}
def __init__(
self,
**kwargs
):
super(TopologyResource, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.id = kwargs.get('id', None)
self.location = kwargs.get('location', None)
self.associations = kwargs.get('associations', None)
class TrafficAnalyticsConfigurationProperties(msrest.serialization.Model):
"""Parameters that define the configuration of traffic analytics.
All required parameters must be populated in order to send to Azure.
:param enabled: Required. Flag to enable/disable traffic analytics.
:type enabled: bool
:param workspace_id: Required. The resource guid of the attached workspace.
:type workspace_id: str
:param workspace_region: Required. The location of the attached workspace.
:type workspace_region: str
:param workspace_resource_id: Required. Resource Id of the attached workspace.
:type workspace_resource_id: str
:param traffic_analytics_interval: The interval, in minutes, that determines how frequently the
 traffic analytics service performs flow analytics.
:type traffic_analytics_interval: int
"""
_validation = {
'enabled': {'required': True},
'workspace_id': {'required': True},
'workspace_region': {'required': True},
'workspace_resource_id': {'required': True},
}
_attribute_map = {
'enabled': {'key': 'enabled', 'type': 'bool'},
'workspace_id': {'key': 'workspaceId', 'type': 'str'},
'workspace_region': {'key': 'workspaceRegion', 'type': 'str'},
'workspace_resource_id': {'key': 'workspaceResourceId', 'type': 'str'},
'traffic_analytics_interval': {'key': 'trafficAnalyticsInterval', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(TrafficAnalyticsConfigurationProperties, self).__init__(**kwargs)
self.enabled = kwargs['enabled']
self.workspace_id = kwargs['workspace_id']
self.workspace_region = kwargs['workspace_region']
self.workspace_resource_id = kwargs['workspace_resource_id']
self.traffic_analytics_interval = kwargs.get('traffic_analytics_interval', None)
class TrafficAnalyticsProperties(msrest.serialization.Model):
"""Parameters that define the configuration of traffic analytics.
All required parameters must be populated in order to send to Azure.
:param network_watcher_flow_analytics_configuration: Required. Parameters that define the
configuration of traffic analytics.
:type network_watcher_flow_analytics_configuration:
~azure.mgmt.network.v2019_04_01.models.TrafficAnalyticsConfigurationProperties
"""
_validation = {
'network_watcher_flow_analytics_configuration': {'required': True},
}
_attribute_map = {
'network_watcher_flow_analytics_configuration': {'key': 'networkWatcherFlowAnalyticsConfiguration', 'type': 'TrafficAnalyticsConfigurationProperties'},
}
def __init__(
self,
**kwargs
):
super(TrafficAnalyticsProperties, self).__init__(**kwargs)
self.network_watcher_flow_analytics_configuration = kwargs['network_watcher_flow_analytics_configuration']
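# Illustrative sketch (not part of the generated SDK surface): wires a
# TrafficAnalyticsConfigurationProperties instance (all four required fields set) into
# TrafficAnalyticsProperties. The workspace identifiers and region are placeholders.
def _example_traffic_analytics_properties():
    configuration = TrafficAnalyticsConfigurationProperties(
        enabled=True,
        workspace_id='<workspace-guid>',
        workspace_region='westus',
        workspace_resource_id='/subscriptions/<subscription-id>/resourceGroups/<rg>/providers/Microsoft.OperationalInsights/workspaces/<workspace>',
        traffic_analytics_interval=10,
    )
    return TrafficAnalyticsProperties(
        network_watcher_flow_analytics_configuration=configuration,
    )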
class TroubleshootingDetails(msrest.serialization.Model):
"""Information gained from troubleshooting of specified resource.
:param id: The ID of the troubleshooting operation.
:type id: str
:param reason_type: Reason type of failure.
:type reason_type: str
:param summary: A summary of troubleshooting.
:type summary: str
:param detail: Details on troubleshooting results.
:type detail: str
:param recommended_actions: List of recommended actions.
:type recommended_actions:
list[~azure.mgmt.network.v2019_04_01.models.TroubleshootingRecommendedActions]
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'reason_type': {'key': 'reasonType', 'type': 'str'},
'summary': {'key': 'summary', 'type': 'str'},
'detail': {'key': 'detail', 'type': 'str'},
'recommended_actions': {'key': 'recommendedActions', 'type': '[TroubleshootingRecommendedActions]'},
}
def __init__(
self,
**kwargs
):
super(TroubleshootingDetails, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.reason_type = kwargs.get('reason_type', None)
self.summary = kwargs.get('summary', None)
self.detail = kwargs.get('detail', None)
self.recommended_actions = kwargs.get('recommended_actions', None)
class TroubleshootingParameters(msrest.serialization.Model):
"""Parameters that define the resource to troubleshoot.
All required parameters must be populated in order to send to Azure.
:param target_resource_id: Required. The target resource to troubleshoot.
:type target_resource_id: str
:param storage_id: Required. The ID of the storage account in which to save the troubleshooting
 result.
:type storage_id: str
:param storage_path: Required. The path to the blob in which to save the troubleshooting result.
:type storage_path: str
"""
_validation = {
'target_resource_id': {'required': True},
'storage_id': {'required': True},
'storage_path': {'required': True},
}
_attribute_map = {
'target_resource_id': {'key': 'targetResourceId', 'type': 'str'},
'storage_id': {'key': 'properties.storageId', 'type': 'str'},
'storage_path': {'key': 'properties.storagePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(TroubleshootingParameters, self).__init__(**kwargs)
self.target_resource_id = kwargs['target_resource_id']
self.storage_id = kwargs['storage_id']
self.storage_path = kwargs['storage_path']
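# Illustrative sketch (not part of the generated SDK surface): TroubleshootingParameters with the
# three required fields supplied; the resource and storage identifiers are placeholders.
def _example_troubleshooting_parameters():
    return TroubleshootingParameters(
        target_resource_id='/subscriptions/<subscription-id>/resourceGroups/<rg>/providers/Microsoft.Network/connections/<connection>',
        storage_id='/subscriptions/<subscription-id>/resourceGroups/<rg>/providers/Microsoft.Storage/storageAccounts/<account>',
        storage_path='https://<account>.blob.core.windows.net/troubleshooting',
    )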
class TroubleshootingRecommendedActions(msrest.serialization.Model):
"""Recommended actions based on discovered issues.
:param action_id: ID of the recommended action.
:type action_id: str
:param action_text: Description of recommended actions.
:type action_text: str
:param action_uri: The URI linking to documentation for the recommended troubleshooting
 actions.
:type action_uri: str
:param action_uri_text: The information from the URI for the recommended troubleshooting
actions.
:type action_uri_text: str
"""
_attribute_map = {
'action_id': {'key': 'actionId', 'type': 'str'},
'action_text': {'key': 'actionText', 'type': 'str'},
'action_uri': {'key': 'actionUri', 'type': 'str'},
'action_uri_text': {'key': 'actionUriText', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(TroubleshootingRecommendedActions, self).__init__(**kwargs)
self.action_id = kwargs.get('action_id', None)
self.action_text = kwargs.get('action_text', None)
self.action_uri = kwargs.get('action_uri', None)
self.action_uri_text = kwargs.get('action_uri_text', None)
class TroubleshootingResult(msrest.serialization.Model):
"""Troubleshooting information gained from specified resource.
:param start_time: The start time of the troubleshooting.
:type start_time: ~datetime.datetime
:param end_time: The end time of the troubleshooting.
:type end_time: ~datetime.datetime
:param code: The result code of the troubleshooting.
:type code: str
:param results: Information from troubleshooting.
:type results: list[~azure.mgmt.network.v2019_04_01.models.TroubleshootingDetails]
"""
_attribute_map = {
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'code': {'key': 'code', 'type': 'str'},
'results': {'key': 'results', 'type': '[TroubleshootingDetails]'},
}
def __init__(
self,
**kwargs
):
super(TroubleshootingResult, self).__init__(**kwargs)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.code = kwargs.get('code', None)
self.results = kwargs.get('results', None)
class TunnelConnectionHealth(msrest.serialization.Model):
"""VirtualNetworkGatewayConnection properties.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar tunnel: Tunnel name.
:vartype tunnel: str
:ivar connection_status: Virtual Network Gateway connection status. Possible values include:
"Unknown", "Connecting", "Connected", "NotConnected".
:vartype connection_status: str or
~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewayConnectionStatus
:ivar ingress_bytes_transferred: The Ingress Bytes Transferred in this connection.
:vartype ingress_bytes_transferred: long
:ivar egress_bytes_transferred: The Egress Bytes Transferred in this connection.
:vartype egress_bytes_transferred: long
:ivar last_connection_established_utc_time: The time at which the connection was established, in
 UTC format.
:vartype last_connection_established_utc_time: str
"""
_validation = {
'tunnel': {'readonly': True},
'connection_status': {'readonly': True},
'ingress_bytes_transferred': {'readonly': True},
'egress_bytes_transferred': {'readonly': True},
'last_connection_established_utc_time': {'readonly': True},
}
_attribute_map = {
'tunnel': {'key': 'tunnel', 'type': 'str'},
'connection_status': {'key': 'connectionStatus', 'type': 'str'},
'ingress_bytes_transferred': {'key': 'ingressBytesTransferred', 'type': 'long'},
'egress_bytes_transferred': {'key': 'egressBytesTransferred', 'type': 'long'},
'last_connection_established_utc_time': {'key': 'lastConnectionEstablishedUtcTime', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(TunnelConnectionHealth, self).__init__(**kwargs)
self.tunnel = None
self.connection_status = None
self.ingress_bytes_transferred = None
self.egress_bytes_transferred = None
self.last_connection_established_utc_time = None
class Usage(msrest.serialization.Model):
"""Describes network resource usage.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Resource identifier.
:vartype id: str
:param unit: Required. An enum describing the unit of measurement. Possible values include:
"Count".
:type unit: str or ~azure.mgmt.network.v2019_04_01.models.UsageUnit
:param current_value: Required. The current value of the usage.
:type current_value: long
:param limit: Required. The limit of usage.
:type limit: long
:param name: Required. The name of the type of usage.
:type name: ~azure.mgmt.network.v2019_04_01.models.UsageName
"""
_validation = {
'id': {'readonly': True},
'unit': {'required': True},
'current_value': {'required': True},
'limit': {'required': True},
'name': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'unit': {'key': 'unit', 'type': 'str'},
'current_value': {'key': 'currentValue', 'type': 'long'},
'limit': {'key': 'limit', 'type': 'long'},
'name': {'key': 'name', 'type': 'UsageName'},
}
def __init__(
self,
**kwargs
):
super(Usage, self).__init__(**kwargs)
self.id = None
self.unit = kwargs['unit']
self.current_value = kwargs['current_value']
self.limit = kwargs['limit']
self.name = kwargs['name']
class UsageName(msrest.serialization.Model):
"""The usage names.
:param value: A string describing the resource name.
:type value: str
:param localized_value: A localized string describing the resource name.
:type localized_value: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': 'str'},
'localized_value': {'key': 'localizedValue', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(UsageName, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.localized_value = kwargs.get('localized_value', None)
class UsagesListResult(msrest.serialization.Model):
"""The list usages operation response.
:param value: The list network resource usages.
:type value: list[~azure.mgmt.network.v2019_04_01.models.Usage]
:param next_link: URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[Usage]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(UsagesListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class VerificationIPFlowParameters(msrest.serialization.Model):
"""Parameters that define the IP flow to be verified.
All required parameters must be populated in order to send to Azure.
:param target_resource_id: Required. The ID of the target resource to verify the IP flow on.
:type target_resource_id: str
:param direction: Required. The direction of the packet represented as a 5-tuple. Possible
values include: "Inbound", "Outbound".
:type direction: str or ~azure.mgmt.network.v2019_04_01.models.Direction
:param protocol: Required. Protocol to be verified on. Possible values include: "TCP", "UDP".
:type protocol: str or ~azure.mgmt.network.v2019_04_01.models.IpFlowProtocol
:param local_port: Required. The local port. Acceptable values are a single integer in the
 range (0-65535). The asterisk '*' is also supported when this is the source port, which depends
 on the direction.
:type local_port: str
:param remote_port: Required. The remote port. Acceptable values are a single integer in the
 range (0-65535). The asterisk '*' is also supported when this is the source port, which depends
 on the direction.
:type remote_port: str
:param local_ip_address: Required. The local IP address. Acceptable values are valid IPv4
addresses.
:type local_ip_address: str
:param remote_ip_address: Required. The remote IP address. Acceptable values are valid IPv4
addresses.
:type remote_ip_address: str
:param target_nic_resource_id: The NIC ID. If the VM has multiple NICs and IP forwarding is
 enabled on any of them, this parameter must be specified; otherwise it is optional.
:type target_nic_resource_id: str
"""
_validation = {
'target_resource_id': {'required': True},
'direction': {'required': True},
'protocol': {'required': True},
'local_port': {'required': True},
'remote_port': {'required': True},
'local_ip_address': {'required': True},
'remote_ip_address': {'required': True},
}
_attribute_map = {
'target_resource_id': {'key': 'targetResourceId', 'type': 'str'},
'direction': {'key': 'direction', 'type': 'str'},
'protocol': {'key': 'protocol', 'type': 'str'},
'local_port': {'key': 'localPort', 'type': 'str'},
'remote_port': {'key': 'remotePort', 'type': 'str'},
'local_ip_address': {'key': 'localIPAddress', 'type': 'str'},
'remote_ip_address': {'key': 'remoteIPAddress', 'type': 'str'},
'target_nic_resource_id': {'key': 'targetNicResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VerificationIPFlowParameters, self).__init__(**kwargs)
self.target_resource_id = kwargs['target_resource_id']
self.direction = kwargs['direction']
self.protocol = kwargs['protocol']
self.local_port = kwargs['local_port']
self.remote_port = kwargs['remote_port']
self.local_ip_address = kwargs['local_ip_address']
self.remote_ip_address = kwargs['remote_ip_address']
self.target_nic_resource_id = kwargs.get('target_nic_resource_id', None)
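# Illustrative sketch (not part of the generated SDK surface): VerificationIPFlowParameters
# describing an outbound TCP flow from a VM; all seven required fields are supplied, and the
# resource ID, ports and addresses are placeholders (203.0.113.10 is a documentation address).
def _example_verification_ip_flow_parameters():
    return VerificationIPFlowParameters(
        target_resource_id='/subscriptions/<subscription-id>/resourceGroups/<rg>/providers/Microsoft.Compute/virtualMachines/<vm>',
        direction='Outbound',
        protocol='TCP',
        local_port='50000',
        remote_port='443',
        local_ip_address='10.0.0.4',
        remote_ip_address='203.0.113.10',
    )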
class VerificationIPFlowResult(msrest.serialization.Model):
"""Results of IP flow verification on the target resource.
:param access: Indicates whether the traffic is allowed or denied. Possible values include:
"Allow", "Deny".
:type access: str or ~azure.mgmt.network.v2019_04_01.models.Access
:param rule_name: Name of the rule. If the input is not matched against any security rule, this
 field is not returned.
:type rule_name: str
"""
_attribute_map = {
'access': {'key': 'access', 'type': 'str'},
'rule_name': {'key': 'ruleName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VerificationIPFlowResult, self).__init__(**kwargs)
self.access = kwargs.get('access', None)
self.rule_name = kwargs.get('rule_name', None)
class VirtualHub(Resource):
"""VirtualHub Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param virtual_wan: The VirtualWAN to which the VirtualHub belongs.
:type virtual_wan: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param vpn_gateway: The VpnGateway associated with this VirtualHub.
:type vpn_gateway: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param p2_s_vpn_gateway: The P2SVpnGateway associated with this VirtualHub.
:type p2_s_vpn_gateway: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param express_route_gateway: The expressRouteGateway associated with this VirtualHub.
:type express_route_gateway: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param virtual_network_connections: List of all vnet connections with this VirtualHub.
:type virtual_network_connections:
list[~azure.mgmt.network.v2019_04_01.models.HubVirtualNetworkConnection]
:param address_prefix: Address-prefix for this VirtualHub.
:type address_prefix: str
:param route_table: The routeTable associated with this virtual hub.
:type route_table: ~azure.mgmt.network.v2019_04_01.models.VirtualHubRouteTable
:ivar provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'virtual_wan': {'key': 'properties.virtualWan', 'type': 'SubResource'},
'vpn_gateway': {'key': 'properties.vpnGateway', 'type': 'SubResource'},
'p2_s_vpn_gateway': {'key': 'properties.p2SVpnGateway', 'type': 'SubResource'},
'express_route_gateway': {'key': 'properties.expressRouteGateway', 'type': 'SubResource'},
'virtual_network_connections': {'key': 'properties.virtualNetworkConnections', 'type': '[HubVirtualNetworkConnection]'},
'address_prefix': {'key': 'properties.addressPrefix', 'type': 'str'},
'route_table': {'key': 'properties.routeTable', 'type': 'VirtualHubRouteTable'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualHub, self).__init__(**kwargs)
self.etag = None
self.virtual_wan = kwargs.get('virtual_wan', None)
self.vpn_gateway = kwargs.get('vpn_gateway', None)
self.p2_s_vpn_gateway = kwargs.get('p2_s_vpn_gateway', None)
self.express_route_gateway = kwargs.get('express_route_gateway', None)
self.virtual_network_connections = kwargs.get('virtual_network_connections', None)
self.address_prefix = kwargs.get('address_prefix', None)
self.route_table = kwargs.get('route_table', None)
self.provisioning_state = None
class VirtualHubId(msrest.serialization.Model):
"""Virtual Hub identifier.
:param id: The resource URI for the Virtual Hub where the ExpressRoute gateway is or will be
deployed. The Virtual Hub resource and the ExpressRoute gateway resource reside in the same
subscription.
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualHubId, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
class VirtualHubRoute(msrest.serialization.Model):
"""VirtualHub route.
:param address_prefixes: List of all addressPrefixes.
:type address_prefixes: list[str]
:param next_hop_ip_address: NextHop ip address.
:type next_hop_ip_address: str
"""
_attribute_map = {
'address_prefixes': {'key': 'addressPrefixes', 'type': '[str]'},
'next_hop_ip_address': {'key': 'nextHopIpAddress', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualHubRoute, self).__init__(**kwargs)
self.address_prefixes = kwargs.get('address_prefixes', None)
self.next_hop_ip_address = kwargs.get('next_hop_ip_address', None)
class VirtualHubRouteTable(msrest.serialization.Model):
"""VirtualHub route table.
:param routes: List of all routes.
:type routes: list[~azure.mgmt.network.v2019_04_01.models.VirtualHubRoute]
"""
_attribute_map = {
'routes': {'key': 'routes', 'type': '[VirtualHubRoute]'},
}
def __init__(
self,
**kwargs
):
super(VirtualHubRouteTable, self).__init__(**kwargs)
self.routes = kwargs.get('routes', None)
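# Illustrative sketch (not part of the generated SDK surface): a VirtualHubRouteTable containing a
# single VirtualHubRoute; the address prefix and next-hop address are placeholders.
def _example_virtual_hub_route_table():
    return VirtualHubRouteTable(
        routes=[
            VirtualHubRoute(
                address_prefixes=['10.1.0.0/16'],
                next_hop_ip_address='10.0.0.5',
            ),
        ],
    )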
class VirtualNetwork(Resource):
"""Virtual Network resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param etag: Gets a unique read-only string that changes whenever the resource is updated.
:type etag: str
:param address_space: The AddressSpace that contains an array of IP address ranges that can be
used by subnets.
:type address_space: ~azure.mgmt.network.v2019_04_01.models.AddressSpace
:param dhcp_options: The dhcpOptions that contains an array of DNS servers available to VMs
deployed in the virtual network.
:type dhcp_options: ~azure.mgmt.network.v2019_04_01.models.DhcpOptions
:param subnets: A list of subnets in a Virtual Network.
:type subnets: list[~azure.mgmt.network.v2019_04_01.models.Subnet]
:param virtual_network_peerings: A list of peerings in a Virtual Network.
:type virtual_network_peerings:
list[~azure.mgmt.network.v2019_04_01.models.VirtualNetworkPeering]
:param resource_guid: The resourceGuid property of the Virtual Network resource.
:type resource_guid: str
:param provisioning_state: The provisioning state of the virtual network resource. Possible
 values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
:param enable_ddos_protection: Indicates if DDoS protection is enabled for all the protected
resources in the virtual network. It requires a DDoS protection plan associated with the
resource.
:type enable_ddos_protection: bool
:param enable_vm_protection: Indicates if VM protection is enabled for all the subnets in the
virtual network.
:type enable_vm_protection: bool
:param ddos_protection_plan: The DDoS protection plan associated with the virtual network.
:type ddos_protection_plan: ~azure.mgmt.network.v2019_04_01.models.SubResource
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'address_space': {'key': 'properties.addressSpace', 'type': 'AddressSpace'},
'dhcp_options': {'key': 'properties.dhcpOptions', 'type': 'DhcpOptions'},
'subnets': {'key': 'properties.subnets', 'type': '[Subnet]'},
'virtual_network_peerings': {'key': 'properties.virtualNetworkPeerings', 'type': '[VirtualNetworkPeering]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'enable_ddos_protection': {'key': 'properties.enableDdosProtection', 'type': 'bool'},
'enable_vm_protection': {'key': 'properties.enableVmProtection', 'type': 'bool'},
'ddos_protection_plan': {'key': 'properties.ddosProtectionPlan', 'type': 'SubResource'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetwork, self).__init__(**kwargs)
self.etag = kwargs.get('etag', None)
self.address_space = kwargs.get('address_space', None)
self.dhcp_options = kwargs.get('dhcp_options', None)
self.subnets = kwargs.get('subnets', None)
self.virtual_network_peerings = kwargs.get('virtual_network_peerings', None)
self.resource_guid = kwargs.get('resource_guid', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.enable_ddos_protection = kwargs.get('enable_ddos_protection', False)
self.enable_vm_protection = kwargs.get('enable_vm_protection', False)
self.ddos_protection_plan = kwargs.get('ddos_protection_plan', None)
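# Illustrative usage sketch, not part of the generated SDK: building a VirtualNetwork
# request body from keyword arguments. It assumes the AddressSpace, DhcpOptions and
# Subnet models defined elsewhere in this module accept the keyword arguments shown;
# the location, prefixes and DNS server are placeholder values.
def _example_virtual_network():
    return VirtualNetwork(
        location='westus',
        address_space=AddressSpace(address_prefixes=['10.0.0.0/16']),
        dhcp_options=DhcpOptions(dns_servers=['10.0.0.53']),
        subnets=[Subnet(name='default', address_prefix='10.0.0.0/24')],
        enable_ddos_protection=False,
        enable_vm_protection=False,
    )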
class VirtualNetworkConnectionGatewayReference(msrest.serialization.Model):
"""A reference to VirtualNetworkGateway or LocalNetworkGateway resource.
All required parameters must be populated in order to send to Azure.
:param id: Required. The ID of VirtualNetworkGateway or LocalNetworkGateway resource.
:type id: str
"""
_validation = {
'id': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkConnectionGatewayReference, self).__init__(**kwargs)
self.id = kwargs['id']
class VirtualNetworkGateway(Resource):
"""A common class for general resource information.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param etag: Gets a unique read-only string that changes whenever the resource is updated.
:type etag: str
:param ip_configurations: IP configurations for virtual network gateway.
:type ip_configurations:
list[~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewayIPConfiguration]
:param gateway_type: The type of this virtual network gateway. Possible values include: "Vpn",
"ExpressRoute".
:type gateway_type: str or ~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewayType
:param vpn_type: The type of this virtual network gateway. Possible values include:
"PolicyBased", "RouteBased".
:type vpn_type: str or ~azure.mgmt.network.v2019_04_01.models.VpnType
:param enable_bgp: Whether BGP is enabled for this virtual network gateway or not.
:type enable_bgp: bool
:param active: ActiveActive flag.
:type active: bool
:param gateway_default_site: The reference to the LocalNetworkGateway resource that represents
the local network site with default routes. Assign a null value to remove an existing default
site setting.
:type gateway_default_site: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param sku: The reference of the VirtualNetworkGatewaySku resource which represents the SKU
selected for Virtual network gateway.
:type sku: ~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewaySku
:param vpn_client_configuration: The reference of the VpnClientConfiguration resource which
represents the P2S VpnClient configurations.
:type vpn_client_configuration: ~azure.mgmt.network.v2019_04_01.models.VpnClientConfiguration
:param bgp_settings: Virtual network gateway's BGP speaker settings.
:type bgp_settings: ~azure.mgmt.network.v2019_04_01.models.BgpSettings
:param custom_routes: The reference of the address space resource which represents the custom
routes address space specified by the customer for virtual network gateway and VpnClient.
:type custom_routes: ~azure.mgmt.network.v2019_04_01.models.AddressSpace
:param resource_guid: The resource GUID property of the VirtualNetworkGateway resource.
:type resource_guid: str
:ivar provisioning_state: The provisioning state of the VirtualNetworkGateway resource.
Possible values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[VirtualNetworkGatewayIPConfiguration]'},
'gateway_type': {'key': 'properties.gatewayType', 'type': 'str'},
'vpn_type': {'key': 'properties.vpnType', 'type': 'str'},
'enable_bgp': {'key': 'properties.enableBgp', 'type': 'bool'},
'active': {'key': 'properties.activeActive', 'type': 'bool'},
'gateway_default_site': {'key': 'properties.gatewayDefaultSite', 'type': 'SubResource'},
'sku': {'key': 'properties.sku', 'type': 'VirtualNetworkGatewaySku'},
'vpn_client_configuration': {'key': 'properties.vpnClientConfiguration', 'type': 'VpnClientConfiguration'},
'bgp_settings': {'key': 'properties.bgpSettings', 'type': 'BgpSettings'},
'custom_routes': {'key': 'properties.customRoutes', 'type': 'AddressSpace'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkGateway, self).__init__(**kwargs)
self.etag = kwargs.get('etag', None)
self.ip_configurations = kwargs.get('ip_configurations', None)
self.gateway_type = kwargs.get('gateway_type', None)
self.vpn_type = kwargs.get('vpn_type', None)
self.enable_bgp = kwargs.get('enable_bgp', None)
self.active = kwargs.get('active', None)
self.gateway_default_site = kwargs.get('gateway_default_site', None)
self.sku = kwargs.get('sku', None)
self.vpn_client_configuration = kwargs.get('vpn_client_configuration', None)
self.bgp_settings = kwargs.get('bgp_settings', None)
self.custom_routes = kwargs.get('custom_routes', None)
self.resource_guid = kwargs.get('resource_guid', None)
self.provisioning_state = None
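# Illustrative usage sketch, not part of the generated SDK: a route-based VPN gateway
# definition. SubResource, VirtualNetworkGatewaySku and
# VirtualNetworkGatewayIPConfiguration are models from this module; the resource IDs
# in angle brackets are placeholders to be replaced by the caller.
def _example_virtual_network_gateway():
    return VirtualNetworkGateway(
        location='westus',
        gateway_type='Vpn',
        vpn_type='RouteBased',
        enable_bgp=False,
        active=False,
        sku=VirtualNetworkGatewaySku(name='VpnGw1', tier='VpnGw1'),
        ip_configurations=[
            VirtualNetworkGatewayIPConfiguration(
                name='gwipconfig1',
                private_ip_allocation_method='Dynamic',
                subnet=SubResource(id='<gateway-subnet-resource-id>'),
                public_ip_address=SubResource(id='<public-ip-resource-id>'),
            ),
        ],
    )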
class VirtualNetworkGatewayConnection(Resource):
"""A common class for general resource information.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param etag: Gets a unique read-only string that changes whenever the resource is updated.
:type etag: str
:param authorization_key: The authorizationKey.
:type authorization_key: str
:param virtual_network_gateway1: Required. The reference to virtual network gateway resource.
:type virtual_network_gateway1: ~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGateway
:param virtual_network_gateway2: The reference to virtual network gateway resource.
:type virtual_network_gateway2: ~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGateway
:param local_network_gateway2: The reference to local network gateway resource.
:type local_network_gateway2: ~azure.mgmt.network.v2019_04_01.models.LocalNetworkGateway
:param connection_type: Required. Gateway connection type. Possible values include: "IPsec",
"Vnet2Vnet", "ExpressRoute", "VPNClient".
:type connection_type: str or
~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewayConnectionType
:param connection_protocol: Connection protocol used for this connection. Possible values
include: "IKEv2", "IKEv1".
:type connection_protocol: str or
~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewayConnectionProtocol
:param routing_weight: The routing weight.
:type routing_weight: int
:param shared_key: The IPSec shared key.
:type shared_key: str
:ivar connection_status: Virtual Network Gateway connection status. Possible values include:
"Unknown", "Connecting", "Connected", "NotConnected".
:vartype connection_status: str or
~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewayConnectionStatus
:ivar tunnel_connection_status: Collection of all tunnels' connection health status.
:vartype tunnel_connection_status:
list[~azure.mgmt.network.v2019_04_01.models.TunnelConnectionHealth]
:ivar egress_bytes_transferred: The egress bytes transferred in this connection.
:vartype egress_bytes_transferred: long
:ivar ingress_bytes_transferred: The ingress bytes transferred in this connection.
:vartype ingress_bytes_transferred: long
:param peer: The reference to peerings resource.
:type peer: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param enable_bgp: EnableBgp flag.
:type enable_bgp: bool
:param use_policy_based_traffic_selectors: Enable policy-based traffic selectors.
:type use_policy_based_traffic_selectors: bool
:param ipsec_policies: The IPSec Policies to be considered by this connection.
:type ipsec_policies: list[~azure.mgmt.network.v2019_04_01.models.IpsecPolicy]
:param resource_guid: The resource GUID property of the VirtualNetworkGatewayConnection
resource.
:type resource_guid: str
:ivar provisioning_state: The provisioning state of the VirtualNetworkGatewayConnection
resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
:param express_route_gateway_bypass: Bypass ExpressRoute Gateway for data forwarding.
:type express_route_gateway_bypass: bool
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'virtual_network_gateway1': {'required': True},
'connection_type': {'required': True},
'connection_status': {'readonly': True},
'tunnel_connection_status': {'readonly': True},
'egress_bytes_transferred': {'readonly': True},
'ingress_bytes_transferred': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'authorization_key': {'key': 'properties.authorizationKey', 'type': 'str'},
'virtual_network_gateway1': {'key': 'properties.virtualNetworkGateway1', 'type': 'VirtualNetworkGateway'},
'virtual_network_gateway2': {'key': 'properties.virtualNetworkGateway2', 'type': 'VirtualNetworkGateway'},
'local_network_gateway2': {'key': 'properties.localNetworkGateway2', 'type': 'LocalNetworkGateway'},
'connection_type': {'key': 'properties.connectionType', 'type': 'str'},
'connection_protocol': {'key': 'properties.connectionProtocol', 'type': 'str'},
'routing_weight': {'key': 'properties.routingWeight', 'type': 'int'},
'shared_key': {'key': 'properties.sharedKey', 'type': 'str'},
'connection_status': {'key': 'properties.connectionStatus', 'type': 'str'},
'tunnel_connection_status': {'key': 'properties.tunnelConnectionStatus', 'type': '[TunnelConnectionHealth]'},
'egress_bytes_transferred': {'key': 'properties.egressBytesTransferred', 'type': 'long'},
'ingress_bytes_transferred': {'key': 'properties.ingressBytesTransferred', 'type': 'long'},
'peer': {'key': 'properties.peer', 'type': 'SubResource'},
'enable_bgp': {'key': 'properties.enableBgp', 'type': 'bool'},
'use_policy_based_traffic_selectors': {'key': 'properties.usePolicyBasedTrafficSelectors', 'type': 'bool'},
'ipsec_policies': {'key': 'properties.ipsecPolicies', 'type': '[IpsecPolicy]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'express_route_gateway_bypass': {'key': 'properties.expressRouteGatewayBypass', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkGatewayConnection, self).__init__(**kwargs)
self.etag = kwargs.get('etag', None)
self.authorization_key = kwargs.get('authorization_key', None)
self.virtual_network_gateway1 = kwargs['virtual_network_gateway1']
self.virtual_network_gateway2 = kwargs.get('virtual_network_gateway2', None)
self.local_network_gateway2 = kwargs.get('local_network_gateway2', None)
self.connection_type = kwargs['connection_type']
self.connection_protocol = kwargs.get('connection_protocol', None)
self.routing_weight = kwargs.get('routing_weight', None)
self.shared_key = kwargs.get('shared_key', None)
self.connection_status = None
self.tunnel_connection_status = None
self.egress_bytes_transferred = None
self.ingress_bytes_transferred = None
self.peer = kwargs.get('peer', None)
self.enable_bgp = kwargs.get('enable_bgp', None)
self.use_policy_based_traffic_selectors = kwargs.get('use_policy_based_traffic_selectors', None)
self.ipsec_policies = kwargs.get('ipsec_policies', None)
self.resource_guid = kwargs.get('resource_guid', None)
self.provisioning_state = None
self.express_route_gateway_bypass = kwargs.get('express_route_gateway_bypass', None)
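# Illustrative usage sketch, not part of the generated SDK: virtual_network_gateway1
# and connection_type are required keyword arguments, so __init__ raises KeyError if
# either is omitted. The gateway arguments are VirtualNetworkGateway instances and the
# shared key is a placeholder value.
def _example_vnet_to_vnet_connection(gateway1, gateway2):
    return VirtualNetworkGatewayConnection(
        location='westus',
        virtual_network_gateway1=gateway1,
        virtual_network_gateway2=gateway2,
        connection_type='Vnet2Vnet',
        routing_weight=10,
        shared_key='placeholder-shared-key',
        enable_bgp=False,
    )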
class VirtualNetworkGatewayConnectionListEntity(Resource):
"""A common class for general resource information.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param etag: Gets a unique read-only string that changes whenever the resource is updated.
:type etag: str
:param authorization_key: The authorizationKey.
:type authorization_key: str
:param virtual_network_gateway1: Required. The reference to virtual network gateway resource.
:type virtual_network_gateway1:
~azure.mgmt.network.v2019_04_01.models.VirtualNetworkConnectionGatewayReference
:param virtual_network_gateway2: The reference to virtual network gateway resource.
:type virtual_network_gateway2:
~azure.mgmt.network.v2019_04_01.models.VirtualNetworkConnectionGatewayReference
:param local_network_gateway2: The reference to local network gateway resource.
:type local_network_gateway2:
~azure.mgmt.network.v2019_04_01.models.VirtualNetworkConnectionGatewayReference
:param connection_type: Required. Gateway connection type. Possible values include: "IPsec",
"Vnet2Vnet", "ExpressRoute", "VPNClient".
:type connection_type: str or
~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewayConnectionType
:param connection_protocol: Connection protocol used for this connection. Possible values
include: "IKEv2", "IKEv1".
:type connection_protocol: str or
~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewayConnectionProtocol
:param routing_weight: The routing weight.
:type routing_weight: int
:param shared_key: The IPSec shared key.
:type shared_key: str
:ivar connection_status: Virtual Network Gateway connection status. Possible values include:
"Unknown", "Connecting", "Connected", "NotConnected".
:vartype connection_status: str or
~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewayConnectionStatus
:ivar tunnel_connection_status: Collection of all tunnels' connection health status.
:vartype tunnel_connection_status:
list[~azure.mgmt.network.v2019_04_01.models.TunnelConnectionHealth]
:ivar egress_bytes_transferred: The egress bytes transferred in this connection.
:vartype egress_bytes_transferred: long
:ivar ingress_bytes_transferred: The ingress bytes transferred in this connection.
:vartype ingress_bytes_transferred: long
:param peer: The reference to peerings resource.
:type peer: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param enable_bgp: EnableBgp flag.
:type enable_bgp: bool
:param use_policy_based_traffic_selectors: Enable policy-based traffic selectors.
:type use_policy_based_traffic_selectors: bool
:param ipsec_policies: The IPSec Policies to be considered by this connection.
:type ipsec_policies: list[~azure.mgmt.network.v2019_04_01.models.IpsecPolicy]
:param resource_guid: The resource GUID property of the VirtualNetworkGatewayConnection
resource.
:type resource_guid: str
:ivar provisioning_state: The provisioning state of the VirtualNetworkGatewayConnection
resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
:param express_route_gateway_bypass: Bypass ExpressRoute Gateway for data forwarding.
:type express_route_gateway_bypass: bool
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'virtual_network_gateway1': {'required': True},
'connection_type': {'required': True},
'connection_status': {'readonly': True},
'tunnel_connection_status': {'readonly': True},
'egress_bytes_transferred': {'readonly': True},
'ingress_bytes_transferred': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'authorization_key': {'key': 'properties.authorizationKey', 'type': 'str'},
'virtual_network_gateway1': {'key': 'properties.virtualNetworkGateway1', 'type': 'VirtualNetworkConnectionGatewayReference'},
'virtual_network_gateway2': {'key': 'properties.virtualNetworkGateway2', 'type': 'VirtualNetworkConnectionGatewayReference'},
'local_network_gateway2': {'key': 'properties.localNetworkGateway2', 'type': 'VirtualNetworkConnectionGatewayReference'},
'connection_type': {'key': 'properties.connectionType', 'type': 'str'},
'connection_protocol': {'key': 'properties.connectionProtocol', 'type': 'str'},
'routing_weight': {'key': 'properties.routingWeight', 'type': 'int'},
'shared_key': {'key': 'properties.sharedKey', 'type': 'str'},
'connection_status': {'key': 'properties.connectionStatus', 'type': 'str'},
'tunnel_connection_status': {'key': 'properties.tunnelConnectionStatus', 'type': '[TunnelConnectionHealth]'},
'egress_bytes_transferred': {'key': 'properties.egressBytesTransferred', 'type': 'long'},
'ingress_bytes_transferred': {'key': 'properties.ingressBytesTransferred', 'type': 'long'},
'peer': {'key': 'properties.peer', 'type': 'SubResource'},
'enable_bgp': {'key': 'properties.enableBgp', 'type': 'bool'},
'use_policy_based_traffic_selectors': {'key': 'properties.usePolicyBasedTrafficSelectors', 'type': 'bool'},
'ipsec_policies': {'key': 'properties.ipsecPolicies', 'type': '[IpsecPolicy]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'express_route_gateway_bypass': {'key': 'properties.expressRouteGatewayBypass', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkGatewayConnectionListEntity, self).__init__(**kwargs)
self.etag = kwargs.get('etag', None)
self.authorization_key = kwargs.get('authorization_key', None)
self.virtual_network_gateway1 = kwargs['virtual_network_gateway1']
self.virtual_network_gateway2 = kwargs.get('virtual_network_gateway2', None)
self.local_network_gateway2 = kwargs.get('local_network_gateway2', None)
self.connection_type = kwargs['connection_type']
self.connection_protocol = kwargs.get('connection_protocol', None)
self.routing_weight = kwargs.get('routing_weight', None)
self.shared_key = kwargs.get('shared_key', None)
self.connection_status = None
self.tunnel_connection_status = None
self.egress_bytes_transferred = None
self.ingress_bytes_transferred = None
self.peer = kwargs.get('peer', None)
self.enable_bgp = kwargs.get('enable_bgp', None)
self.use_policy_based_traffic_selectors = kwargs.get('use_policy_based_traffic_selectors', None)
self.ipsec_policies = kwargs.get('ipsec_policies', None)
self.resource_guid = kwargs.get('resource_guid', None)
self.provisioning_state = None
self.express_route_gateway_bypass = kwargs.get('express_route_gateway_bypass', None)
class VirtualNetworkGatewayConnectionListResult(msrest.serialization.Model):
"""Response for the ListVirtualNetworkGatewayConnections API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: Gets a list of VirtualNetworkGatewayConnection resources that exists in a
resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewayConnection]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualNetworkGatewayConnection]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkGatewayConnectionListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class VirtualNetworkGatewayIPConfiguration(SubResource):
"""IP configuration for virtual network gateway.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param private_ip_allocation_method: The private IP address allocation method. Possible values
include: "Static", "Dynamic".
:type private_ip_allocation_method: str or
~azure.mgmt.network.v2019_04_01.models.IPAllocationMethod
:param subnet: The reference of the subnet resource.
:type subnet: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param public_ip_address: The reference of the public IP resource.
:type public_ip_address: ~azure.mgmt.network.v2019_04_01.models.SubResource
:ivar provisioning_state: The provisioning state of the virtual network gateway IP configuration
resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'private_ip_allocation_method': {'key': 'properties.privateIPAllocationMethod', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'SubResource'},
'public_ip_address': {'key': 'properties.publicIPAddress', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkGatewayIPConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.private_ip_allocation_method = kwargs.get('private_ip_allocation_method', None)
self.subnet = kwargs.get('subnet', None)
self.public_ip_address = kwargs.get('public_ip_address', None)
self.provisioning_state = None
class VirtualNetworkGatewayListConnectionsResult(msrest.serialization.Model):
"""Response for the VirtualNetworkGatewayListConnections API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: Gets a list of VirtualNetworkGatewayConnection resources that exists in a
resource group.
:type value:
list[~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewayConnectionListEntity]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualNetworkGatewayConnectionListEntity]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkGatewayListConnectionsResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class VirtualNetworkGatewayListResult(msrest.serialization.Model):
"""Response for the ListVirtualNetworkGateways API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: Gets a list of VirtualNetworkGateway resources that exists in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGateway]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualNetworkGateway]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkGatewayListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class VirtualNetworkGatewaySku(msrest.serialization.Model):
"""VirtualNetworkGatewaySku details.
:param name: Gateway SKU name. Possible values include: "Basic", "HighPerformance", "Standard",
"UltraPerformance", "VpnGw1", "VpnGw2", "VpnGw3", "VpnGw1AZ", "VpnGw2AZ", "VpnGw3AZ",
"ErGw1AZ", "ErGw2AZ", "ErGw3AZ".
:type name: str or ~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewaySkuName
:param tier: Gateway SKU tier. Possible values include: "Basic", "HighPerformance", "Standard",
"UltraPerformance", "VpnGw1", "VpnGw2", "VpnGw3", "VpnGw1AZ", "VpnGw2AZ", "VpnGw3AZ",
"ErGw1AZ", "ErGw2AZ", "ErGw3AZ".
:type tier: str or ~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewaySkuTier
:param capacity: The capacity.
:type capacity: int
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'tier': {'key': 'tier', 'type': 'str'},
'capacity': {'key': 'capacity', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkGatewaySku, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.tier = kwargs.get('tier', None)
self.capacity = kwargs.get('capacity', None)
class VirtualNetworkListResult(msrest.serialization.Model):
"""Response for the ListVirtualNetworks API service call.
:param value: Gets a list of VirtualNetwork resources in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.VirtualNetwork]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualNetwork]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class VirtualNetworkListUsageResult(msrest.serialization.Model):
"""Response for the virtual networks GetUsage API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: VirtualNetwork usage stats.
:vartype value: list[~azure.mgmt.network.v2019_04_01.models.VirtualNetworkUsage]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualNetworkUsage]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkListUsageResult, self).__init__(**kwargs)
self.value = None
self.next_link = kwargs.get('next_link', None)
class VirtualNetworkPeering(SubResource):
"""Peerings in a virtual network resource.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param allow_virtual_network_access: Whether the VMs in the local virtual network space would
be able to access the VMs in remote virtual network space.
:type allow_virtual_network_access: bool
:param allow_forwarded_traffic: Whether the forwarded traffic from the VMs in the local virtual
network will be allowed/disallowed in remote virtual network.
:type allow_forwarded_traffic: bool
:param allow_gateway_transit: If gateway links can be used in remote virtual networking to link
to this virtual network.
:type allow_gateway_transit: bool
:param use_remote_gateways: If remote gateways can be used on this virtual network. If the flag
is set to true, and allowGatewayTransit on remote peering is also true, virtual network will
use gateways of remote virtual network for transit. Only one peering can have this flag set to
true. This flag cannot be set if virtual network already has a gateway.
:type use_remote_gateways: bool
:param remote_virtual_network: The reference of the remote virtual network. The remote virtual
network can be in the same or different region (preview). See here to register for the preview
and learn more
(https://docs.microsoft.com/en-us/azure/virtual-network/virtual-network-create-peering).
:type remote_virtual_network: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param remote_address_space: The reference of the remote virtual network address space.
:type remote_address_space: ~azure.mgmt.network.v2019_04_01.models.AddressSpace
:param peering_state: The status of the virtual network peering. Possible values include:
"Initiated", "Connected", "Disconnected".
:type peering_state: str or ~azure.mgmt.network.v2019_04_01.models.VirtualNetworkPeeringState
:param provisioning_state: The provisioning state of the resource.
:type provisioning_state: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'allow_virtual_network_access': {'key': 'properties.allowVirtualNetworkAccess', 'type': 'bool'},
'allow_forwarded_traffic': {'key': 'properties.allowForwardedTraffic', 'type': 'bool'},
'allow_gateway_transit': {'key': 'properties.allowGatewayTransit', 'type': 'bool'},
'use_remote_gateways': {'key': 'properties.useRemoteGateways', 'type': 'bool'},
'remote_virtual_network': {'key': 'properties.remoteVirtualNetwork', 'type': 'SubResource'},
'remote_address_space': {'key': 'properties.remoteAddressSpace', 'type': 'AddressSpace'},
'peering_state': {'key': 'properties.peeringState', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkPeering, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.allow_virtual_network_access = kwargs.get('allow_virtual_network_access', None)
self.allow_forwarded_traffic = kwargs.get('allow_forwarded_traffic', None)
self.allow_gateway_transit = kwargs.get('allow_gateway_transit', None)
self.use_remote_gateways = kwargs.get('use_remote_gateways', None)
self.remote_virtual_network = kwargs.get('remote_virtual_network', None)
self.remote_address_space = kwargs.get('remote_address_space', None)
self.peering_state = kwargs.get('peering_state', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
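# Illustrative usage sketch, not part of the generated SDK: a peering that allows
# access from the remote virtual network and uses its gateways for transit. The
# remote virtual network resource ID is a placeholder.
def _example_virtual_network_peering():
    return VirtualNetworkPeering(
        name='peer-to-hub',
        allow_virtual_network_access=True,
        allow_forwarded_traffic=True,
        allow_gateway_transit=False,
        use_remote_gateways=True,
        remote_virtual_network=SubResource(id='<remote-vnet-resource-id>'),
    )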
class VirtualNetworkPeeringListResult(msrest.serialization.Model):
"""Response for ListSubnets API service call. Retrieves all subnets that belong to a virtual network.
:param value: The peerings in a virtual network.
:type value: list[~azure.mgmt.network.v2019_04_01.models.VirtualNetworkPeering]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualNetworkPeering]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkPeeringListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class VirtualNetworkTap(Resource):
"""Virtual Network Tap resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param etag: Gets a unique read-only string that changes whenever the resource is updated.
:type etag: str
:ivar network_interface_tap_configurations: Specifies the list of resource IDs for the network
interface IP configuration that needs to be tapped.
:vartype network_interface_tap_configurations:
list[~azure.mgmt.network.v2019_04_01.models.NetworkInterfaceTapConfiguration]
:ivar resource_guid: The resourceGuid property of the virtual network tap.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the virtual network tap. Possible values
are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
:param destination_network_interface_ip_configuration: The reference to the private IP Address
of the collector nic that will receive the tap.
:type destination_network_interface_ip_configuration:
~azure.mgmt.network.v2019_04_01.models.NetworkInterfaceIPConfiguration
:param destination_load_balancer_front_end_ip_configuration: The reference to the private IP
address on the internal Load Balancer that will receive the tap.
:type destination_load_balancer_front_end_ip_configuration:
~azure.mgmt.network.v2019_04_01.models.FrontendIPConfiguration
:param destination_port: The VXLAN destination port that will receive the tapped traffic.
:type destination_port: int
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'network_interface_tap_configurations': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'network_interface_tap_configurations': {'key': 'properties.networkInterfaceTapConfigurations', 'type': '[NetworkInterfaceTapConfiguration]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'destination_network_interface_ip_configuration': {'key': 'properties.destinationNetworkInterfaceIPConfiguration', 'type': 'NetworkInterfaceIPConfiguration'},
'destination_load_balancer_front_end_ip_configuration': {'key': 'properties.destinationLoadBalancerFrontEndIPConfiguration', 'type': 'FrontendIPConfiguration'},
'destination_port': {'key': 'properties.destinationPort', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkTap, self).__init__(**kwargs)
self.etag = kwargs.get('etag', None)
self.network_interface_tap_configurations = None
self.resource_guid = None
self.provisioning_state = None
self.destination_network_interface_ip_configuration = kwargs.get('destination_network_interface_ip_configuration', None)
self.destination_load_balancer_front_end_ip_configuration = kwargs.get('destination_load_balancer_front_end_ip_configuration', None)
self.destination_port = kwargs.get('destination_port', None)
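# Illustrative usage sketch, not part of the generated SDK: a virtual network tap
# that mirrors traffic to a collector NIC IP configuration referenced by resource ID
# only. It assumes the NetworkInterfaceIPConfiguration model defined elsewhere in this
# module accepts an ``id`` keyword argument; the ID and port are placeholder values.
def _example_virtual_network_tap():
    return VirtualNetworkTap(
        location='westus',
        destination_network_interface_ip_configuration=NetworkInterfaceIPConfiguration(
            id='<collector-nic-ipconfig-resource-id>',
        ),
        destination_port=4789,
    )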
class VirtualNetworkTapListResult(msrest.serialization.Model):
"""Response for ListVirtualNetworkTap API service call.
:param value: A list of VirtualNetworkTaps in a resource group.
:type value: list[~azure.mgmt.network.v2019_04_01.models.VirtualNetworkTap]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualNetworkTap]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkTapListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class VirtualNetworkUsage(msrest.serialization.Model):
"""Usage details for subnet.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar current_value: Indicates number of IPs used from the Subnet.
:vartype current_value: float
:ivar id: Subnet identifier.
:vartype id: str
:ivar limit: Indicates the size of the subnet.
:vartype limit: float
:ivar name: The name containing common and localized value for usage.
:vartype name: ~azure.mgmt.network.v2019_04_01.models.VirtualNetworkUsageName
:ivar unit: Usage units. Returns 'Count'.
:vartype unit: str
"""
_validation = {
'current_value': {'readonly': True},
'id': {'readonly': True},
'limit': {'readonly': True},
'name': {'readonly': True},
'unit': {'readonly': True},
}
_attribute_map = {
'current_value': {'key': 'currentValue', 'type': 'float'},
'id': {'key': 'id', 'type': 'str'},
'limit': {'key': 'limit', 'type': 'float'},
'name': {'key': 'name', 'type': 'VirtualNetworkUsageName'},
'unit': {'key': 'unit', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkUsage, self).__init__(**kwargs)
self.current_value = None
self.id = None
self.limit = None
self.name = None
self.unit = None
class VirtualNetworkUsageName(msrest.serialization.Model):
"""Usage strings container.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar localized_value: Localized subnet size and usage string.
:vartype localized_value: str
:ivar value: Subnet size and usage string.
:vartype value: str
"""
_validation = {
'localized_value': {'readonly': True},
'value': {'readonly': True},
}
_attribute_map = {
'localized_value': {'key': 'localizedValue', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkUsageName, self).__init__(**kwargs)
self.localized_value = None
self.value = None
class VirtualWAN(Resource):
"""VirtualWAN Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param disable_vpn_encryption: Whether VPN encryption is disabled for this VirtualWAN.
:type disable_vpn_encryption: bool
:ivar virtual_hubs: List of VirtualHubs in the VirtualWAN.
:vartype virtual_hubs: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:ivar vpn_sites: List of VpnSites in the VirtualWAN.
:vartype vpn_sites: list[~azure.mgmt.network.v2019_04_01.models.SubResource]
:param security_provider_name: The Security Provider name.
:type security_provider_name: str
:param allow_branch_to_branch_traffic: True if branch to branch traffic is allowed.
:type allow_branch_to_branch_traffic: bool
:param allow_vnet_to_vnet_traffic: True if Vnet to Vnet traffic is allowed.
:type allow_vnet_to_vnet_traffic: bool
:ivar office365_local_breakout_category: The office local breakout category. Possible values
include: "Optimize", "OptimizeAndAllow", "All", "None".
:vartype office365_local_breakout_category: str or
~azure.mgmt.network.v2019_04_01.models.OfficeTrafficCategory
:param p2_s_vpn_server_configurations: List of all P2SVpnServerConfigurations associated with
the virtual wan.
:type p2_s_vpn_server_configurations:
list[~azure.mgmt.network.v2019_04_01.models.P2SVpnServerConfiguration]
:ivar provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'virtual_hubs': {'readonly': True},
'vpn_sites': {'readonly': True},
'office365_local_breakout_category': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'disable_vpn_encryption': {'key': 'properties.disableVpnEncryption', 'type': 'bool'},
'virtual_hubs': {'key': 'properties.virtualHubs', 'type': '[SubResource]'},
'vpn_sites': {'key': 'properties.vpnSites', 'type': '[SubResource]'},
'security_provider_name': {'key': 'properties.securityProviderName', 'type': 'str'},
'allow_branch_to_branch_traffic': {'key': 'properties.allowBranchToBranchTraffic', 'type': 'bool'},
'allow_vnet_to_vnet_traffic': {'key': 'properties.allowVnetToVnetTraffic', 'type': 'bool'},
'office365_local_breakout_category': {'key': 'properties.office365LocalBreakoutCategory', 'type': 'str'},
'p2_s_vpn_server_configurations': {'key': 'properties.p2SVpnServerConfigurations', 'type': '[P2SVpnServerConfiguration]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualWAN, self).__init__(**kwargs)
self.etag = None
self.disable_vpn_encryption = kwargs.get('disable_vpn_encryption', None)
self.virtual_hubs = None
self.vpn_sites = None
self.security_provider_name = kwargs.get('security_provider_name', None)
self.allow_branch_to_branch_traffic = kwargs.get('allow_branch_to_branch_traffic', None)
self.allow_vnet_to_vnet_traffic = kwargs.get('allow_vnet_to_vnet_traffic', None)
self.office365_local_breakout_category = None
self.p2_s_vpn_server_configurations = kwargs.get('p2_s_vpn_server_configurations', None)
self.provisioning_state = None
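# Illustrative usage sketch, not part of the generated SDK: a minimal VirtualWAN
# definition. Only writable properties are set; server-populated fields such as
# virtual_hubs and vpn_sites are left to the service.
def _example_virtual_wan():
    return VirtualWAN(
        location='westus',
        disable_vpn_encryption=False,
        allow_branch_to_branch_traffic=True,
        allow_vnet_to_vnet_traffic=True,
    )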
class VirtualWanSecurityProvider(msrest.serialization.Model):
"""Collection of SecurityProviders.
Variables are only populated by the server, and will be ignored when sending a request.
:param name: Name of the security provider.
:type name: str
:param url: Url of the security provider.
:type url: str
:ivar type: The type of the security provider. Possible values include: "External", "Native".
:vartype type: str or ~azure.mgmt.network.v2019_04_01.models.VirtualWanSecurityProviderType
"""
_validation = {
'type': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualWanSecurityProvider, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.url = kwargs.get('url', None)
self.type = None
class VirtualWanSecurityProviders(msrest.serialization.Model):
"""Collection of SecurityProviders.
:param supported_providers: List of VirtualWAN security providers.
:type supported_providers:
list[~azure.mgmt.network.v2019_04_01.models.VirtualWanSecurityProvider]
"""
_attribute_map = {
'supported_providers': {'key': 'supportedProviders', 'type': '[VirtualWanSecurityProvider]'},
}
def __init__(
self,
**kwargs
):
super(VirtualWanSecurityProviders, self).__init__(**kwargs)
self.supported_providers = kwargs.get('supported_providers', None)
class VpnClientConfiguration(msrest.serialization.Model):
"""VpnClientConfiguration for P2S client.
:param vpn_client_address_pool: The reference of the address space resource which represents
Address space for P2S VpnClient.
:type vpn_client_address_pool: ~azure.mgmt.network.v2019_04_01.models.AddressSpace
:param vpn_client_root_certificates: VpnClientRootCertificate for virtual network gateway.
:type vpn_client_root_certificates:
list[~azure.mgmt.network.v2019_04_01.models.VpnClientRootCertificate]
:param vpn_client_revoked_certificates: VpnClientRevokedCertificate for Virtual network
gateway.
:type vpn_client_revoked_certificates:
list[~azure.mgmt.network.v2019_04_01.models.VpnClientRevokedCertificate]
:param vpn_client_protocols: VpnClientProtocols for Virtual network gateway.
:type vpn_client_protocols: list[str or
~azure.mgmt.network.v2019_04_01.models.VpnClientProtocol]
:param vpn_client_ipsec_policies: VpnClientIpsecPolicies for virtual network gateway P2S
client.
:type vpn_client_ipsec_policies: list[~azure.mgmt.network.v2019_04_01.models.IpsecPolicy]
:param radius_server_address: The radius server address property of the VirtualNetworkGateway
resource for vpn client connection.
:type radius_server_address: str
:param radius_server_secret: The radius secret property of the VirtualNetworkGateway resource
for vpn client connection.
:type radius_server_secret: str
:param aad_tenant: The AADTenant property of the VirtualNetworkGateway resource for vpn client
connection used for AAD authentication.
:type aad_tenant: str
:param aad_audience: The AADAudience property of the VirtualNetworkGateway resource for vpn
client connection used for AAD authentication.
:type aad_audience: str
:param aad_issuer: The AADIssuer property of the VirtualNetworkGateway resource for vpn client
connection used for AAD authentication.
:type aad_issuer: str
"""
_attribute_map = {
'vpn_client_address_pool': {'key': 'vpnClientAddressPool', 'type': 'AddressSpace'},
'vpn_client_root_certificates': {'key': 'vpnClientRootCertificates', 'type': '[VpnClientRootCertificate]'},
'vpn_client_revoked_certificates': {'key': 'vpnClientRevokedCertificates', 'type': '[VpnClientRevokedCertificate]'},
'vpn_client_protocols': {'key': 'vpnClientProtocols', 'type': '[str]'},
'vpn_client_ipsec_policies': {'key': 'vpnClientIpsecPolicies', 'type': '[IpsecPolicy]'},
'radius_server_address': {'key': 'radiusServerAddress', 'type': 'str'},
'radius_server_secret': {'key': 'radiusServerSecret', 'type': 'str'},
'aad_tenant': {'key': 'aadTenant', 'type': 'str'},
'aad_audience': {'key': 'aadAudience', 'type': 'str'},
'aad_issuer': {'key': 'aadIssuer', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnClientConfiguration, self).__init__(**kwargs)
self.vpn_client_address_pool = kwargs.get('vpn_client_address_pool', None)
self.vpn_client_root_certificates = kwargs.get('vpn_client_root_certificates', None)
self.vpn_client_revoked_certificates = kwargs.get('vpn_client_revoked_certificates', None)
self.vpn_client_protocols = kwargs.get('vpn_client_protocols', None)
self.vpn_client_ipsec_policies = kwargs.get('vpn_client_ipsec_policies', None)
self.radius_server_address = kwargs.get('radius_server_address', None)
self.radius_server_secret = kwargs.get('radius_server_secret', None)
self.aad_tenant = kwargs.get('aad_tenant', None)
self.aad_audience = kwargs.get('aad_audience', None)
self.aad_issuer = kwargs.get('aad_issuer', None)
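# Illustrative usage sketch, not part of the generated SDK: a point-to-site client
# configuration using certificate authentication. It assumes the AddressSpace model
# defined elsewhere in this module accepts ``address_prefixes``; the address pool,
# protocol value and certificate data are placeholders.
def _example_vpn_client_configuration():
    return VpnClientConfiguration(
        vpn_client_address_pool=AddressSpace(address_prefixes=['172.16.201.0/24']),
        vpn_client_protocols=['IkeV2'],
        vpn_client_root_certificates=[
            VpnClientRootCertificate(
                name='rootCert1',
                public_cert_data='<base64-encoded-certificate-data>',
            ),
        ],
    )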
class VpnClientConnectionHealth(msrest.serialization.Model):
"""VpnClientConnectionHealth properties.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar total_ingress_bytes_transferred: Total of the Ingress Bytes Transferred in this P2S Vpn
connection.
:vartype total_ingress_bytes_transferred: long
:ivar total_egress_bytes_transferred: Total of the Egress Bytes Transferred in this connection.
:vartype total_egress_bytes_transferred: long
:param vpn_client_connections_count: The total number of P2S VPN clients connected to this
P2SVpnGateway at this time.
:type vpn_client_connections_count: int
:param allocated_ip_addresses: List of IP addresses allocated to the connected P2S VPN clients.
:type allocated_ip_addresses: list[str]
"""
_validation = {
'total_ingress_bytes_transferred': {'readonly': True},
'total_egress_bytes_transferred': {'readonly': True},
}
_attribute_map = {
'total_ingress_bytes_transferred': {'key': 'totalIngressBytesTransferred', 'type': 'long'},
'total_egress_bytes_transferred': {'key': 'totalEgressBytesTransferred', 'type': 'long'},
'vpn_client_connections_count': {'key': 'vpnClientConnectionsCount', 'type': 'int'},
'allocated_ip_addresses': {'key': 'allocatedIpAddresses', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(VpnClientConnectionHealth, self).__init__(**kwargs)
self.total_ingress_bytes_transferred = None
self.total_egress_bytes_transferred = None
self.vpn_client_connections_count = kwargs.get('vpn_client_connections_count', None)
self.allocated_ip_addresses = kwargs.get('allocated_ip_addresses', None)
class VpnClientConnectionHealthDetail(msrest.serialization.Model):
"""VPN client connection health detail.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar vpn_connection_id: The vpn client Id.
:vartype vpn_connection_id: str
:ivar vpn_connection_duration: The duration time of a connected vpn client.
:vartype vpn_connection_duration: long
:ivar vpn_connection_time: The start time of a connected vpn client.
:vartype vpn_connection_time: str
:ivar public_ip_address: The public Ip of a connected vpn client.
:vartype public_ip_address: str
:ivar private_ip_address: The assigned private Ip of a connected vpn client.
:vartype private_ip_address: str
:ivar vpn_user_name: The user name of a connected vpn client.
:vartype vpn_user_name: str
:ivar max_bandwidth: The maximum bandwidth.
:vartype max_bandwidth: long
:ivar egress_packets_transferred: The egress packets per second.
:vartype egress_packets_transferred: long
:ivar egress_bytes_transferred: The egress bytes per second.
:vartype egress_bytes_transferred: long
:ivar ingress_packets_transferred: The ingress packets per second.
:vartype ingress_packets_transferred: long
:ivar ingress_bytes_transferred: The ingress bytes per second.
:vartype ingress_bytes_transferred: long
:ivar max_packets_per_second: The max packets transferred per second.
:vartype max_packets_per_second: long
"""
_validation = {
'vpn_connection_id': {'readonly': True},
'vpn_connection_duration': {'readonly': True},
'vpn_connection_time': {'readonly': True},
'public_ip_address': {'readonly': True},
'private_ip_address': {'readonly': True},
'vpn_user_name': {'readonly': True},
'max_bandwidth': {'readonly': True},
'egress_packets_transferred': {'readonly': True},
'egress_bytes_transferred': {'readonly': True},
'ingress_packets_transferred': {'readonly': True},
'ingress_bytes_transferred': {'readonly': True},
'max_packets_per_second': {'readonly': True},
}
_attribute_map = {
'vpn_connection_id': {'key': 'vpnConnectionId', 'type': 'str'},
'vpn_connection_duration': {'key': 'vpnConnectionDuration', 'type': 'long'},
'vpn_connection_time': {'key': 'vpnConnectionTime', 'type': 'str'},
'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
'vpn_user_name': {'key': 'vpnUserName', 'type': 'str'},
'max_bandwidth': {'key': 'maxBandwidth', 'type': 'long'},
'egress_packets_transferred': {'key': 'egressPacketsTransferred', 'type': 'long'},
'egress_bytes_transferred': {'key': 'egressBytesTransferred', 'type': 'long'},
'ingress_packets_transferred': {'key': 'ingressPacketsTransferred', 'type': 'long'},
'ingress_bytes_transferred': {'key': 'ingressBytesTransferred', 'type': 'long'},
'max_packets_per_second': {'key': 'maxPacketsPerSecond', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
super(VpnClientConnectionHealthDetail, self).__init__(**kwargs)
self.vpn_connection_id = None
self.vpn_connection_duration = None
self.vpn_connection_time = None
self.public_ip_address = None
self.private_ip_address = None
self.vpn_user_name = None
self.max_bandwidth = None
self.egress_packets_transferred = None
self.egress_bytes_transferred = None
self.ingress_packets_transferred = None
self.ingress_bytes_transferred = None
self.max_packets_per_second = None
class VpnClientConnectionHealthDetailListResult(msrest.serialization.Model):
"""List of virtual network gateway vpn client connection health.
:param value: List of vpn client connection health.
:type value: list[~azure.mgmt.network.v2019_04_01.models.VpnClientConnectionHealthDetail]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VpnClientConnectionHealthDetail]'},
}
def __init__(
self,
**kwargs
):
super(VpnClientConnectionHealthDetailListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
class VpnClientIPsecParameters(msrest.serialization.Model):
"""An IPSec parameters for a virtual network gateway P2S connection.
All required parameters must be populated in order to send to Azure.
:param sa_life_time_seconds: Required. The IPSec Security Association (also called Quick Mode
or Phase 2 SA) lifetime in seconds for P2S client.
:type sa_life_time_seconds: int
:param sa_data_size_kilobytes: Required. The IPSec Security Association (also called Quick Mode
or Phase 2 SA) payload size in KB for P2S client..
:type sa_data_size_kilobytes: int
:param ipsec_encryption: Required. The IPSec encryption algorithm (IKE phase 1). Possible
values include: "None", "DES", "DES3", "AES128", "AES192", "AES256", "GCMAES128", "GCMAES192",
"GCMAES256".
:type ipsec_encryption: str or ~azure.mgmt.network.v2019_04_01.models.IpsecEncryption
:param ipsec_integrity: Required. The IPSec integrity algorithm (IKE phase 1). Possible values
include: "MD5", "SHA1", "SHA256", "GCMAES128", "GCMAES192", "GCMAES256".
:type ipsec_integrity: str or ~azure.mgmt.network.v2019_04_01.models.IpsecIntegrity
:param ike_encryption: Required. The IKE encryption algorithm (IKE phase 2). Possible values
include: "DES", "DES3", "AES128", "AES192", "AES256", "GCMAES256", "GCMAES128".
:type ike_encryption: str or ~azure.mgmt.network.v2019_04_01.models.IkeEncryption
:param ike_integrity: Required. The IKE integrity algorithm (IKE phase 2). Possible values
include: "MD5", "SHA1", "SHA256", "SHA384", "GCMAES256", "GCMAES128".
:type ike_integrity: str or ~azure.mgmt.network.v2019_04_01.models.IkeIntegrity
:param dh_group: Required. The DH Group used in IKE Phase 1 for initial SA. Possible values
include: "None", "DHGroup1", "DHGroup2", "DHGroup14", "DHGroup2048", "ECP256", "ECP384",
"DHGroup24".
:type dh_group: str or ~azure.mgmt.network.v2019_04_01.models.DhGroup
:param pfs_group: Required. The Pfs Group used in IKE Phase 2 for new child SA. Possible values
include: "None", "PFS1", "PFS2", "PFS2048", "ECP256", "ECP384", "PFS24", "PFS14", "PFSMM".
:type pfs_group: str or ~azure.mgmt.network.v2019_04_01.models.PfsGroup
"""
_validation = {
'sa_life_time_seconds': {'required': True},
'sa_data_size_kilobytes': {'required': True},
'ipsec_encryption': {'required': True},
'ipsec_integrity': {'required': True},
'ike_encryption': {'required': True},
'ike_integrity': {'required': True},
'dh_group': {'required': True},
'pfs_group': {'required': True},
}
_attribute_map = {
'sa_life_time_seconds': {'key': 'saLifeTimeSeconds', 'type': 'int'},
'sa_data_size_kilobytes': {'key': 'saDataSizeKilobytes', 'type': 'int'},
'ipsec_encryption': {'key': 'ipsecEncryption', 'type': 'str'},
'ipsec_integrity': {'key': 'ipsecIntegrity', 'type': 'str'},
'ike_encryption': {'key': 'ikeEncryption', 'type': 'str'},
'ike_integrity': {'key': 'ikeIntegrity', 'type': 'str'},
'dh_group': {'key': 'dhGroup', 'type': 'str'},
'pfs_group': {'key': 'pfsGroup', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnClientIPsecParameters, self).__init__(**kwargs)
self.sa_life_time_seconds = kwargs['sa_life_time_seconds']
self.sa_data_size_kilobytes = kwargs['sa_data_size_kilobytes']
self.ipsec_encryption = kwargs['ipsec_encryption']
self.ipsec_integrity = kwargs['ipsec_integrity']
self.ike_encryption = kwargs['ike_encryption']
self.ike_integrity = kwargs['ike_integrity']
self.dh_group = kwargs['dh_group']
self.pfs_group = kwargs['pfs_group']
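# Illustrative usage sketch, not part of the generated SDK: every field of
# VpnClientIPsecParameters is required, so all eight keyword arguments must be
# supplied or __init__ raises KeyError. The algorithm names are taken from the
# enumerations documented above; the numeric lifetimes are placeholder values.
def _example_vpn_client_ipsec_parameters():
    return VpnClientIPsecParameters(
        sa_life_time_seconds=86472,
        sa_data_size_kilobytes=429497,
        ipsec_encryption='AES256',
        ipsec_integrity='SHA256',
        ike_encryption='AES256',
        ike_integrity='SHA384',
        dh_group='DHGroup24',
        pfs_group='PFS24',
    )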
class VpnClientParameters(msrest.serialization.Model):
"""Vpn Client Parameters for package generation.
:param processor_architecture: VPN client Processor Architecture. Possible values include:
"Amd64", "X86".
:type processor_architecture: str or
~azure.mgmt.network.v2019_04_01.models.ProcessorArchitecture
:param authentication_method: VPN client authentication method. Possible values include:
"EAPTLS", "EAPMSCHAPv2".
:type authentication_method: str or ~azure.mgmt.network.v2019_04_01.models.AuthenticationMethod
:param radius_server_auth_certificate: The public certificate data for the radius server
authentication certificate as a Base-64 encoded string. Required only if external radius
authentication has been configured with EAPTLS authentication.
:type radius_server_auth_certificate: str
:param client_root_certificates: A list of client root certificates public certificate data
encoded as Base-64 strings. Optional parameter for external radius based authentication with
EAPTLS.
:type client_root_certificates: list[str]
"""
_attribute_map = {
'processor_architecture': {'key': 'processorArchitecture', 'type': 'str'},
'authentication_method': {'key': 'authenticationMethod', 'type': 'str'},
'radius_server_auth_certificate': {'key': 'radiusServerAuthCertificate', 'type': 'str'},
'client_root_certificates': {'key': 'clientRootCertificates', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(VpnClientParameters, self).__init__(**kwargs)
self.processor_architecture = kwargs.get('processor_architecture', None)
self.authentication_method = kwargs.get('authentication_method', None)
self.radius_server_auth_certificate = kwargs.get('radius_server_auth_certificate', None)
self.client_root_certificates = kwargs.get('client_root_certificates', None)
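# Illustrative example (values are assumptions): when generating an EAPTLS package against an
# external RADIUS server, the RADIUS server's public certificate is supplied as Base-64 text,
# and client root certificates may optionally be listed as well.
#
#   vpn_client_params = VpnClientParameters(
#       processor_architecture='Amd64',
#       authentication_method='EAPTLS',
#       radius_server_auth_certificate='<base64-encoded certificate>',
#   )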
class VpnClientRevokedCertificate(SubResource):
"""VPN client revoked certificate of virtual network gateway.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param thumbprint: The revoked VPN client certificate thumbprint.
:type thumbprint: str
:ivar provisioning_state: The provisioning state of the VPN client revoked certificate
resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'thumbprint': {'key': 'properties.thumbprint', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnClientRevokedCertificate, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.thumbprint = kwargs.get('thumbprint', None)
self.provisioning_state = None
class VpnClientRootCertificate(SubResource):
"""VPN client root certificate of virtual network gateway.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:param public_cert_data: Required. The certificate public data.
:type public_cert_data: str
:ivar provisioning_state: The provisioning state of the VPN client root certificate resource.
Possible values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
'public_cert_data': {'required': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'public_cert_data': {'key': 'properties.publicCertData', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnClientRootCertificate, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.public_cert_data = kwargs['public_cert_data']
self.provisioning_state = None
class VpnConnection(SubResource):
"""VpnConnection Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param remote_vpn_site: Id of the connected vpn site.
:type remote_vpn_site: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param routing_weight: Routing weight for vpn connection.
:type routing_weight: int
:ivar connection_status: The connection status. Possible values include: "Unknown",
"Connecting", "Connected", "NotConnected".
:vartype connection_status: str or ~azure.mgmt.network.v2019_04_01.models.VpnConnectionStatus
:param vpn_connection_protocol_type: Connection protocol used for this connection. Possible
values include: "IKEv2", "IKEv1".
:type vpn_connection_protocol_type: str or
~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewayConnectionProtocol
:ivar ingress_bytes_transferred: Ingress bytes transferred.
:vartype ingress_bytes_transferred: long
:ivar egress_bytes_transferred: Egress bytes transferred.
:vartype egress_bytes_transferred: long
:param connection_bandwidth: Expected bandwidth in MBPS.
:type connection_bandwidth: int
:param shared_key: SharedKey for the vpn connection.
:type shared_key: str
:param enable_bgp: EnableBgp flag.
:type enable_bgp: bool
:param use_policy_based_traffic_selectors: Enable policy-based traffic selectors.
:type use_policy_based_traffic_selectors: bool
:param ipsec_policies: The IPSec Policies to be considered by this connection.
:type ipsec_policies: list[~azure.mgmt.network.v2019_04_01.models.IpsecPolicy]
    :param enable_rate_limiting: EnableRateLimiting flag.
:type enable_rate_limiting: bool
:param enable_internet_security: Enable internet security.
:type enable_internet_security: bool
:param use_local_azure_ip_address: Use local azure ip to initiate connection.
:type use_local_azure_ip_address: bool
:ivar provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'connection_status': {'readonly': True},
'ingress_bytes_transferred': {'readonly': True},
'egress_bytes_transferred': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'remote_vpn_site': {'key': 'properties.remoteVpnSite', 'type': 'SubResource'},
'routing_weight': {'key': 'properties.routingWeight', 'type': 'int'},
'connection_status': {'key': 'properties.connectionStatus', 'type': 'str'},
'vpn_connection_protocol_type': {'key': 'properties.vpnConnectionProtocolType', 'type': 'str'},
'ingress_bytes_transferred': {'key': 'properties.ingressBytesTransferred', 'type': 'long'},
'egress_bytes_transferred': {'key': 'properties.egressBytesTransferred', 'type': 'long'},
'connection_bandwidth': {'key': 'properties.connectionBandwidth', 'type': 'int'},
'shared_key': {'key': 'properties.sharedKey', 'type': 'str'},
'enable_bgp': {'key': 'properties.enableBgp', 'type': 'bool'},
'use_policy_based_traffic_selectors': {'key': 'properties.usePolicyBasedTrafficSelectors', 'type': 'bool'},
'ipsec_policies': {'key': 'properties.ipsecPolicies', 'type': '[IpsecPolicy]'},
'enable_rate_limiting': {'key': 'properties.enableRateLimiting', 'type': 'bool'},
'enable_internet_security': {'key': 'properties.enableInternetSecurity', 'type': 'bool'},
'use_local_azure_ip_address': {'key': 'properties.useLocalAzureIpAddress', 'type': 'bool'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnConnection, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.remote_vpn_site = kwargs.get('remote_vpn_site', None)
self.routing_weight = kwargs.get('routing_weight', None)
self.connection_status = None
self.vpn_connection_protocol_type = kwargs.get('vpn_connection_protocol_type', None)
self.ingress_bytes_transferred = None
self.egress_bytes_transferred = None
self.connection_bandwidth = kwargs.get('connection_bandwidth', None)
self.shared_key = kwargs.get('shared_key', None)
self.enable_bgp = kwargs.get('enable_bgp', None)
self.use_policy_based_traffic_selectors = kwargs.get('use_policy_based_traffic_selectors', None)
self.ipsec_policies = kwargs.get('ipsec_policies', None)
self.enable_rate_limiting = kwargs.get('enable_rate_limiting', None)
self.enable_internet_security = kwargs.get('enable_internet_security', None)
self.use_local_azure_ip_address = kwargs.get('use_local_azure_ip_address', None)
self.provisioning_state = None
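# Note (added for clarity): the read-only attributes above (etag, connection_status, the byte
# counters and provisioning_state) are populated by the service and reset to None here; only
# the writable keyword arguments are sent in requests. Hypothetical construction:
#
#   connection = VpnConnection(
#       name='to-branch-office',
#       remote_vpn_site=SubResource(id=remote_site_id),  # assumes remote_site_id defined elsewhere
#       vpn_connection_protocol_type='IKEv2',
#       enable_bgp=False,
#   )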
class VpnDeviceScriptParameters(msrest.serialization.Model):
"""Vpn device configuration script generation parameters.
:param vendor: The vendor for the vpn device.
:type vendor: str
:param device_family: The device family for the vpn device.
:type device_family: str
:param firmware_version: The firmware version for the vpn device.
:type firmware_version: str
"""
_attribute_map = {
'vendor': {'key': 'vendor', 'type': 'str'},
'device_family': {'key': 'deviceFamily', 'type': 'str'},
'firmware_version': {'key': 'firmwareVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnDeviceScriptParameters, self).__init__(**kwargs)
self.vendor = kwargs.get('vendor', None)
self.device_family = kwargs.get('device_family', None)
self.firmware_version = kwargs.get('firmware_version', None)
class VpnGateway(Resource):
"""VpnGateway Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param virtual_hub: The VirtualHub to which the gateway belongs.
:type virtual_hub: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param connections: List of all vpn connections to the gateway.
:type connections: list[~azure.mgmt.network.v2019_04_01.models.VpnConnection]
:param bgp_settings: Local network gateway's BGP speaker settings.
:type bgp_settings: ~azure.mgmt.network.v2019_04_01.models.BgpSettings
:ivar provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
:param vpn_gateway_scale_unit: The scale unit for this vpn gateway.
:type vpn_gateway_scale_unit: int
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'virtual_hub': {'key': 'properties.virtualHub', 'type': 'SubResource'},
'connections': {'key': 'properties.connections', 'type': '[VpnConnection]'},
'bgp_settings': {'key': 'properties.bgpSettings', 'type': 'BgpSettings'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'vpn_gateway_scale_unit': {'key': 'properties.vpnGatewayScaleUnit', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(VpnGateway, self).__init__(**kwargs)
self.etag = None
self.virtual_hub = kwargs.get('virtual_hub', None)
self.connections = kwargs.get('connections', None)
self.bgp_settings = kwargs.get('bgp_settings', None)
self.provisioning_state = None
self.vpn_gateway_scale_unit = kwargs.get('vpn_gateway_scale_unit', None)
class VpnProfileResponse(msrest.serialization.Model):
"""Vpn Profile Response for package generation.
:param profile_url: URL to the VPN profile.
:type profile_url: str
"""
_attribute_map = {
'profile_url': {'key': 'profileUrl', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnProfileResponse, self).__init__(**kwargs)
self.profile_url = kwargs.get('profile_url', None)
class VpnSite(Resource):
"""VpnSite Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param virtual_wan: The VirtualWAN to which the vpnSite belongs.
:type virtual_wan: ~azure.mgmt.network.v2019_04_01.models.SubResource
:param device_properties: The device properties.
:type device_properties: ~azure.mgmt.network.v2019_04_01.models.DeviceProperties
:param ip_address: The ip-address for the vpn-site.
:type ip_address: str
:param site_key: The key for vpn-site that can be used for connections.
:type site_key: str
:param address_space: The AddressSpace that contains an array of IP address ranges.
:type address_space: ~azure.mgmt.network.v2019_04_01.models.AddressSpace
:param bgp_properties: The set of bgp properties.
:type bgp_properties: ~azure.mgmt.network.v2019_04_01.models.BgpSettings
:ivar provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_04_01.models.ProvisioningState
:param is_security_site: IsSecuritySite flag.
:type is_security_site: bool
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'virtual_wan': {'key': 'properties.virtualWan', 'type': 'SubResource'},
'device_properties': {'key': 'properties.deviceProperties', 'type': 'DeviceProperties'},
'ip_address': {'key': 'properties.ipAddress', 'type': 'str'},
'site_key': {'key': 'properties.siteKey', 'type': 'str'},
'address_space': {'key': 'properties.addressSpace', 'type': 'AddressSpace'},
'bgp_properties': {'key': 'properties.bgpProperties', 'type': 'BgpSettings'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'is_security_site': {'key': 'properties.isSecuritySite', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(VpnSite, self).__init__(**kwargs)
self.etag = None
self.virtual_wan = kwargs.get('virtual_wan', None)
self.device_properties = kwargs.get('device_properties', None)
self.ip_address = kwargs.get('ip_address', None)
self.site_key = kwargs.get('site_key', None)
self.address_space = kwargs.get('address_space', None)
self.bgp_properties = kwargs.get('bgp_properties', None)
self.provisioning_state = None
self.is_security_site = kwargs.get('is_security_site', None)
class VpnSiteId(msrest.serialization.Model):
"""VpnSite Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar vpn_site: The resource-uri of the vpn-site for which config is to be fetched.
:vartype vpn_site: str
"""
_validation = {
'vpn_site': {'readonly': True},
}
_attribute_map = {
'vpn_site': {'key': 'vpnSite', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnSiteId, self).__init__(**kwargs)
self.vpn_site = None
class WebApplicationFirewallCustomRule(msrest.serialization.Model):
"""Defines contents of a web application rule.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
    :param name: The name of the resource that is unique within a policy. This name can be used
     to access the resource.
:type name: str
:ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param priority: Required. Describes priority of the rule. Rules with a lower value will be
evaluated before rules with a higher value.
:type priority: int
:param rule_type: Required. Describes type of rule. Possible values include: "MatchRule",
"Invalid".
:type rule_type: str or ~azure.mgmt.network.v2019_04_01.models.WebApplicationFirewallRuleType
:param match_conditions: Required. List of match conditions.
:type match_conditions: list[~azure.mgmt.network.v2019_04_01.models.MatchCondition]
:param action: Required. Type of Actions. Possible values include: "Allow", "Block", "Log".
:type action: str or ~azure.mgmt.network.v2019_04_01.models.WebApplicationFirewallAction
"""
_validation = {
'name': {'max_length': 128, 'min_length': 0},
'etag': {'readonly': True},
'priority': {'required': True},
'rule_type': {'required': True},
'match_conditions': {'required': True},
'action': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'priority': {'key': 'priority', 'type': 'int'},
'rule_type': {'key': 'ruleType', 'type': 'str'},
'match_conditions': {'key': 'matchConditions', 'type': '[MatchCondition]'},
'action': {'key': 'action', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WebApplicationFirewallCustomRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.priority = kwargs['priority']
self.rule_type = kwargs['rule_type']
self.match_conditions = kwargs['match_conditions']
self.action = kwargs['action']
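# Illustrative example (argument values are assumptions): priority, rule_type, match_conditions
# and action are all required; MatchCondition instances are defined elsewhere in this module.
#
#   block_rule = WebApplicationFirewallCustomRule(
#       name='block-suspicious-traffic',
#       priority=10,
#       rule_type='MatchRule',
#       match_conditions=[match_condition],  # a MatchCondition built elsewhere
#       action='Block',
#   )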
class WebApplicationFirewallPolicy(Resource):
"""Defines web application firewall policy.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param etag: Gets a unique read-only string that changes whenever the resource is updated.
:type etag: str
:param policy_settings: Describes policySettings for policy.
:type policy_settings: ~azure.mgmt.network.v2019_04_01.models.PolicySettings
:param custom_rules: Describes custom rules inside the policy.
:type custom_rules:
list[~azure.mgmt.network.v2019_04_01.models.WebApplicationFirewallCustomRule]
:ivar application_gateways: A collection of references to application gateways.
:vartype application_gateways: list[~azure.mgmt.network.v2019_04_01.models.ApplicationGateway]
:ivar provisioning_state: Provisioning state of the WebApplicationFirewallPolicy.
:vartype provisioning_state: str
:ivar resource_state: Resource status of the policy. Possible values include: "Creating",
"Enabling", "Enabled", "Disabling", "Disabled", "Deleting".
:vartype resource_state: str or
~azure.mgmt.network.v2019_04_01.models.WebApplicationFirewallPolicyResourceState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'application_gateways': {'readonly': True},
'provisioning_state': {'readonly': True},
'resource_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'policy_settings': {'key': 'properties.policySettings', 'type': 'PolicySettings'},
'custom_rules': {'key': 'properties.customRules', 'type': '[WebApplicationFirewallCustomRule]'},
'application_gateways': {'key': 'properties.applicationGateways', 'type': '[ApplicationGateway]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'resource_state': {'key': 'properties.resourceState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WebApplicationFirewallPolicy, self).__init__(**kwargs)
self.etag = kwargs.get('etag', None)
self.policy_settings = kwargs.get('policy_settings', None)
self.custom_rules = kwargs.get('custom_rules', None)
self.application_gateways = None
self.provisioning_state = None
self.resource_state = None
class WebApplicationFirewallPolicyListResult(msrest.serialization.Model):
"""Result of the request to list WebApplicationFirewallPolicies. It contains a list of WebApplicationFirewallPolicy objects and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of WebApplicationFirewallPolicies within a resource group.
:vartype value: list[~azure.mgmt.network.v2019_04_01.models.WebApplicationFirewallPolicy]
:ivar next_link: URL to get the next set of WebApplicationFirewallPolicy objects if there are
any.
:vartype next_link: str
"""
_validation = {
'value': {'readonly': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[WebApplicationFirewallPolicy]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WebApplicationFirewallPolicyListResult, self).__init__(**kwargs)
self.value = None
self.next_link = None<|fim▁end|> |
def __init__(
self, |
<|file_name|>user.server.model.js<|end_file_name|><|fim▁begin|>'use strict';
/**
* Module dependencies.
*/
var mongoose = require('mongoose'),
extend = require('mongoose-schema-extend'),
Schema = mongoose.Schema,
moment = require('moment'),
crypto = require('crypto');
/**
* A Validation function for local strategy properties
*/
var validateLocalStrategyProperty = function(property) {
return ((this.provider !== 'local' && !this.updated) || property.length);
};
/**
* A Validation function for local strategy password
*/
var validateLocalStrategyPassword = function(password) {
return (this.provider !== 'local' || (password && password.length > 6));
};
/**
* User Schema
*/
var UserSchema = new Schema({
firstName: {
type: String,
trim: true,
default: '',
validate: [validateLocalStrategyProperty, 'Please fill in your first name']
},
lastName: {
type: String,
trim: true,
default: '',
validate: [validateLocalStrategyProperty, 'Please fill in your last name']
},
email: {
type: String,
trim: true,<|fim▁hole|> validate: [validateLocalStrategyProperty, 'Please fill in your email'],
match: [/.+\@.+\..+/, 'Please fill a valid email address']
},
username: {
type: String,
required: 'Please fill in a username',
trim: true
},
password: {
type: String,
default: '',
validate: [validateLocalStrategyPassword, 'Password should be longer']
},
salt: {
type: String
},
provider: {
type: String,
required: 'Provider is required',
},
providerData: {},
additionalProvidersData: {},
updated: {
type: Date
},
created: {
type: Date,
default: Date.now
}
}, {
collection: 'users',
discriminatorKey: '_type'
});
/**
* SkillCategory Schema
*/
var SkillCategorySchema = new Schema({
name: {
type: String,
required: 'Name of skill category is important'
}
});
/**
* Skill Schema
*/
var SkillSchema = new Schema({
name: {
type: String
},
category: {
type: Schema.ObjectId,
ref: 'SkillCategory'
},
});
/**
* Assessment Schema
*/
var AssessmentSchema = new Schema({
assessment_name: {
type: String,
trim: true,
required: 'Name of assessment is important'
},
assessment_date: {
type: Date,
required: 'Date of assessment is important'
},
applicantId: {
type: Schema.ObjectId,
ref: 'Applicant'
},
instructorId: {
type: Schema.ObjectId,
ref: 'Instructor'
},
score: {
type: Number,
required: 'The Applicant score is compulsory'
}
});
/**
* Placement Schema
*/
var PlacementSchema = new Schema({
company: {
type: String,
trim: true,
required: 'Name of company is important'
},
jobDescription: {
type: String,
required: 'Job description is important'
},
start_date: {
type: Date,
required: 'Start date is important'
},
end_date: {
type: Date,
required: 'End date is important'
}
});
/**
*
* Applicant Schema, Trainee and Fellow
*/
var ApplicantSchema = UserSchema.extend({
testScore: {
type: Number,
required: 'Applicant score must be submitted'
},
cvPath: {
type: String
},
photo_path: String,
role: {
type: String,
enum: ['applicant', 'trainee', 'fellow']
},
status: {
name: {
type: String,
enum: ['pending', 'rejected', 'selected for bootcamp', 'selected for interview'],
default: 'pending'
},
reason: {
type: String,
default: ''
}
},
portfolio: {
type: String
},
skillSet: [{
skill: {
type: Schema.Types.ObjectId,
ref: 'Skill'
},
rating: {
type: Number
}
}],
skillSummary: {},
profile: {
type: String
},
campId: {
type: Schema.ObjectId,
ref: 'Bootcamp'
},
assessments: [AssessmentSchema],
placements: [{
type: Schema.Types.ObjectId,
ref: 'Placement'
}]
});
/**
* Instructor Schema
*/
var InstructorSchema = UserSchema.extend({
experience: {
type: String
},
photo: {
type: String
},
role: {
type: String,
enum: ['instructor', 'admin']
},
skillSet: [SkillSchema]
});
/**
* Bootcamp Schema
*/
var BootcampSchema = new Schema({
camp_name: {
type: String,
trim: true
},
location: {
type: String,
required: 'Please fill in the Bootcamp location',
default: 'Lagos',
trim: true
},
start_date: {
type: Date
},
end_date: {
type: Date
},
created: {
type: Date,
default: Date.now
},
applicants: [{
type: Schema.Types.ObjectId,
ref: 'Applicant'
}]
});
/**
* Hook a pre save method to hash the password
*/
UserSchema.pre('save', function(next) {
if (this.password && this.password.length > 6) {
this.salt = new Buffer(crypto.randomBytes(16).toString('base64'), 'base64');
this.password = this.hashPassword(this.password);
}
next();
});
ApplicantSchema.pre('save', function(next) {
if (this.password && this.password.length > 6) {
this.salt = new Buffer(crypto.randomBytes(16).toString('base64'), 'base64');
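  // When the applicant is being saved as an embedded document, `this` lacks the schema
  // methods, so a temporary Applicant model instance is built below just to reuse hashPassword().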
if (this.constructor.name === 'EmbeddedDocument') {
var TempApplicant = mongoose.model('Applicant');
var embeddedDocApplicant = new TempApplicant(this);
this.password = embeddedDocApplicant.hashPassword(this.password);
} else {
this.password = this.hashPassword(this.password);
}
}
next();
});
InstructorSchema.pre('save', function(next) {
if (this.password && this.password.length > 6) {
this.salt = new Buffer(crypto.randomBytes(16).toString('base64'), 'base64');
this.password = this.hashPassword(this.password);
}
next();
});
BootcampSchema.pre('save', function(next) {
if (this.start_date && this.location) {
this.camp_name = moment(this.start_date).format('MMMM D, YYYY') + ', ' + this.location;
}
next();
});
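// Post-save hook: whenever a new skill is created, push it onto every existing applicant's
// skillSet with an initial rating of 0.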
SkillSchema.post('save', function(next) {
var skill = this;
var Applicant = mongoose.model('Applicant');
Applicant.find().exec(function(err, applicants) {
applicants.forEach(function(applicant) {
Applicant.update({
_id: applicant._id
}, {
$push: {
'skillSet': {
skill: skill._id,
rating: 0
}
}
}, function(err) {
if (err) {
return {
message: 'Couldn\'t add skill to applicant'
};
}
});
});
});
});
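// Post-save hook: when a new applicant is created, seed their skillSet with every existing
// skill (rating 0) and initialize skillSummary with a zero count for each skill category.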
ApplicantSchema.post('save', function(next) {
var applicant = this;
var Skill = mongoose.model('Skill');
var Applicant = mongoose.model('Applicant');
//Initialize skill summary
var SkillCategory = mongoose.model('SkillCategory');
var skillSummary = {};
SkillCategory.find().exec(function(err, skillCategories) {
skillCategories.forEach(function(category) {
skillSummary[category.name] = 0;
});
Skill.find().exec(function(err, skills) {
skills.forEach(function(skill) {
Applicant.update({
_id: applicant._id
}, {
$push: {
'skillSet': {
skill: skill._id,
rating: 0
}
},
$set: {
'skillSummary': skillSummary
}
}, function(err) {
if (err) {
return {
message: 'Couldn\'t add skill to applicant'
};
}
});
});
});
});
});
/**
* Create instance method for hashing a password
*/
UserSchema.methods.hashPassword = function(password) {
if (this.salt && password) {
return crypto.pbkdf2Sync(password, this.salt, 10000, 64).toString('base64');
} else {
return password;
}
};
ApplicantSchema.methods.hashPassword = function(password) {
if (this.salt && password) {
return crypto.pbkdf2Sync(password, this.salt, 10000, 64).toString('base64');
} else {
return password;
}
};
InstructorSchema.methods.hashPassword = function(password) {
if (this.salt && password) {
return crypto.pbkdf2Sync(password, this.salt, 10000, 64).toString('base64');
} else {
return password;
}
};
/**
* Create instance method for authenticating user
*/
UserSchema.methods.authenticate = function(password) {
return this.password === this.hashPassword(password);
};
ApplicantSchema.methods.authenticate = function(password) {
return this.password === this.hashPassword(password);
};
InstructorSchema.methods.authenticate = function(password) {
return this.password === this.hashPassword(password);
};
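// Usage sketch (illustrative only; names and values are assumptions): the pre-save hooks above
// generate a per-document salt and store a PBKDF2 hash, so verifying a login is simply
// re-hashing the supplied password and comparing.
//
//   var User = mongoose.model('User');   // registered at the bottom of this file
//   var user = new User({
//     firstName: 'Ada',
//     lastName: 'Lovelace',
//     email: '[email protected]',
//     username: 'ada',
//     password: 'longer-than-6',
//     provider: 'local'
//   });
//   user.save(function(err) {
//     if (!err) { user.authenticate('longer-than-6'); } // => true
//   });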
/**
 * Find a username that is not already in use, appending an incrementing numeric suffix if needed
*/
UserSchema.statics.findUniqueUsername = function(username, suffix, callback) {
var _this = this;
var possibleUsername = username + (suffix || '');
_this.findOne({
username: possibleUsername
}, function(err, user) {
if (!err) {
if (!user) {
callback(possibleUsername);
} else {
return _this.findUniqueUsername(username, (suffix || 0) + 1, callback);
}
} else {
callback(null);
}
});
};
ApplicantSchema.statics.findUniqueUsername = function(username, suffix, callback) {
var _this = this;
var possibleUsername = username + (suffix || '');
_this.findOne({
username: possibleUsername
}, function(err, user) {
if (!err) {
if (!user) {
callback(possibleUsername);
} else {
return _this.findUniqueUsername(username, (suffix || 0) + 1, callback);
}
} else {
callback(null);
}
});
};
InstructorSchema.statics.findUniqueUsername = function(username, suffix, callback) {
var _this = this;
var possibleUsername = username + (suffix || '');
_this.findOne({
username: possibleUsername
}, function(err, user) {
if (!err) {
if (!user) {
callback(possibleUsername);
} else {
return _this.findUniqueUsername(username, (suffix || 0) + 1, callback);
}
} else {
callback(null);
}
});
};
mongoose.model('Placement', PlacementSchema);
mongoose.model('User', UserSchema);
mongoose.model('Applicant', ApplicantSchema);
mongoose.model('Instructor', InstructorSchema);
mongoose.model('Bootcamp', BootcampSchema);
mongoose.model('SkillCategory', SkillCategorySchema);
mongoose.model('Skill', SkillSchema);
mongoose.model('Assessment', AssessmentSchema);<|fim▁end|> | default: '', |
<|file_name|>font_context.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::rc::Rc;
extern mod freetype;
extern mod fontconfig;
use self::freetype::freetype::{
FTErrorMethods,
FT_Library,
};
use self::freetype::freetype::bindgen::{
FT_Init_FreeType,
FT_Done_FreeType
};
use fontconfig::font_list::path_from_identifier;
use gfx_font::{
FontHandle,
UsedFontStyle,
};
use font_context::FontContextHandleMethods;
use freetype_impl::font::FreeTypeFontHandle;
struct FreeTypeLibraryHandle {
ctx: FT_Library,
}
impl Drop for FreeTypeLibraryHandle {
fn finalize(&self) {
assert!(self.ctx.is_not_null());
FT_Done_FreeType(self.ctx);
}<|fim▁hole|>pub struct FreeTypeFontContextHandle {
ctx: Rc<FreeTypeLibraryHandle>,
}
pub impl FreeTypeFontContextHandle {
pub fn new() -> FreeTypeFontContextHandle {
let ctx: FT_Library = ptr::null();
let result = FT_Init_FreeType(ptr::to_unsafe_ptr(&ctx));
if !result.succeeded() { fail!(); }
FreeTypeFontContextHandle {
ctx: Rc::new(FreeTypeLibraryHandle { ctx: ctx }),
}
}
}
impl FontContextHandleMethods for FreeTypeFontContextHandle {
fn clone(&self) -> FreeTypeFontContextHandle {
FreeTypeFontContextHandle { ctx: self.ctx }
}
fn create_font_from_identifier(&self, name: ~str, style: UsedFontStyle) -> Result<FontHandle, ()> {
debug!("Creating font handle for {:s}", name);
do path_from_identifier(name).chain |file_name| {
debug!("Opening font face {:s}", file_name);
FreeTypeFontHandle::new_from_file(self, file_name, &style)
}
}
}<|fim▁end|> | }
|
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|>#---------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#---------------------------------------------------------------------------------------------
#The MIT License (MIT)
#Copyright (c) 2016 Blockstack
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER<|fim▁hole|>#SOFTWARE.
#pylint: skip-file
class InvalidLineException(Exception):
pass<|fim▁end|> | #LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
<|file_name|>company-card.component.ts<|end_file_name|><|fim▁begin|>import {
Component, Input, OnInit, ChangeDetectionStrategy,
trigger, state, style, transition, animate
} from '@angular/core';
import { ICompany } from '_models/_gen/modelInterfaces';
import { TrackByService } from 'core/services/trackby.service';<|fim▁hole|>@Component({
selector: 'cm-company-card',
templateUrl: 'company-card.component.html',
//styleUrls: ['companies-card.component.scss'],
//Add [@flyInOut]="'in'" into template on card
// animations: [
// trigger('flyInOut', [
// state('in', style({transform: 'translateX(0)', opacity: 1})),
// transition('void => *', [
// style({transform: 'translateX(25%)', opacity: 0}),
// animate(300)
// ]),
// transition('* => void', [
// animate(300, style({transform: 'translateX(-25%)', opacity: 1}))
// ])
// ])
// ],
//When using OnPush detectors, then the framework will check an OnPush
//component when any of its input properties changes, when it fires
//an event, or when an observable fires an event ~ Victor Savkin (Angular Team)
changeDetection: ChangeDetectionStrategy.OnPush
})
export class CompaniesCardComponent implements OnInit {
@Input() companies: ICompany[] = [];
constructor(private trackbyService: TrackByService) { }
ngOnInit() {
}
}<|fim▁end|> | import 'style-loader!./company-card.component.scss';
|
<|file_name|>spinlock_gcc_arm.hpp<|end_file_name|><|fim▁begin|>#ifndef BOOST_SMART_PTR_DETAIL_SPINLOCK_GCC_ARM_HPP_INCLUDED
#define BOOST_SMART_PTR_DETAIL_SPINLOCK_GCC_ARM_HPP_INCLUDED
//
// Copyright (c) 2008, 2011 Peter Dimov
//
// Distributed under the Boost Software License, Version 1.0.
// See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
#include <boost/smart_ptr/detail/yield_k.hpp>
#if defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7R__) || defined(__ARM_ARCH_7M__) || defined(__ARM_ARCH_7EM__) || defined(__ARM_ARCH_7S__)
# define BOOST_SP_ARM_BARRIER "dmb"
# define BOOST_SP_ARM_HAS_LDREX
#elif defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || defined(__ARM_ARCH_6K__) || defined(__ARM_ARCH_6Z__) || defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_6T2__)
# define BOOST_SP_ARM_BARRIER "mcr p15, 0, r0, c7, c10, 5"
# define BOOST_SP_ARM_HAS_LDREX
#else
# define BOOST_SP_ARM_BARRIER ""
#endif
namespace pdalboost {} namespace boost = pdalboost; namespace pdalboost
{
namespace detail
{
class spinlock
{
public:
int v_;
public:
bool try_lock()
{
int r;
#ifdef BOOST_SP_ARM_HAS_LDREX<|fim▁hole|> "cmp %0, %1; \n"
"strexne %0, %1, [%2]; \n"
BOOST_SP_ARM_BARRIER :
"=&r"( r ): // outputs
"r"( 1 ), "r"( &v_ ): // inputs
"memory", "cc" );
#else
__asm__ __volatile__(
"swp %0, %1, [%2];\n"
BOOST_SP_ARM_BARRIER :
"=&r"( r ): // outputs
"r"( 1 ), "r"( &v_ ): // inputs
"memory", "cc" );
#endif
return r == 0;
}
void lock()
{
for( unsigned k = 0; !try_lock(); ++k )
{
pdalboost::detail::yield( k );
}
}
void unlock()
{
__asm__ __volatile__( BOOST_SP_ARM_BARRIER ::: "memory" );
*const_cast< int volatile* >( &v_ ) = 0;
}
public:
class scoped_lock
{
private:
spinlock & sp_;
scoped_lock( scoped_lock const & );
scoped_lock & operator=( scoped_lock const & );
public:
explicit scoped_lock( spinlock & sp ): sp_( sp )
{
sp.lock();
}
~scoped_lock()
{
sp_.unlock();
}
};
};
} // namespace detail
} // namespace pdalboost
#define BOOST_DETAIL_SPINLOCK_INIT {0}
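// Usage sketch (illustrative, not part of the original header): a spinlock of this type is
// statically initialized with BOOST_DETAIL_SPINLOCK_INIT and is normally held through the
// RAII scoped_lock helper.
//
//   static pdalboost::detail::spinlock g_lock = BOOST_DETAIL_SPINLOCK_INIT;
//
//   void increment( int & counter )
//   {
//       pdalboost::detail::spinlock::scoped_lock guard( g_lock );
//       ++counter; // protected until guard leaves scope
//   }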
#undef BOOST_SP_ARM_BARRIER
#undef BOOST_SP_ARM_HAS_LDREX
#endif // #ifndef BOOST_SMART_PTR_DETAIL_SPINLOCK_GCC_ARM_HPP_INCLUDED<|fim▁end|> |
__asm__ __volatile__(
"ldrex %0, [%2]; \n" |
<|file_name|>raw.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Nacl-specific raw type definitions
#![stable(feature = "raw_ext", since = "1.1.0")]
#[stable(feature = "raw_ext", since = "1.1.0")] pub type dev_t = u64;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type mode_t = u32;
pub use self::arch::{off_t, ino_t, nlink_t, blksize_t, blkcnt_t, stat, time_t};
#[cfg(any(target_arch = "x86",
target_arch = "le32",
target_arch = "powerpc",
target_arch = "arm"))]
mod arch {
use super::{dev_t, mode_t};
use os::raw::{c_long, c_short};
use os::unix::raw::{gid_t, uid_t};
#[stable(feature = "raw_ext", since = "1.1.0")] pub type blkcnt_t = i32;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type blksize_t = i32;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type ino_t = u32;<|fim▁hole|> #[stable(feature = "raw_ext", since = "1.1.0")] pub type off_t = i32;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type time_t = i32;
#[repr(C)]
#[stable(feature = "raw_ext", since = "1.1.0")]
pub struct stat {
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_dev: dev_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub __pad1: c_short,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_ino: ino_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_mode: mode_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_nlink: nlink_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_uid: uid_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_gid: gid_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_rdev: dev_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub __pad2: c_short,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_size: off_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_blksize: blksize_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_blocks: blkcnt_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_atime: time_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_atime_nsec: c_long,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_mtime: time_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_mtime_nsec: c_long,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_ctime: time_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_ctime_nsec: c_long,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub __unused4: c_long,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub __unused5: c_long,
}
}
#[cfg(any(target_arch = "mips",
target_arch = "mipsel"))]
mod arch {
use super::{dev_t, mode_t};
    use os::raw::{c_long, c_ulong};
use os::unix::raw::{gid_t, uid_t};
#[stable(feature = "raw_ext", since = "1.1.0")] pub type blkcnt_t = i32;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type blksize_t = i32;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type ino_t = u32;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type nlink_t = u32;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type off_t = i32;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type time_t = i32;
#[repr(C)]
#[stable(feature = "raw_ext", since = "1.1.0")]
pub struct stat {
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_dev: c_ulong,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_pad1: [c_long; 3],
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_ino: ino_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_mode: mode_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_nlink: nlink_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_uid: uid_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_gid: gid_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_rdev: c_ulong,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_pad2: [c_long; 2],
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_size: off_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_pad3: c_long,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_atime: time_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_atime_nsec: c_long,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_mtime: time_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_mtime_nsec: c_long,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_ctime: time_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_ctime_nsec: c_long,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_blksize: blksize_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_blocks: blkcnt_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_pad5: [c_long; 14],
}
}
#[cfg(target_arch = "aarch64")]
mod arch {
use super::{dev_t, mode_t};
use os::raw::{c_long, c_int};
use os::unix::raw::{gid_t, uid_t};
#[stable(feature = "raw_ext", since = "1.1.0")] pub type blkcnt_t = i64;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type blksize_t = i32;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type ino_t = u64;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type nlink_t = u32;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type off_t = i64;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type time_t = i64;
#[repr(C)]
#[stable(feature = "raw_ext", since = "1.1.0")]
pub struct stat {
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_dev: dev_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_ino: ino_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_mode: mode_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_nlink: nlink_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_uid: uid_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_gid: gid_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_rdev: dev_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub __pad1: dev_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_size: off_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_blksize: blksize_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub __pad2: c_int,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_blocks: blkcnt_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_atime: time_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_atime_nsec: c_long,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_mtime: time_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_mtime_nsec: c_long,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_ctime: time_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_ctime_nsec: c_long,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub __unused: [c_int; 2],
}
}
#[cfg(target_arch = "x86_64")]
mod arch {
use super::{dev_t, mode_t};
use os::raw::{c_long, c_int};
use os::unix::raw::{gid_t, uid_t};
#[stable(feature = "raw_ext", since = "1.1.0")] pub type blkcnt_t = i64;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type blksize_t = i64;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type ino_t = u64;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type nlink_t = u64;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type off_t = i64;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type time_t = i64;
#[repr(C)]
#[stable(feature = "raw_ext", since = "1.1.0")]
pub struct stat {
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_dev: dev_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_ino: ino_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_nlink: nlink_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_mode: mode_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_uid: uid_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_gid: gid_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub __pad0: c_int,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_rdev: dev_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_size: off_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_blksize: blksize_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_blocks: blkcnt_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_atime: time_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_atime_nsec: c_long,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_mtime: time_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_mtime_nsec: c_long,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_ctime: time_t,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub st_ctime_nsec: c_long,
#[stable(feature = "raw_ext", since = "1.1.0")]
pub __unused: [c_long; 3],
}
}<|fim▁end|> | #[stable(feature = "raw_ext", since = "1.1.0")] pub type nlink_t = u32; |
<|file_name|>math.rs<|end_file_name|><|fim▁begin|>//! Various methods for computing with vectors.
use std::ops::{Add, Rem};
use vecmath::{self, traits::Float};
pub use vecmath::{
mat2x3_inv as invert, row_mat2x3_mul as multiply, row_mat2x3_transform_pos2 as transform_pos,
row_mat2x3_transform_vec2 as transform_vec, vec2_add as add, vec2_cast as cast,
vec2_cross as cross, vec2_dot as dot, vec2_mul as mul, vec2_scale as mul_scalar,
vec2_square_len as square_len, vec2_sub as sub,
};
use crate::{
modular_index::previous,
types::{Area, Color, Line, Polygon, Ray, Rectangle, SourceRectangle, Triangle},
};
/// The type used for scalars.
pub type Scalar = f64;
/// The type used for matrices.
pub type Matrix2d<T = Scalar> = vecmath::Matrix2x3<T>;
/// The type used for 2D vectors.
pub type Vec2d<T = Scalar> = vecmath::Vector2<T>;
/// The type used for 3D vectors.
pub type Vec3d<T = Scalar> = vecmath::Vector3<T>;
/// Creates a perpendicular vector.
#[inline(always)]
pub fn perp<T>(v: [T; 2]) -> [T; 2]
where
T: Float,
{
[-v[1], v[0]]
}
/// Transforms from normalized to absolute coordinates.
///
/// Computes absolute transform from width and height of viewport.
/// In absolute coordinates, the x axis points to the right,
/// and the y axis points down on the screen.
#[inline(always)]
pub fn abs_transform<T>(w: T, h: T) -> Matrix2d<T>
where
T: Float,
{
use vecmath::traits::{FromPrimitive, One, Zero};
let _0: T = Zero::zero();
let _1: T = One::one();
let _2: T = FromPrimitive::from_f64(2.0);
let sx = _2 / w;
let sy = -_2 / h;
[[sx, _0, -_1], [_0, sy, _1]]
}
/// Creates a translation matrix.
#[inline(always)]
pub fn translate<T>(v: Vec2d<T>) -> Matrix2d<T>
where
T: Float,
{
use vecmath::traits::{One, Zero};
let _0: T = Zero::zero();
let _1: T = One::one();
[[_1, _0, v[0]], [_0, _1, v[1]]]
}
/// Creates a rotation matrix.
#[inline(always)]
pub fn rotate_radians<T>(angle: T) -> Matrix2d<T>
where
T: Float,
{
use vecmath::traits::Zero;
let _0 = Zero::zero();
let c = angle.cos();
let s = angle.sin();
[[c, -s, _0], [s, c, _0]]
}
/// Orients x axis to look at point.
///
/// Leaves x axis unchanged if the
/// point to look at is the origin.
#[inline(always)]
pub fn orient<T>(x: T, y: T) -> Matrix2d<T>
where
T: Float,
{
use vecmath::traits::Zero;
let _0: T = Zero::zero();
let len = x * x + y * y;
if len == _0 {
return identity();
}
let len = len.sqrt();
let c = x / len;
let s = y / len;
[[c, -s, _0], [s, c, _0]]
}
/// Create a scale matrix.
#[inline(always)]
pub fn scale<T>(sx: T, sy: T) -> Matrix2d<T>
where
T: Float,
{
use vecmath::traits::Zero;
let _0: T = Zero::zero();
[[sx, _0, _0], [_0, sy, _0]]
}<|fim▁hole|>where
T: Float,
{
use vecmath::traits::{One, Zero};
let _0 = Zero::zero();
let _1 = One::one();
[[_1, v[0], _0], [v[1], _1, _0]]
}
/// Create an identity matrix.
#[inline(always)]
pub fn identity<T>() -> Matrix2d<T>
where
T: Float,
{
use vecmath::traits::{One, Zero};
let _0: T = Zero::zero();
let _1: T = One::one();
[[_1, _0, _0], [_0, _1, _0]]
}
/// Extract scale information from matrix.
#[inline(always)]
pub fn get_scale<T>(m: Matrix2d<T>) -> Vec2d<T>
where
T: Float,
{
[
(m[0][0] * m[0][0] + m[1][0] * m[1][0]).sqrt(),
(m[0][1] * m[0][1] + m[1][1] * m[1][1]).sqrt(),
]
}
/// Compute the shortest vector from point to ray.
/// A ray stores starting point and directional vector.
#[inline(always)]
pub fn separation<T>(ray: Ray<T>, v: Vec2d<T>) -> Vec2d<T>
where
T: Float,
{
// Get the directional vector.
let (dir_x, dir_y) = (ray[2], ray[3]);
// Get displacement vector from point.
let (dx, dy) = (ray[0] - v[0], ray[1] - v[1]);
// Compute the component of position in ray direction.
let dot = dir_x * v[0] + dir_y * v[1];
// The directional vector multiplied with
// the dot gives us a parallel vector.
// When we subtract this from the displacement
// we get a vector normal to the ray.
// This is the shortest vector from the point to the ray.
[dx - dot * dir_x, dy - dot * dir_y]
}
/// Returns the least separation out of four.
/// Each seperation can be computed using `separation` function.
/// The separation returned can be used
/// to solve collision of rectangles.
#[inline(always)]
pub fn least_separation_4<T>(
sep1: Vec2d<T>,
sep2: Vec2d<T>,
sep3: Vec2d<T>,
sep4: Vec2d<T>,
) -> Vec2d<T>
where
T: Float,
{
let dot1 = sep1[0] * sep1[0] + sep1[1] * sep1[1];
let dot2 = sep2[0] * sep2[0] + sep2[1] * sep2[1];
let dot3 = sep3[0] * sep3[0] + sep3[1] * sep3[1];
let dot4 = sep4[0] * sep4[0] + sep4[1] * sep4[1];
// Search for the smallest dot product.
if dot1 < dot2 {
if dot3 < dot4 {
if dot1 < dot3 {
sep1
} else {
sep3
}
} else {
if dot1 < dot4 {
sep1
} else {
sep4
}
}
} else {
if dot3 < dot4 {
if dot2 < dot3 {
sep2
} else {
sep3
}
} else {
if dot2 < dot4 {
sep2
} else {
sep4
}
}
}
}
/// Shrinks a rectangle by a factor on all sides.
#[inline(always)]
pub fn margin_rectangle<T>(rect: Rectangle<T>, m: T) -> Rectangle<T>
where
T: Float,
{
use vecmath::traits::{FromPrimitive, Zero};
let _0: T = Zero::zero();
let _05: T = FromPrimitive::from_f64(0.5);
let _2: T = FromPrimitive::from_f64(2.0);
let w = rect[2] - _2 * m;
let h = rect[3] - _2 * m;
let (x, w) = if w < _0 {
(rect[0] + _05 * rect[2], _0)
} else {
(rect[0] + m, w)
};
let (y, h) = if h < _0 {
(rect[1] + _05 * rect[3], _0)
} else {
(rect[1] + m, h)
};
[x, y, w, h]
}
/// Computes a relative rectangle using the rectangle as a tile.
#[inline(always)]
pub fn relative_rectangle<T>(rect: Rectangle<T>, v: Vec2d<T>) -> Rectangle<T>
where
T: Float,
{
[
rect[0] + v[0] * rect[2],
rect[1] + v[1] * rect[3],
rect[2],
rect[3],
]
}
/// Computes overlap between two rectangles.
/// The area of the overlapping rectangle is positive.
/// A shared edge or corner is not considered overlap.
#[inline(always)]
pub fn overlap_rectangle<T>(a: Rectangle<T>, b: Rectangle<T>) -> Option<Rectangle<T>>
where
T: Float,
{
#[inline(always)]
fn min<T: Float>(a: T, b: T) -> T {
if a < b {
a
} else {
b
}
}
#[inline(always)]
fn max<T: Float>(a: T, b: T) -> T {
if a > b {
a
} else {
b
}
}
if a[0] < b[0] + b[2] && a[1] < b[1] + b[3] && b[0] < a[0] + a[2] && b[1] < a[1] + a[3] {
let x = max(a[0], b[0]);
let y = max(a[1], b[1]);
let w = min(a[0] + a[2], b[0] + b[2]) - x;
let h = min(a[1] + a[3], b[1] + b[3]) - y;
Some([x, y, w, h])
} else {
None
}
}
#[cfg(test)]
mod test_overlap {
use super::overlap_rectangle;
#[test]
fn overlap() {
let a = [0.0, 1.0, 100.0, 101.0];
let b = [51.0, 52.0, 102.0, 103.0];
let c = overlap_rectangle(a, b).unwrap();
assert_eq!(c, [51.0, 52.0, 49.0, 50.0]);
let d = overlap_rectangle(a, c).unwrap();
assert_eq!(d, c);
let e = overlap_rectangle(b, c).unwrap();
assert_eq!(e, c);
}
#[test]
fn edge() {
let a = [0.0, 0.0, 100.0, 100.0];
let b = [100.0, 0.0, 100.0, 100.0];
let c = overlap_rectangle(a, b);
assert_eq!(c, None);
}
}
/// Computes a relative source rectangle using
/// the source rectangle as a tile.
#[inline(always)]
pub fn relative_source_rectangle<T>(rect: SourceRectangle<T>, x: T, y: T) -> SourceRectangle<T>
where
T: Float,
{
let (rx, ry, rw, rh) = (rect[0], rect[1], rect[2], rect[3]);
let (x, y) = (rx + x * rw, ry + y * rh);
[x, y, rw, rh]
}
/// Computes modular offset safely for numbers.
#[inline(always)]
pub fn modular_offset<T: Add<Output = T> + Rem<Output = T> + Copy>(n: &T, i: &T, off: &T) -> T {
(*i + (*off % *n + *n)) % *n
}
#[cfg(test)]
mod test_modular_offset {
use super::*;
#[test]
fn test_modular_offset() {
assert_eq!(modular_offset(&3.0_f64, &0.0_f64, &-1.0_f64), 2.0_f64);
assert_eq!(modular_offset(&3.0_f64, &1.0_f64, &-1.0_f64), 0.0_f64);
assert_eq!(modular_offset(&3.0_f64, &2.0_f64, &-1.0_f64), 1.0_f64);
assert_eq!(modular_offset(&3.0_f64, &3.0_f64, &-1.0_f64), 2.0_f64);
assert_eq!(modular_offset(&3.0_f64, &0.0_f64, &1.0_f64), 1.0_f64);
assert_eq!(modular_offset(&3.0_f64, &1.0_f64, &1.0_f64), 2.0_f64);
assert_eq!(modular_offset(&3.0_f64, &2.0_f64, &1.0_f64), 0.0_f64);
assert_eq!(modular_offset(&3.0_f64, &3.0_f64, &1.0_f64), 1.0_f64);
}
}
/// Computes the area and centroid of a simple polygon.
///
/// A simple polygon is one that does not intersect itself.
/// Source: http://en.wikipedia.org/wiki/Polygon_area#Simple_polygons
pub fn area_centroid<T>(polygon: Polygon<'_, T>) -> (Area<T>, Vec2d<T>)
where
T: Float,
{
use vecmath::traits::{FromPrimitive, Zero};
let _0: T = Zero::zero();
let _05: T = FromPrimitive::from_f64(0.5);
let _3: T = FromPrimitive::from_f64(3.0);
let n = polygon.len();
let mut sum = _0;
let (mut cx, mut cy) = (_0, _0);
for i in 0..n {
let qx = polygon[i][0];
let qy = polygon[i][1];
let p_i = previous(n, i);
let px = polygon[p_i][0];
let py = polygon[p_i][1];
let cross = px * qy - qx * py;
cx += (px + qx) * cross;
cy += (py + qy) * cross;
sum += cross;
}
let area = _05 * sum;
// 'cx / (6.0 * area)' = 'cx / (3.0 * sum)'
let centroid = [cx / (_3 * sum), cy / (_3 * sum)];
(area, centroid)
}
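#[cfg(test)]
mod test_area_centroid {
    use super::*;

    // Added illustrative check: for a unit square listed counter-clockwise, the shoelace
    // formula above yields area 1 and centroid (0.5, 0.5).
    #[test]
    fn unit_square() {
        let square = [[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]];
        let (area, centroid) = area_centroid(&square[..]);
        assert!((area - 1.0).abs() < 1e-12);
        assert!((centroid[0] - 0.5).abs() < 1e-12);
        assert!((centroid[1] - 0.5).abs() < 1e-12);
    }
}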
/// Computes area of a simple polygon.
///
/// A simple polygon is one that does not intersect itself.
#[inline(always)]
pub fn area<T>(polygon: Polygon<'_, T>) -> T
where
T: Float,
{
let (res, _) = area_centroid(polygon);
res
}
/// Computes centroid of a simple polygon.
///
/// A simple polygon is one that does not intersect itself.
#[inline(always)]
pub fn centroid<T>(polygon: Polygon<'_, T>) -> Vec2d<T>
where
T: Float,
{
let (_, res) = area_centroid(polygon);
res
}
/// Returns a number that tells which side it is relative to a line.
///
/// Computes the cross product of the vector that gives the line
/// with the vector between point and starting point of line.
/// One side of the line has opposite sign of the other.
#[inline(always)]
pub fn line_side<T>(line: Line<T>, v: Vec2d<T>) -> T
where
T: Float,
{
let (ax, ay) = (line[0], line[1]);
let (bx, by) = (line[2], line[3]);
(bx - ax) * (v[1] - ay) - (by - ay) * (v[0] - ax)
}
/// Returns true if point is inside triangle.
///
/// This is done by computing a `side` number for each edge.
/// If the number is inside if it is on the same side for all edges.
/// Might break for very small triangles.
pub fn inside_triangle<T>(triangle: Triangle<T>, v: Vec2d<T>) -> bool
where
T: Float,
{
use vecmath::traits::Zero;
let _0: T = Zero::zero();
let ax = triangle[0][0];
let ay = triangle[0][1];
let bx = triangle[1][0];
let by = triangle[1][1];
let cx = triangle[2][0];
let cy = triangle[2][1];
let ab_side = line_side([ax, ay, bx, by], v);
let bc_side = line_side([bx, by, cx, cy], v);
let ca_side = line_side([cx, cy, ax, ay], v);
let ab_positive = ab_side >= _0;
let bc_positive = bc_side >= _0;
let ca_positive = ca_side >= _0;
ab_positive == bc_positive && bc_positive == ca_positive
}
/// Returns true if triangle is clockwise.
///
/// This is done by computing which side the third vertex is relative to
/// the line starting from the first vertex to second vertex.
///
/// The triangle is considered clockwise if the third vertex is on the line
/// between the two first vertices.
#[inline(always)]
pub fn triangle_face<T>(triangle: Triangle<T>) -> bool
where
T: Float,
{
use vecmath::traits::Zero;
let _0 = Zero::zero();
let ax = triangle[0][0];
let ay = triangle[0][1];
let bx = triangle[1][0];
let by = triangle[1][1];
let cx = triangle[2][0];
let cy = triangle[2][1];
let ab_side = line_side([ax, ay, bx, by], [cx, cy]);
ab_side <= _0
}
#[cfg(test)]
mod test_triangle {
use super::*;
#[test]
fn test_triangle() {
// Triangle counter clock-wise.
let tri_1 = [[0.0, 0.0], [1.0, 0.0], [1.0, 1.0]];
// Triangle clock-wise.
let tri_2 = [[0.0, 0.0], [1.0, 1.0], [1.0, 0.0]];
let (x, y) = (0.5, 0.25);
assert!(inside_triangle(tri_1, [x, y]));
assert!(inside_triangle(tri_2, [x, y]));
assert_eq!(triangle_face(tri_1), false);
assert!(triangle_face(tri_2));
}
}
/// Transforms from cartesian coordinates to barycentric.
#[inline(always)]
pub fn to_barycentric<T>(triangle: Triangle<T>, pos: Vec2d<T>) -> Vec3d<T>
where
T: Float,
{
use vecmath::traits::One;
let _1: T = One::one();
let x = pos[0];
let y = pos[1];
let x1 = triangle[0][0];
let y1 = triangle[0][1];
let x2 = triangle[1][0];
let y2 = triangle[1][1];
let x3 = triangle[2][0];
let y3 = triangle[2][1];
let lambda1 = ((y2 - y3) * (x - x3) + (x3 - x2) * (y - y3))
/ ((y2 - y3) * (x1 - x3) + (x3 - x2) * (y1 - y3));
let lambda2 = ((y3 - y1) * (x - x3) + (x1 - x3) * (y - y3))
/ ((y2 - y3) * (x1 - x3) + (x3 - x2) * (y1 - y3));
let lambda3 = _1 - lambda1 - lambda2;
[lambda1, lambda2, lambda3]
}
/// Transforms from barycentric coordinates to cartesian.
#[inline(always)]
pub fn from_barycentric<T>(triangle: Triangle<T>, lambda: Vec3d<T>) -> Vec2d<T>
where
T: Float,
{
let x1 = triangle[0][0];
let y1 = triangle[0][1];
let x2 = triangle[1][0];
let y2 = triangle[1][1];
let x3 = triangle[2][0];
let y3 = triangle[2][1];
[
lambda[0] * x1 + lambda[1] * x2 + lambda[2] * x3,
lambda[0] * y1 + lambda[1] * y2 + lambda[2] * y3,
]
}
#[cfg(test)]
mod test_barycentric {
use super::*;
#[test]
fn test_barycentric() {
let triangle = [[0.0, 0.0], [100.0, 0.0], [0.0, 50.0]];
let old_pos = [10.0, 20.0];
let b = to_barycentric(triangle, old_pos);
let new_pos: Vec2d = from_barycentric(triangle, b);
let eps = 0.00001;
assert!((new_pos[0] - old_pos[0]).abs() < eps);
assert!((new_pos[1] - old_pos[1]).abs() < eps);
}
}
/// Transform color with hue, saturation and value.
///
/// Source: http://beesbuzz.biz/code/hsv_color_transforms.php
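///
/// Sanity check (derived from the coefficients below, not from the original
/// docs): with `h_rad = 0.0`, `s = 1.0` and `v = 1.0` the matrix reduces to
/// approximately the identity, so the input color is returned unchanged up to
/// rounding of the published coefficients.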
#[inline(always)]
pub fn hsv(color: Color, h_rad: f32, s: f32, v: f32) -> Color {
let vsu = v * s * h_rad.cos();
let vsw = v * s * h_rad.sin();
[
(0.299 * v + 0.701 * vsu + 0.168 * vsw) * color[0]
+ (0.587 * v - 0.587 * vsu + 0.330 * vsw) * color[1]
+ (0.114 * v - 0.114 * vsu - 0.497 * vsw) * color[2],
(0.299 * v - 0.299 * vsu - 0.328 * vsw) * color[0]
+ (0.587 * v + 0.413 * vsu + 0.035 * vsw) * color[1]
+ (0.114 * v - 0.114 * vsu + 0.292 * vsw) * color[2],
(0.299 * v - 0.3 * vsu + 1.25 * vsw) * color[0]
+ (0.587 * v - 0.588 * vsu - 1.05 * vsw) * color[1]
+ (0.114 * v + 0.886 * vsu - 0.203 * vsw) * color[2],
color[3],
]
}<|fim▁end|> |
/// Create a shear matrix.
#[inline(always)]
pub fn shear<T>(v: Vec2d<T>) -> Matrix2d<T> |
<|file_name|>packed-tuple-struct-layout.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your<|fim▁hole|>// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
use std::mem;
#[repr(packed)]
struct S4(u8,[u8; 3]);
#[repr(packed)]
struct S5(u8,u32);
pub fn main() {
unsafe {
let s4 = S4(1, [2,3,4]);
let transd : [u8; 4] = mem::transmute(s4);
assert_eq!(transd, [1, 2, 3, 4]);
let s5 = S5(1, 0xff_00_00_ff);
let transd : [u8; 5] = mem::transmute(s5);
// Don't worry about endianness, the u32 is palindromic.
assert_eq!(transd, [1, 0xff, 0, 0, 0xff]);
}
}<|fim▁end|> | |
<|file_name|>spark_gce.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
###
# This script sets up a Spark cluster on Google Compute Engine
# Sigmoidanalytics.com
###
from __future__ import with_statement
import logging
import os
import pipes
import random
import shutil
import subprocess
import sys
import tempfile
import time
import commands
import urllib2
from optparse import OptionParser
from sys import stderr
import shlex
import getpass
import threading
import json
###
# Make sure gcloud is installed and authenticated
# Usage: spark_gce.py <project> <no-slaves> <slave-type> <master-type> <identity-file> <zone> <cluster-name>
# Usage: spark_gce.py <project> <cluster-name> destroy
###
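# Example invocations (all values below are placeholders, not defaults):
#   python spark_gce.py my-gcp-project 3 n1-standard-2 n1-standard-4 ~/.ssh/google_compute_engine us-central1-a sigmoid-cluster
#   python spark_gce.py my-gcp-project sigmoid-cluster destroy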
identity_file = ""
slave_no = ""
slave_type = ""
master_type = ""
zone = ""
cluster_name = ""
username = ""
project = ""
def read_args():
global identity_file
global slave_no
global slave_type
global master_type
global zone
global cluster_name
global username
global project
if len(sys.argv) == 8:
project = sys.argv[1]
slave_no = int(sys.argv[2])
slave_type = sys.argv[3]
master_type = sys.argv[4]
identity_file = sys.argv[5]
zone = sys.argv[6]
cluster_name = sys.argv[7]
username = getpass.getuser()
elif len(sys.argv) == 4 and sys.argv[3].lower() == "destroy":
print 'Destroying cluster ' + sys.argv[2]
project = sys.argv[1]
cluster_name = sys.argv[2]
try:
command = 'gcloud compute --project ' + project + ' instances list --format json'
output = subprocess.check_output(command, shell=True)
data = json.loads(output)
master_nodes=[]
slave_nodes=[]
for instance in data:
try:
host_name = instance['name']
host_ip = instance['networkInterfaces'][0]['accessConfigs'][0]['natIP']
if host_name == cluster_name + '-master':
command = 'gcloud compute instances delete ' + host_name + ' --project ' + project
command = shlex.split(command)
subprocess.call(command)
elif cluster_name + '-slave' in host_name:
command = 'gcloud compute instances delete ' + host_name + ' --project ' + project
command = shlex.split(command)
subprocess.call(command)
except:
pass
except:
print "Failed to Delete instances"
sys.exit(1)
sys.exit(0)
else:
print '# Usage: spark_gce.py <project> <no-slaves> <slave-type> <master-type> <identity-file> <zone> <cluster-name>'
print '# Usage: spark_gce.py <project> <cluster-name> destroy'
sys.exit(0)
def setup_network():
print '[ Setting up Network & Firewall Entries ]'
try:
command = 'gcloud compute --project=' + project + ' networks create "' + cluster_name + '-network" --range "10.240.0.0/16"'
command = shlex.split(command)
subprocess.call(command)
#Uncomment the above and comment out the section below if you don't want to open all ports to the public.
command = 'gcloud compute firewall-rules delete internal --project '+ project
command = 'gcloud compute firewall-rules create internal --network ' + cluster_name + '-network --allow tcp udp icmp --project '+ project
command = shlex.split(command)
subprocess.call(command)
except OSError:
print "Failed to setup Network & Firewall. Exiting.."
sys.exit(1)
def launch_master():
print '[ Launching Master ]'
command = 'gcloud compute --project "' + project + '" instances create "' + cluster_name + '-master" --zone "' + zone + '" --machine-type "' + master_type + '" --network "' + cluster_name + '-network" --maintenance-policy "MIGRATE" --scopes "https://www.googleapis.com/auth/devstorage.read_only" --image "https://www.googleapis.com/compute/v1/projects/centos-cloud/global/images/centos-6-v20141218" --boot-disk-type "pd-standard" --boot-disk-device-name "' + cluster_name + '-md"'
command = shlex.split(command)
subprocess.call(command)
def launch_slaves():
print '[ Launching Slaves ]'
for s_id in range(1,slave_no+1):
command = 'gcloud compute --project "' + project + '" instances create "' + cluster_name + '-slave' + str(s_id) + '" --zone "' + zone + '" --machine-type "' + slave_type + '" --network "' + cluster_name + '-network" --maintenance-policy "MIGRATE" --scopes "https://www.googleapis.com/auth/devstorage.read_only" --image "https://www.googleapis.com/compute/v1/projects/centos-cloud/global/images/centos-6-v20141218" --boot-disk-type "pd-standard" --boot-disk-device-name "' + cluster_name + '-s' + str(s_id) + 'd"'
command = shlex.split(command)
subprocess.call(command)
def launch_cluster():
print '[ Creating the Cluster ]'
setup_network()
launch_master()
launch_slaves()
def check_gcloud():
myexec = "gcloud"
print '[ Verifying gcloud ]'
try:
subprocess.call([myexec, 'info'])
except OSError:
print "%s executable not found. \n# Make sure gcloud is installed and authenticated\nPlease follow https://cloud.google.com/compute/docs/gcloud-compute/" % myexec
sys.exit(1)
def get_cluster_ips():
command = 'gcloud compute --project ' + project + ' instances list --format json'
output = subprocess.check_output(command, shell=True)
data = json.loads(output)
master_nodes=[]
slave_nodes=[]
for instance in data:
try:
host_name = instance['name']
host_ip = instance['networkInterfaces'][0]['accessConfigs'][0]['natIP']
if host_name == cluster_name + '-master':
master_nodes.append(host_ip)
elif cluster_name + '-slave' in host_name:
slave_nodes.append(host_ip)
except:
pass
# Return all the instances
return (master_nodes, slave_nodes)
def enable_sudo(master,command):
'''
ssh_command(master,"echo \"import os\" > setuid.py ")
ssh_command(master,"echo \"import sys\" >> setuid.py")
ssh_command(master,"echo \"import commands\" >> setuid.py")
ssh_command(master,"echo \"command=sys.argv[1]\" >> setuid.py")
ssh_command(master,"echo \"os.setuid(os.geteuid())\" >> setuid.py")
ssh_command(master,"echo \"print commands.getstatusoutput(\"command\")\" >> setuid.py")
'''
os.system("ssh -i " + identity_file + " -t -o 'UserKnownHostsFile=/dev/null' -o 'CheckHostIP=no' -o 'StrictHostKeyChecking no' "+ username + "@" + master + " '" + command + "'")
def ssh_thread(host,command):
enable_sudo(host,command)
def install_java(master_nodes,slave_nodes):
print '[ Installing Java and Development Tools ]'
master = master_nodes[0]
master_thread = threading.Thread(target=ssh_thread, args=(master,"sudo yum install -y java-1.7.0-openjdk;sudo yum install -y java-1.7.0-openjdk-devel;sudo yum groupinstall \'Development Tools\' -y"))
master_thread.start()
#ssh_thread(master,"sudo yum install -y java-1.7.0-openjdk")
for slave in slave_nodes:
slave_thread = threading.Thread(target=ssh_thread, args=(slave,"sudo yum install -y java-1.7.0-openjdk;sudo yum install -y java-1.7.0-openjdk-devel;sudo yum groupinstall \'Development Tools\' -y"))
slave_thread.start()
#ssh_thread(slave,"sudo yum install -y java-1.7.0-openjdk")
slave_thread.join()
master_thread.join()
def ssh_command(host,command):
#print "ssh -i " + identity_file + " -o 'UserKnownHostsFile=/dev/null' -o 'CheckHostIP=no' -o 'StrictHostKeyChecking no' "+ username + "@" + host + " '" + command + "'"
commands.getstatusoutput("ssh -i " + identity_file + " -o 'UserKnownHostsFile=/dev/null' -o 'CheckHostIP=no' -o 'StrictHostKeyChecking no' "+ username + "@" + host + " '" + command + "'" )
def deploy_keys(master_nodes,slave_nodes):
print '[ Generating SSH Keys on Master ]'
key_file = os.path.basename(identity_file)
master = master_nodes[0]
ssh_command(master,"ssh-keygen -q -t rsa -N \"\" -f ~/.ssh/id_rsa")
ssh_command(master,"cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys")
os.system("scp -i " + identity_file + " -oUserKnownHostsFile=/dev/null -oCheckHostIP=no -oStrictHostKeyChecking=no -o 'StrictHostKeyChecking no' "+ identity_file + " " + username + "@" + master + ":")
ssh_command(master,"chmod 600 " + key_file)
ssh_command(master,"tar czf .ssh.tgz .ssh")
ssh_command(master,"ssh-keyscan -H $(/sbin/ifconfig eth0 | grep \"inet addr:\" | cut -d: -f2 | cut -d\" \" -f1) >> ~/.ssh/known_hosts")
ssh_command(master,"ssh-keyscan -H $(cat /etc/hosts | grep $(/sbin/ifconfig eth0 | grep \"inet addr:\" | cut -d: -f2 | cut -d\" \" -f1) | cut -d\" \" -f2) >> ~/.ssh/known_hosts")
print '[ Transferring SSH keys to slaves ]'
for slave in slave_nodes:
print commands.getstatusoutput("ssh -i " + identity_file + " -oUserKnownHostsFile=/dev/null -oCheckHostIP=no -oStrictHostKeyChecking=no " + username + "@" + master + " 'scp -i " + key_file + " -oStrictHostKeyChecking=no .ssh.tgz " + username +"@" + slave + ":'")
ssh_command(slave,"tar xzf .ssh.tgz")
ssh_command(master,"ssh-keyscan -H " + slave + " >> ~/.ssh/known_hosts")
ssh_command(slave,"ssh-keyscan -H $(cat /etc/hosts | grep $(/sbin/ifconfig eth0 | grep \"inet addr:\" | cut -d: -f2 | cut -d\" \" -f1) | cut -d\" \" -f2) >> ~/.ssh/known_hosts")
ssh_command(slave,"ssh-keyscan -H $(/sbin/ifconfig eth0 | grep \"inet addr:\" | cut -d: -f2 | cut -d\" \" -f1) >> ~/.ssh/known_hosts")
def attach_drive(master_nodes,slave_nodes):
print '[ Adding new 500GB drive on Master ]'
master = master_nodes[0]
command='gcloud compute --project="' + project + '" disks create "' + cluster_name + '-m-disk" --size 500GB --type "pd-standard" --zone ' + zone
command = shlex.split(command)
subprocess.call(command)
command = 'gcloud compute --project="' + project + '" instances attach-disk ' + cluster_name + '-master --device-name "' + cluster_name + '-m-disk" --disk ' + cluster_name + '-m-disk --zone ' + zone
command = shlex.split(command)
subprocess.call(command)
master_thread = threading.Thread(target=ssh_thread, args=(master,"sudo mkfs.ext3 /dev/disk/by-id/google-"+ cluster_name + "-m-disk " + " -F < /dev/null"))
master_thread.start()
print '[ Adding new 500GB drive on Slaves ]'
i = 1
for slave in slave_nodes:
master = slave
command='gcloud compute --project="' + project + '" disks create "' + cluster_name + '-s' + str(i) + '-disk" --size 500GB --type "pd-standard" --zone ' + zone
command = shlex.split(command)
subprocess.call(command)
command = 'gcloud compute --project="' + project + '" instances attach-disk ' + cluster_name + '-slave' + str(i) + ' --disk ' + cluster_name + '-s' + str(i) + '-disk --device-name "' + cluster_name + '-s' + str(i) + '-disk" --zone ' + zone
command = shlex.split(command)
subprocess.call(command)
slave_thread = threading.Thread(target=ssh_thread, args=(slave,"sudo mkfs.ext3 /dev/disk/by-id/google-" + cluster_name + "-s" + str(i) + "-disk -F < /dev/null"))
slave_thread.start()
i=i+1
slave_thread.join()
master_thread.join()
print '[ Mounting new Volume ]'
enable_sudo(master_nodes[0],"sudo mount /dev/disk/by-id/google-"+ cluster_name + "-m-disk /mnt")
enable_sudo(master_nodes[0],"sudo chown " + username + ":" + username + " /mnt")
i=1
for slave in slave_nodes:
enable_sudo(slave,"sudo mount /dev/disk/by-id/google-"+ cluster_name + "-s" + str(i) +"-disk /mnt")
enable_sudo(slave,"sudo chown " + username + ":" + username + " /mnt")
i=i+1
print '[ All volumes mounted, will be available at /mnt ]'
<|fim▁hole|>
print '[ Downloading Binaries ]'
master = master_nodes[0]
ssh_command(master,"rm -fr sigmoid")
ssh_command(master,"mkdir sigmoid")
ssh_command(master,"cd sigmoid;wget https://s3.amazonaws.com/sigmoidanalytics-builds/spark/1.2.0/spark-1.2.0-bin-cdh4.tgz")
ssh_command(master,"cd sigmoid;wget https://s3.amazonaws.com/sigmoidanalytics-builds/spark/0.9.1/gce/scala.tgz")
ssh_command(master,"cd sigmoid;tar zxf spark-1.2.0-bin-cdh4.tgz;rm spark-1.2.0-bin-cdh4.tgz")
ssh_command(master,"cd sigmoid;tar zxf scala.tgz;rm scala.tgz")
print '[ Updating Spark Configurations ]'
ssh_command(master,"cd sigmoid;cd spark-1.2.0-bin-cdh4/conf;cp spark-env.sh.template spark-env.sh")
ssh_command(master,"cd sigmoid;cd spark-1.2.0-bin-cdh4/conf;echo 'export SCALA_HOME=\"/home/`whoami`/sigmoid/scala\"' >> spark-env.sh")
ssh_command(master,"cd sigmoid;cd spark-1.2.0-bin-cdh4/conf;echo 'export SPARK_MEM=2454m' >> spark-env.sh")
ssh_command(master,"cd sigmoid;cd spark-1.2.0-bin-cdh4/conf;echo \"SPARK_JAVA_OPTS+=\\\" -Dspark.local.dir=/mnt/spark \\\"\" >> spark-env.sh")
ssh_command(master,"cd sigmoid;cd spark-1.2.0-bin-cdh4/conf;echo 'export SPARK_JAVA_OPTS' >> spark-env.sh")
ssh_command(master,"cd sigmoid;cd spark-1.2.0-bin-cdh4/conf;echo 'export SPARK_MASTER_IP=PUT_MASTER_IP_HERE' >> spark-env.sh")
ssh_command(master,"cd sigmoid;cd spark-1.2.0-bin-cdh4/conf;echo 'export MASTER=spark://PUT_MASTER_IP_HERE:7077' >> spark-env.sh")
ssh_command(master,"cd sigmoid;cd spark-1.2.0-bin-cdh4/conf;echo 'export JAVA_HOME=/usr/lib/jvm/java-1.7.0-openjdk.x86_64' >> spark-env.sh")
for slave in slave_nodes:
ssh_command(master,"echo " + slave + " >> sigmoid/spark-1.2.0-bin-cdh4/conf/slaves")
ssh_command(master,"sed -i \"s/PUT_MASTER_IP_HERE/$(/sbin/ifconfig eth0 | grep \"inet addr:\" | cut -d: -f2 | cut -d\" \" -f1)/g\" sigmoid/spark-1.2.0-bin-cdh4/conf/spark-env.sh")
ssh_command(master,"chmod +x sigmoid/spark-1.2.0-bin-cdh4/conf/spark-env.sh")
print '[ Rsyncing Spark to all slaves ]'
#Change permissions
enable_sudo(master,"sudo chown " + username + ":" + username + " /mnt")
i=1
for slave in slave_nodes:
enable_sudo(slave,"sudo chown " + username + ":" + username + " /mnt")
for slave in slave_nodes:
ssh_command(master,"rsync -za /home/" + username + "/sigmoid " + slave + ":")
ssh_command(slave,"mkdir /mnt/spark")
ssh_command(master,"mkdir /mnt/spark")
print '[ Starting Spark Cluster ]'
ssh_command(master,"sigmoid/spark-1.2.0-bin-cdh4/sbin/start-all.sh")
#setup_shark(master_nodes,slave_nodes)
setup_hadoop(master_nodes,slave_nodes)
print "\n\nSpark Master Started, WebUI available at : http://" + master + ":8080"
def setup_hadoop(master_nodes,slave_nodes):
master = master_nodes[0]
print '[ Downloading hadoop ]'
ssh_command(master,"cd sigmoid;wget https://s3.amazonaws.com/sigmoidanalytics-builds/hadoop/hadoop-2.0.0-cdh4.2.0.tar.gz")
ssh_command(master,"cd sigmoid;tar zxf hadoop-2.0.0-cdh4.2.0.tar.gz")
ssh_command(master,"cd sigmoid;rm hadoop-2.0.0-cdh4.2.0.tar.gz")
print '[ Configuring Hadoop ]'
#Configure .bashrc
ssh_command(master,"echo '#HADOOP_CONFS' >> .bashrc")
ssh_command(master,"echo 'export JAVA_HOME=/usr/lib/jvm/java-1.7.0-openjdk.x86_64' >> .bashrc")
ssh_command(master,"echo 'export HADOOP_INSTALL=/home/`whoami`/sigmoid/hadoop-2.0.0-cdh4.2.0' >> .bashrc")
ssh_command(master,"echo 'export PATH=$PATH:\$HADOOP_INSTALL/bin' >> .bashrc")
ssh_command(master,"echo 'export PATH=$PATH:\$HADOOP_INSTALL/sbin' >> .bashrc")
ssh_command(master,"echo 'export HADOOP_MAPRED_HOME=\$HADOOP_INSTALL' >> .bashrc")
ssh_command(master,"echo 'export HADOOP_COMMON_HOME=\$HADOOP_INSTALL' >> .bashrc")
ssh_command(master,"echo 'export HADOOP_HDFS_HOME=\$HADOOP_INSTALL' >> .bashrc")
ssh_command(master,"echo 'export YARN_HOME=\$HADOOP_INSTALL' >> .bashrc")
#Remove *-site.xmls
ssh_command(master,"cd sigmoid/hadoop-2.0.0-cdh4.2.0;rm etc/hadoop/core-site.xml")
ssh_command(master,"cd sigmoid/hadoop-2.0.0-cdh4.2.0;rm etc/hadoop/yarn-site.xml")
ssh_command(master,"cd sigmoid/hadoop-2.0.0-cdh4.2.0;rm etc/hadoop/hdfs-site.xml")
#Download Our Confs
ssh_command(master,"cd sigmoid/hadoop-2.0.0-cdh4.2.0/etc/hadoop/;wget https://s3.amazonaws.com/sigmoidanalytics-builds/spark/0.9.1/gce/configs/core-site.xml")
ssh_command(master,"cd sigmoid/hadoop-2.0.0-cdh4.2.0/etc/hadoop/;wget https://s3.amazonaws.com/sigmoidanalytics-builds/spark/0.9.1/gce/configs/hdfs-site.xml")
ssh_command(master,"cd sigmoid/hadoop-2.0.0-cdh4.2.0/etc/hadoop/;wget https://s3.amazonaws.com/sigmoidanalytics-builds/spark/0.9.1/gce/configs/mapred-site.xml")
ssh_command(master,"cd sigmoid/hadoop-2.0.0-cdh4.2.0/etc/hadoop/;wget https://s3.amazonaws.com/sigmoidanalytics-builds/spark/0.9.1/gce/configs/yarn-site.xml")
#Config Core-site
ssh_command(master,"sed -i \"s/PUT-MASTER-IP/$(/sbin/ifconfig eth0 | grep \"inet addr:\" | cut -d: -f2 | cut -d\" \" -f1)/g\" sigmoid/hadoop-2.0.0-cdh4.2.0/etc/hadoop/core-site.xml")
#Create data/node dirs
ssh_command(master,"mkdir -p /mnt/hadoop/hdfs/namenode;mkdir -p /mnt/hadoop/hdfs/datanode")
#Config slaves
ssh_command(master,"cd sigmoid/hadoop-2.0.0-cdh4.2.0/etc/hadoop/;rm slaves")
for slave in slave_nodes:
ssh_command(master,"cd sigmoid/hadoop-2.0.0-cdh4.2.0/etc/hadoop/;echo " + slave + " >> slaves")
print '[ Rsyncing with Slaves ]'
#Rsync everything
for slave in slave_nodes:
ssh_command(master,"rsync -za /home/" + username + "/sigmoid " + slave + ":")
ssh_command(slave,"mkdir -p /mnt/hadoop/hdfs/namenode;mkdir -p /mnt/hadoop/hdfs/datanode")
ssh_command(master,"rsync -za /home/" + username + "/.bashrc " + slave + ":")
print '[ Formating namenode ]'
#Format namenode
ssh_command(master,"sigmoid/hadoop-2.0.0-cdh4.2.0/bin/hdfs namenode -format")
print '[ Starting DFS ]'
#Start dfs
ssh_command(master,"sigmoid/hadoop-2.0.0-cdh4.2.0/sbin/start-dfs.sh")
def setup_shark(master_nodes,slave_nodes):
master = master_nodes[0]
print '[ Downloading Shark binaries ]'
ssh_command(master,"cd sigmoid;wget https://s3.amazonaws.com/spark-ui/hive-0.11.0-bin.tgz")
ssh_command(master,"cd sigmoid;wget https://s3.amazonaws.com/spark-ui/shark-0.9-hadoop-2.0.0-mr1-cdh4.2.0.tar.gz")
ssh_command(master,"cd sigmoid;tar zxf hive-0.11.0-bin.tgz")
ssh_command(master,"cd sigmoid;tar zxf shark-0.9-hadoop-2.0.0-mr1-cdh4.2.0.tar.gz")
ssh_command(master,"rm sigmoid/hive-0.11.0-bin.tgz")
ssh_command(master,"rm sigmoid/shark-0.9-hadoop-2.0.0-mr1-cdh4.2.0.tar.gz")
print '[ Configuring Shark ]'
ssh_command(master,"cd sigmoid/shark/;echo \"export SHARK_MASTER_MEM=1g\" > conf/shark-env.sh")
ssh_command(master,"cd sigmoid/shark/;echo \"SPARK_JAVA_OPTS+=\\\" -Dspark.kryoserializer.buffer.mb=10 \\\"\" >> conf/shark-env.sh")
ssh_command(master,"cd sigmoid/shark/;echo \"export SPARK_JAVA_OPTS\" >> conf/shark-env.sh")
ssh_command(master,"cd sigmoid/shark/;echo \"export HIVE_HOME=/home/`whoami`/sigmoid/hive-0.11.0-bin\" >> conf/shark-env.sh")
ssh_command(master,"cd sigmoid/shark/;echo \"export SPARK_JAVA_OPTS\" >> conf/shark-env.sh")
ssh_command(master,"cd sigmoid/shark/;echo \"export MASTER=spark://PUT_MASTER_IP_HERE:7077\" >> conf/shark-env.sh")
ssh_command(master,"cd sigmoid/shark/;echo \"export SPARK_HOME=/home/`whoami`/sigmoid/spark-0.9.1-bin-cdh4\" >> conf/shark-env.sh")
ssh_command(master,"mkdir /mnt/tachyon")
ssh_command(master,"cd sigmoid/shark/;echo \"export TACHYON_MASTER=PUT_MASTER_IP_HERE:19998\" >> conf/shark-env.sh")
ssh_command(master,"cd sigmoid/shark/;echo \"export TACHYON_WAREHOUSE_PATH=/mnt/tachyon\" >> conf/shark-env.sh")
ssh_command(master,"cd sigmoid/shark/;echo \"source /home/`whoami`/sigmoid/spark-0.9.1-bin-cdh4/conf/spark-env.sh\" >> conf/shark-env.sh")
ssh_command(master,"sed -i \"s/PUT_MASTER_IP_HERE/$(/sbin/ifconfig eth0 | grep \"inet addr:\" | cut -d: -f2 | cut -d\" \" -f1)/g\" sigmoid/shark/conf/shark-env.sh")
ssh_command(master,"chmod +x sigmoid/shark/conf/shark-env.sh")
print '[ Rsyncing Shark on slaves ]'
for slave in slave_nodes:
ssh_command(master,"rsync -za /home/" + username + "/sigmoid " + slave + ":")
print '[ Starting Shark Server ]'
ssh_command(master,"cd sigmoid/shark/;./bin/shark --service sharkserver 10000 > log.txt 2>&1 &")
def show_banner():
os.system("wget -qO- https://s3.amazonaws.com/sigmoidanalytics-builds/spark/0.9.1/gce/configs/banner")
def real_main():
show_banner()
print "[ Script Started ]"
#Read the arguments
read_args()
#Make sure gcloud is accessible.
check_gcloud()
#Launch the cluster
launch_cluster()
#Wait some time for machines to boot up
print '[ Waiting 120 Seconds for Machines to start up ]'
time.sleep(120)
#Get Master/Slave IP Addresses
(master_nodes, slave_nodes) = get_cluster_ips()
#Install Java and build-essential
install_java(master_nodes,slave_nodes)
#Generate SSH keys and deploy
deploy_keys(master_nodes,slave_nodes)
#Attach a new empty drive and format it
attach_drive(master_nodes,slave_nodes)
#Set up Spark/Shark/Hadoop
setup_spark(master_nodes,slave_nodes)
def main():
try:
real_main()
except Exception as e:
print >> stderr, "\nError:\n", e
if __name__ == "__main__":
main()<|fim▁end|> | def setup_spark(master_nodes,slave_nodes): |
<|file_name|>auxpow_testing.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2014-2019 by Daniel Kraft
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Utility routines for auxpow that are needed specifically by the regtests.
# This is mostly about actually *solving* an auxpow block (with regtest
# difficulty) or inspecting the information for verification.
import binascii
from test_framework import auxpow
def computeAuxpow(block, target, ok):
"""
Build an auxpow object (serialised as hex string) that solves
(ok = True) or doesn't solve (ok = False) the block.
"""
(tx, header) = auxpow.constructAuxpow(block)
(header, _) = mineBlock(header, target, ok)
return auxpow.finishAuxpow(tx, header)
def mineAuxpowBlock(node):
"""
Mine an auxpow block on the given RPC connection. This uses the
createauxblock and submitauxblock command pair.
"""
def create():
addr = node.getnewaddress()
return node.createauxblock(addr)
return mineAuxpowBlockWithMethods(create, node.submitauxblock)
def mineAuxpowBlockWithMethods(create, submit):
"""
Mine an auxpow block, using the given methods for creation and submission.
"""
auxblock = create()
target = auxpow.reverseHex(auxblock['_target'])
apow = computeAuxpow(auxblock['hash'], target, True)
res = submit(auxblock['hash'], apow)
assert res
return auxblock['hash']
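# Usage sketch: the same helper can drive any compatible create/submit RPC
# pair. The 'getauxblock' calls below are an assumption for illustration and
# are not required by this module:
#   mineAuxpowBlockWithMethods(lambda: node.getauxblock(),
#                              lambda blkHash, apow: node.getauxblock(blkHash, apow))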
def getCoinbaseAddr(node, blockHash):
"""
Extract the coinbase tx' payout address for the given block.
"""
blockData = node.getblock(blockHash)
txn = blockData['tx']
assert len(txn) >= 1
txData = node.getrawtransaction(txn[0], True, blockHash)
assert len(txData['vout']) >= 1 and len(txData['vin']) == 1
assert 'coinbase' in txData['vin'][0]
addr = txData['vout'][0]['scriptPubKey']['addresses']
assert len(addr) == 1
return addr[0]<|fim▁hole|> Given a block header, update the nonce until it is ok(or not)
for the given target.
"""
data = bytearray(binascii.unhexlify(header))
while True:
assert data[79] < 255
data[79] += 1
hexData = binascii.hexlify(data)
blockhash = auxpow.getScryptPoW(hexData)
if (ok and blockhash < target) or((not ok) and blockhash > target):
break
return (hexData, blockhash)<|fim▁end|> |
def mineBlock(header, target, ok):
""" |
<|file_name|>StoragePoolService.go<|end_file_name|><|fim▁begin|><|fim▁hole|>//
// Copyright 2014, Sander van Harmelen
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package cloudstack44
import (
"encoding/json"
"net/url"
"strconv"
)
type ListStorageProvidersParams struct {
p map[string]interface{}
}
func (p *ListStorageProvidersParams) toURLValues() url.Values {
u := url.Values{}
if p.p == nil {
return u
}
if v, found := p.p["keyword"]; found {
u.Set("keyword", v.(string))
}
if v, found := p.p["page"]; found {
vv := strconv.Itoa(v.(int))
u.Set("page", vv)
}
if v, found := p.p["pagesize"]; found {
vv := strconv.Itoa(v.(int))
u.Set("pagesize", vv)
}
if v, found := p.p["type"]; found {
u.Set("type", v.(string))
}
return u
}
func (p *ListStorageProvidersParams) SetKeyword(v string) {
if p.p == nil {
p.p = make(map[string]interface{})
}
p.p["keyword"] = v
return
}
func (p *ListStorageProvidersParams) SetPage(v int) {
if p.p == nil {
p.p = make(map[string]interface{})
}
p.p["page"] = v
return
}
func (p *ListStorageProvidersParams) SetPagesize(v int) {
if p.p == nil {
p.p = make(map[string]interface{})
}
p.p["pagesize"] = v
return
}
func (p *ListStorageProvidersParams) SetType(v string) {
if p.p == nil {
p.p = make(map[string]interface{})
}
p.p["storagePoolType"] = v
return
}
// You should always use this function to get a new ListStorageProvidersParams instance,
// as then you are sure you have configured all required params
func (s *StoragePoolService) NewListStorageProvidersParams(storagePoolType string) *ListStorageProvidersParams {
p := &ListStorageProvidersParams{}
p.p = make(map[string]interface{})
p.p["storagePoolType"] = storagePoolType
return p
}
// Lists storage providers.
func (s *StoragePoolService) ListStorageProviders(p *ListStorageProvidersParams) (*ListStorageProvidersResponse, error) {
resp, err := s.cs.newRequest("listStorageProviders", p.toURLValues())
if err != nil {
return nil, err
}
var r ListStorageProvidersResponse
if err := json.Unmarshal(resp, &r); err != nil {
return nil, err
}
return &r, nil
}
type ListStorageProvidersResponse struct {
Count int `json:"count"`
StorageProviders []*StorageProvider `json:"storageprovider"`
}
type StorageProvider struct {
Name string `json:"name,omitempty"`
Type string `json:"type,omitempty"`
}
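// Usage sketch: the client value `cs` and its `StoragePool` field below are
// assumptions for illustration; adapt them to however the CloudStack client
// is constructed in the calling code.
//
//	p := cs.StoragePool.NewListStorageProvidersParams("primary")
//	r, err := cs.StoragePool.ListStorageProviders(p)
//	if err == nil {
//		for _, provider := range r.StorageProviders {
//			fmt.Println(provider.Name, provider.Type)
//		}
//	}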
type EnableStorageMaintenanceParams struct {
p map[string]interface{}
}
func (p *EnableStorageMaintenanceParams) toURLValues() url.Values {
u := url.Values{}
if p.p == nil {
return u
}
if v, found := p.p["id"]; found {
u.Set("id", v.(string))
}
return u
}
func (p *EnableStorageMaintenanceParams) SetId(v string) {
if p.p == nil {
p.p = make(map[string]interface{})
}
p.p["id"] = v
return
}
// You should always use this function to get a new EnableStorageMaintenanceParams instance,
// as then you are sure you have configured all required params
func (s *StoragePoolService) NewEnableStorageMaintenanceParams(id string) *EnableStorageMaintenanceParams {
p := &EnableStorageMaintenanceParams{}
p.p = make(map[string]interface{})
p.p["id"] = id
return p
}
// Puts storage pool into maintenance state
func (s *StoragePoolService) EnableStorageMaintenance(p *EnableStorageMaintenanceParams) (*EnableStorageMaintenanceResponse, error) {
resp, err := s.cs.newRequest("enableStorageMaintenance", p.toURLValues())
if err != nil {
return nil, err
}
var r EnableStorageMaintenanceResponse
if err := json.Unmarshal(resp, &r); err != nil {
return nil, err
}
// If we have a async client, we need to wait for the async result
if s.cs.async {
b, warn, err := s.cs.GetAsyncJobResult(r.JobID, s.cs.timeout)
if err != nil {
return nil, err
}
// If 'warn' has a value it means the job is running longer than the configured
// timeout; the response will contain the jobid of the running async job
if warn != nil {
return &r, warn
}
b, err = getRawValue(b)
if err != nil {
return nil, err
}
if err := json.Unmarshal(b, &r); err != nil {
return nil, err
}
}
return &r, nil
}
type EnableStorageMaintenanceResponse struct {
JobID string `json:"jobid,omitempty"`
Capacityiops int64 `json:"capacityiops,omitempty"`
Clusterid string `json:"clusterid,omitempty"`
Clustername string `json:"clustername,omitempty"`
Created string `json:"created,omitempty"`
Disksizeallocated int64 `json:"disksizeallocated,omitempty"`
Disksizetotal int64 `json:"disksizetotal,omitempty"`
Disksizeused int64 `json:"disksizeused,omitempty"`
Hypervisor string `json:"hypervisor,omitempty"`
Id string `json:"id,omitempty"`
Ipaddress string `json:"ipaddress,omitempty"`
Name string `json:"name,omitempty"`
Overprovisionfactor string `json:"overprovisionfactor,omitempty"`
Path string `json:"path,omitempty"`
Podid string `json:"podid,omitempty"`
Podname string `json:"podname,omitempty"`
Scope string `json:"scope,omitempty"`
State string `json:"state,omitempty"`
Storagecapabilities map[string]string `json:"storagecapabilities,omitempty"`
Suitableformigration bool `json:"suitableformigration,omitempty"`
Tags string `json:"tags,omitempty"`
Type string `json:"type,omitempty"`
Zoneid string `json:"zoneid,omitempty"`
Zonename string `json:"zonename,omitempty"`
}
type CancelStorageMaintenanceParams struct {
p map[string]interface{}
}
func (p *CancelStorageMaintenanceParams) toURLValues() url.Values {
u := url.Values{}
if p.p == nil {
return u
}
if v, found := p.p["id"]; found {
u.Set("id", v.(string))
}
return u
}
func (p *CancelStorageMaintenanceParams) SetId(v string) {
if p.p == nil {
p.p = make(map[string]interface{})
}
p.p["id"] = v
return
}
// You should always use this function to get a new CancelStorageMaintenanceParams instance,
// as then you are sure you have configured all required params
func (s *StoragePoolService) NewCancelStorageMaintenanceParams(id string) *CancelStorageMaintenanceParams {
p := &CancelStorageMaintenanceParams{}
p.p = make(map[string]interface{})
p.p["id"] = id
return p
}
// Cancels maintenance for primary storage
func (s *StoragePoolService) CancelStorageMaintenance(p *CancelStorageMaintenanceParams) (*CancelStorageMaintenanceResponse, error) {
resp, err := s.cs.newRequest("cancelStorageMaintenance", p.toURLValues())
if err != nil {
return nil, err
}
var r CancelStorageMaintenanceResponse
if err := json.Unmarshal(resp, &r); err != nil {
return nil, err
}
// If we have a async client, we need to wait for the async result
if s.cs.async {
b, warn, err := s.cs.GetAsyncJobResult(r.JobID, s.cs.timeout)
if err != nil {
return nil, err
}
// If 'warn' has a value it means the job is running longer than the configured
// timeout; the response will contain the jobid of the running async job
if warn != nil {
return &r, warn
}
b, err = getRawValue(b)
if err != nil {
return nil, err
}
if err := json.Unmarshal(b, &r); err != nil {
return nil, err
}
}
return &r, nil
}
type CancelStorageMaintenanceResponse struct {
JobID string `json:"jobid,omitempty"`
Capacityiops int64 `json:"capacityiops,omitempty"`
Clusterid string `json:"clusterid,omitempty"`
Clustername string `json:"clustername,omitempty"`
Created string `json:"created,omitempty"`
Disksizeallocated int64 `json:"disksizeallocated,omitempty"`
Disksizetotal int64 `json:"disksizetotal,omitempty"`
Disksizeused int64 `json:"disksizeused,omitempty"`
Hypervisor string `json:"hypervisor,omitempty"`
Id string `json:"id,omitempty"`
Ipaddress string `json:"ipaddress,omitempty"`
Name string `json:"name,omitempty"`
Overprovisionfactor string `json:"overprovisionfactor,omitempty"`
Path string `json:"path,omitempty"`
Podid string `json:"podid,omitempty"`
Podname string `json:"podname,omitempty"`
Scope string `json:"scope,omitempty"`
State string `json:"state,omitempty"`
Storagecapabilities map[string]string `json:"storagecapabilities,omitempty"`
Suitableformigration bool `json:"suitableformigration,omitempty"`
Tags string `json:"tags,omitempty"`
Type string `json:"type,omitempty"`
Zoneid string `json:"zoneid,omitempty"`
Zonename string `json:"zonename,omitempty"`
}<|fim▁end|> | |
<|file_name|>template.go<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package components
const (
TYPEDELIMITER = "::"
TYPEROUTER = "router"
TYPENETWORK = "network"
TYPEINSTANCE = "instance"
GROUPINSTANCE = "ernest.instance_group"
PROVIDERTYPE = `$(components.#[_component_id="credentials::vcloud"]._provider)`
DATACENTERNAME = `$(components.#[_component_id="credentials::vcloud"].vdc)`
DATACENTERTYPE = `$(components.#[_component_id="credentials::vcloud"]._provider)`
DATACENTERUSERNAME = `$(components.#[_component_id="credentials::vcloud"].username)`
DATACENTERPASSWORD = `$(components.#[_component_id="credentials::vcloud"].password)`<|fim▁hole|>)<|fim▁end|> | DATACENTERREGION = `$(components.#[_component_id="credentials::vcloud"].region)`
VCLOUDURL = `$(components.#[_component_id="credentials::vcloud"].vcloud_url)` |
<|file_name|>test_config.py<|end_file_name|><|fim▁begin|># Copyright (C) 2015-2022 by the RBniCS authors
#
# This file is part of RBniCS.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
import os
import sys
from rbnics.utils.config import Config
def test_config(tempdir):
# Create a default configuration
config = Config()
# Write config to stdout
print("===============")
config.write(sys.stdout)
print("===============")
# Change options
config.set("backends", "online backend", "online")
config.set("problems", "cache", {"disk"})
# Write config to stdout
print("===============")
config.write(sys.stdout)
print("===============")
# Write config to file
config.write(os.path.join(tempdir, ".rbnicsrc"))
# Check that file has been written
assert os.path.isfile(os.path.join(tempdir, ".rbnicsrc"))<|fim▁hole|> # Read back in
config2 = Config()
config2.read(tempdir)
# Write config2 to stdout
print("===============")
config2.write(sys.stdout)
print("===============")
# Check that read was successful
assert config == config2<|fim▁end|> | |
<|file_name|>attack.cpp<|end_file_name|><|fim▁begin|>/*
Copyright (C) 2003 - 2018 by David White <[email protected]>
Part of the Battle for Wesnoth Project https://www.wesnoth.org/
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY.
See the COPYING file for more details.
*/
/**
* @file
* Fighting.
*/
#include "actions/attack.hpp"
#include "actions/advancement.hpp"
#include "actions/vision.hpp"
#include "ai/lua/aspect_advancements.hpp"
#include "formula/callable_objects.hpp"
#include "formula/formula.hpp"
#include "game_config.hpp"
#include "game_data.hpp"
#include "game_events/pump.hpp"
#include "gettext.hpp"
#include "log.hpp"
#include "map/map.hpp"
#include "mouse_handler_base.hpp"
#include "play_controller.hpp"
#include "preferences/game.hpp"
#include "random.hpp"
#include "replay.hpp"
#include "resources.hpp"
#include "statistics.hpp"
#include "synced_checkup.hpp"
#include "synced_user_choice.hpp"
#include "team.hpp"
#include "tod_manager.hpp"
#include "units/abilities.hpp"
#include "units/animation_component.hpp"
#include "units/helper.hpp"
#include "units/filter.hpp"
#include "units/map.hpp"
#include "units/udisplay.hpp"
#include "units/unit.hpp"
#include "whiteboard/manager.hpp"
#include "wml_exception.hpp"
static lg::log_domain log_engine("engine");
#define DBG_NG LOG_STREAM(debug, log_engine)
#define LOG_NG LOG_STREAM(info, log_engine)
#define WRN_NG LOG_STREAM(err, log_engine)
#define ERR_NG LOG_STREAM(err, log_engine)
static lg::log_domain log_attack("engine/attack");
#define DBG_AT LOG_STREAM(debug, log_attack)
#define LOG_AT LOG_STREAM(info, log_attack)
#define WRN_AT LOG_STREAM(err, log_attack)
#define ERR_AT LOG_STREAM(err, log_attack)
static lg::log_domain log_config("config");
#define LOG_CF LOG_STREAM(info, log_config)
// ==================================================================================
// BATTLE CONTEXT UNIT STATS
// ==================================================================================
battle_context_unit_stats::battle_context_unit_stats(const unit& u,
const map_location& u_loc,
int u_attack_num,
bool attacking,
const unit& opp,
const map_location& opp_loc,
const_attack_ptr opp_weapon,
const unit_map& units)
: weapon(nullptr)
, attack_num(u_attack_num)
, is_attacker(attacking)
, is_poisoned(u.get_state(unit::STATE_POISONED))
, is_slowed(u.get_state(unit::STATE_SLOWED))
, slows(false)
, drains(false)
, petrifies(false)
, plagues(false)
, poisons(false)
, backstab_pos(false)
, swarm(false)
, firststrike(false)
, disable(false)
, experience(u.experience())
, max_experience(u.max_experience())
, level(u.level())
, rounds(1)
, hp(0)
, max_hp(u.max_hitpoints())
, chance_to_hit(0)
, damage(0)
, slow_damage(0)
, drain_percent(0)
, drain_constant(0)
, num_blows(0)
, swarm_min(0)
, swarm_max(0)
, plague_type()
{
// Get the current state of the unit.
if(attack_num >= 0) {
weapon = u.attacks()[attack_num].shared_from_this();
}
if(u.hitpoints() < 0) {
LOG_CF << "Unit with " << u.hitpoints() << " hitpoints found, set to 0 for damage calculations\n";
hp = 0;
} else if(u.hitpoints() > u.max_hitpoints()) {
// If a unit has more hp than its maximum, the engine will fail with an
// assertion failure due to accessing the prob_matrix out of bounds.
hp = u.max_hitpoints();
} else {
hp = u.hitpoints();
}
// Exit if no weapon.
if(!weapon) {
return;
}
// Get the weapon characteristics as appropriate.
auto ctx = weapon->specials_context(&u, &opp, u_loc, opp_loc, attacking, opp_weapon);
boost::optional<decltype(ctx)> opp_ctx;<|fim▁hole|>
slows = weapon->bool_ability("slow");
drains = !opp.get_state("undrainable") && weapon->bool_ability("drains");
petrifies = weapon->bool_ability("petrifies");
poisons = !opp.get_state("unpoisonable") && weapon->bool_ability("poison") && !opp.get_state(unit::STATE_POISONED);
backstab_pos = is_attacker && backstab_check(u_loc, opp_loc, units, resources::gameboard->teams());
rounds = weapon->get_specials("berserk").highest("value", 1).first;
if(weapon->combat_ability("berserk", 1).second) {
rounds = weapon->combat_ability("berserk", 1).first;
}
firststrike = weapon->bool_ability("firststrike");
{
const int distance = distance_between(u_loc, opp_loc);
const bool out_of_range = distance > weapon->max_range() || distance < weapon->min_range();
disable = weapon->get_special_bool("disable") || out_of_range;
}
// Handle plague.
unit_ability_list plague_specials = weapon->get_specials("plague");
plagues = !opp.get_state("unplagueable") && !plague_specials.empty() &&
opp.undead_variation() != "null" && !resources::gameboard->map().is_village(opp_loc);
if(plagues) {
plague_type = (*plague_specials.front().first)["type"].str();
if(plague_type.empty()) {
plague_type = u.type().base_id();
}
}
// Compute chance to hit.
signed int cth = opp.defense_modifier(resources::gameboard->map().get_terrain(opp_loc)) + weapon->accuracy()
- (opp_weapon ? opp_weapon->parry() : 0);
cth = utils::clamp(cth, 0, 100);
unit_ability_list cth_specials = weapon->get_specials("chance_to_hit");
unit_abilities::effect cth_effects(cth_specials, cth, backstab_pos);
cth = cth_effects.get_composite_value();
cth = utils::clamp(cth, 0, 100);
cth = weapon->combat_ability("chance_to_hit", cth, backstab_pos).first;
if(opp.get_state("invulnerable")) {
cth = 0;
}
chance_to_hit = utils::clamp(cth, 0, 100);
// Compute base damage done with the weapon.
int base_damage = weapon->modified_damage(backstab_pos);
// Get the damage multiplier applied to the base damage of the weapon.
int damage_multiplier = 100;
// Time of day bonus.
damage_multiplier += combat_modifier(
resources::gameboard->units(), resources::gameboard->map(), u_loc, u.alignment(), u.is_fearless());
// Leadership bonus.
int leader_bonus = under_leadership(u, u_loc, weapon, opp_weapon);
if(leader_bonus != 0) {
damage_multiplier += leader_bonus;
}
// Resistance modifier.
damage_multiplier *= opp.damage_from(*weapon, !attacking, opp_loc, opp_weapon);
// Compute both the normal and slowed damage.
damage = round_damage(base_damage, damage_multiplier, 10000);
slow_damage = round_damage(base_damage, damage_multiplier, 20000);
if(is_slowed) {
damage = slow_damage;
}
// Compute drain amounts only if draining is possible.
if(drains) {
if (weapon->get_special_bool("drains")) {
unit_ability_list drain_specials = weapon->get_specials("drains");
// Compute the drain percent (with 50% as the base for backward compatibility)
unit_abilities::effect drain_percent_effects(drain_specials, 50, backstab_pos);
drain_percent = drain_percent_effects.get_composite_value();
}
if (weapon->combat_ability("drains", 50, backstab_pos).second) {
drain_percent = weapon->combat_ability("drains", 50, backstab_pos).first;
}
}
// Add heal_on_hit (the drain constant)
unit_ability_list heal_on_hit_specials = weapon->get_specials("heal_on_hit");
unit_abilities::effect heal_on_hit_effects(heal_on_hit_specials, 0, backstab_pos);
drain_constant += heal_on_hit_effects.get_composite_value();
drains = drain_constant || drain_percent;
// Compute the number of blows and handle swarm.
weapon->modified_attacks(backstab_pos, swarm_min, swarm_max);
swarm = swarm_min != swarm_max;
num_blows = calc_blows(hp);
}
battle_context_unit_stats::battle_context_unit_stats(const unit_type* u_type,
const_attack_ptr att_weapon,
bool attacking,
const unit_type* opp_type,
const_attack_ptr opp_weapon,
unsigned int opp_terrain_defense,
int lawful_bonus)
: weapon(att_weapon)
, attack_num(-2) // This is and stays invalid. Always use weapon when using this constructor.
, is_attacker(attacking)
, is_poisoned(false)
, is_slowed(false)
, slows(false)
, drains(false)
, petrifies(false)
, plagues(false)
, poisons(false)
, backstab_pos(false)
, swarm(false)
, firststrike(false)
, disable(false)
, experience(0)
, max_experience(0)
, level(0)
, rounds(1)
, hp(0)
, max_hp(0)
, chance_to_hit(0)
, damage(0)
, slow_damage(0)
, drain_percent(0)
, drain_constant(0)
, num_blows(0)
, swarm_min(0)
, swarm_max(0)
, plague_type()
{
if(!u_type || !opp_type) {
return;
}
// Get the current state of the unit.
if(u_type->hitpoints() < 0) {
hp = 0;
} else {
hp = u_type->hitpoints();
}
max_experience = u_type->experience_needed();
level = (u_type->level());
max_hp = (u_type->hitpoints());
// Exit if no weapon.
if(!weapon) {
return;
}
// Get the weapon characteristics as appropriate.
auto ctx = weapon->specials_context(*u_type, map_location::null_location(), attacking);
boost::optional<decltype(ctx)> opp_ctx;
if(opp_weapon) {
opp_ctx.emplace(opp_weapon->specials_context(*opp_type, map_location::null_location(), !attacking));
}
slows = weapon->get_special_bool("slow");
drains = !opp_type->musthave_status("undrainable") && weapon->get_special_bool("drains");
petrifies = weapon->get_special_bool("petrifies");
poisons = !opp_type->musthave_status("unpoisonable") && weapon->get_special_bool("poison");
rounds = weapon->get_specials("berserk").highest("value", 1).first;
firststrike = weapon->get_special_bool("firststrike");
disable = weapon->get_special_bool("disable");
unit_ability_list plague_specials = weapon->get_specials("plague");
plagues = !opp_type->musthave_status("unplagueable") && !plague_specials.empty() &&
opp_type->undead_variation() != "null";
if(plagues) {
plague_type = (*plague_specials.front().first)["type"].str();
if(plague_type.empty()) {
plague_type = u_type->base_id();
}
}
signed int cth = 100 - opp_terrain_defense + weapon->accuracy() - (opp_weapon ? opp_weapon->parry() : 0);
cth = utils::clamp(cth, 0, 100);
unit_ability_list cth_specials = weapon->get_specials("chance_to_hit");
unit_abilities::effect cth_effects(cth_specials, cth, backstab_pos);
cth = cth_effects.get_composite_value();
chance_to_hit = utils::clamp(cth, 0, 100);
int base_damage = weapon->modified_damage(backstab_pos);
int damage_multiplier = 100;
damage_multiplier
+= generic_combat_modifier(lawful_bonus, u_type->alignment(), u_type->musthave_status("fearless"), 0);
damage_multiplier *= opp_type->resistance_against(weapon->type(), !attacking);
damage = round_damage(base_damage, damage_multiplier, 10000);
slow_damage = round_damage(base_damage, damage_multiplier, 20000);
if(drains) {
unit_ability_list drain_specials = weapon->get_specials("drains");
// Compute the drain percent (with 50% as the base for backward compatibility)
unit_abilities::effect drain_percent_effects(drain_specials, 50, backstab_pos);
drain_percent = drain_percent_effects.get_composite_value();
}
// Add heal_on_hit (the drain constant)
unit_ability_list heal_on_hit_specials = weapon->get_specials("heal_on_hit");
unit_abilities::effect heal_on_hit_effects(heal_on_hit_specials, 0, backstab_pos);
drain_constant += heal_on_hit_effects.get_composite_value();
drains = drain_constant || drain_percent;
// Compute the number of blows and handle swarm.
weapon->modified_attacks(backstab_pos, swarm_min, swarm_max);
swarm = swarm_min != swarm_max;
num_blows = calc_blows(hp);
}
// ==================================================================================
// BATTLE CONTEXT
// ==================================================================================
battle_context::battle_context(
const unit& attacker,
const map_location& a_loc,
int a_wep_index,
const unit& defender,
const map_location& d_loc,
int d_wep_index,
const unit_map& units)
: attacker_stats_()
, defender_stats_()
, attacker_combatant_()
, defender_combatant_()
{
size_t a_wep_uindex = static_cast<size_t>(a_wep_index);
size_t d_wep_uindex = static_cast<size_t>(d_wep_index);
const_attack_ptr a_wep(a_wep_uindex < attacker.attacks().size() ? attacker.attacks()[a_wep_index].shared_from_this() : nullptr);
const_attack_ptr d_wep(d_wep_uindex < defender.attacks().size() ? defender.attacks()[d_wep_index].shared_from_this() : nullptr);
attacker_stats_.reset(new battle_context_unit_stats(attacker, a_loc, a_wep_index, true , defender, d_loc, d_wep, units));
defender_stats_.reset(new battle_context_unit_stats(defender, d_loc, d_wep_index, false, attacker, a_loc, a_wep, units));
}
void battle_context::simulate(const combatant* prev_def)
{
assert((attacker_combatant_.get() != nullptr) == (defender_combatant_.get() != nullptr));
assert(attacker_stats_);
assert(defender_stats_);
if(!attacker_combatant_) {
attacker_combatant_.reset(new combatant(*attacker_stats_));
defender_combatant_.reset(new combatant(*defender_stats_, prev_def));
attacker_combatant_->fight(*defender_combatant_);
}
}
// more like a factory method than a constructor, always calls one of the other constructors.
battle_context::battle_context(const unit_map& units,
const map_location& attacker_loc,
const map_location& defender_loc,
int attacker_weapon,
int defender_weapon,
double aggression,
const combatant* prev_def,
const unit* attacker_ptr,
const unit* defender_ptr)
: attacker_stats_(nullptr)
, defender_stats_(nullptr)
, attacker_combatant_(nullptr)
, defender_combatant_(nullptr)
{
//TODO: maybe check before dereferencing units.find(attacker_loc),units.find(defender_loc) ?
const unit& attacker = attacker_ptr ? *attacker_ptr : *units.find(attacker_loc);
const unit& defender = defender_ptr ? *defender_ptr : *units.find(defender_loc);
const double harm_weight = 1.0 - aggression;
if(attacker_weapon == -1) {
*this = choose_attacker_weapon(
attacker, defender, units, attacker_loc, defender_loc, harm_weight, prev_def
);
}
else if(defender_weapon == -1) {
*this = choose_defender_weapon(
attacker, defender, attacker_weapon, units, attacker_loc, defender_loc, prev_def
);
}
else {
*this = battle_context(attacker, attacker_loc, attacker_weapon, defender, defender_loc, defender_weapon, units);
}
assert(attacker_stats_);
assert(defender_stats_);
}
battle_context::battle_context(const battle_context_unit_stats& att, const battle_context_unit_stats& def)
: attacker_stats_(new battle_context_unit_stats(att))
, defender_stats_(new battle_context_unit_stats(def))
, attacker_combatant_(nullptr)
, defender_combatant_(nullptr)
{
}
/** @todo FIXME: better to initialize combatant initially (move into
battle_context_unit_stats?), just do fight() when required. */
const combatant& battle_context::get_attacker_combatant(const combatant* prev_def)
{
// We calculate this lazily, since AI doesn't always need it.
simulate(prev_def);
return *attacker_combatant_;
}
const combatant& battle_context::get_defender_combatant(const combatant* prev_def)
{
// We calculate this lazily, since AI doesn't always need it.
simulate(prev_def);
return *defender_combatant_;
}
// Given this harm_weight, are we better than that other context?
bool battle_context::better_attack(class battle_context& that, double harm_weight)
{
return better_combat(
get_attacker_combatant(),
get_defender_combatant(),
that.get_attacker_combatant(),
that.get_defender_combatant(),
harm_weight
);
}
// Given this harm_weight, are we better than that other context?
bool battle_context::better_defense(class battle_context& that, double harm_weight)
{
return better_combat(
get_defender_combatant(),
get_attacker_combatant(),
that.get_defender_combatant(),
that.get_attacker_combatant(),
harm_weight
);
}
// Does combat A give us a better result than combat B?
bool battle_context::better_combat(const combatant& us_a,
const combatant& them_a,
const combatant& us_b,
const combatant& them_b,
double harm_weight)
{
double a, b;
// Compare: P(we kill them) - P(they kill us).
a = them_a.hp_dist[0] - us_a.hp_dist[0] * harm_weight;
b = them_b.hp_dist[0] - us_b.hp_dist[0] * harm_weight;
if(a - b < -0.01) {
return false;
}
if(a - b > 0.01) {
return true;
}
// Add poison to calculations
double poison_a_us = (us_a.poisoned) * game_config::poison_amount;
double poison_a_them = (them_a.poisoned) * game_config::poison_amount;
double poison_b_us = (us_b.poisoned) * game_config::poison_amount;
double poison_b_them = (them_b.poisoned) * game_config::poison_amount;
// Compare: damage to them - damage to us (average_hp replaces -damage)
a = (us_a.average_hp() - poison_a_us) * harm_weight - (them_a.average_hp() - poison_a_them);
b = (us_b.average_hp() - poison_b_us) * harm_weight - (them_b.average_hp() - poison_b_them);
if(a - b < -0.01) {
return false;
}
if(a - b > 0.01) {
return true;
}
// All else equal: go for most damage.
return them_a.average_hp() < them_b.average_hp();
}
battle_context battle_context::choose_attacker_weapon(const unit& attacker,
const unit& defender,
const unit_map& units,
const map_location& attacker_loc,
const map_location& defender_loc,
double harm_weight,
const combatant* prev_def)
{
log_scope2(log_attack, "choose_attacker_weapon");
std::vector<battle_context> choices;
// What options does attacker have?
for(size_t i = 0; i < attacker.attacks().size(); ++i) {
const attack_type& att = attacker.attacks()[i];
if(att.attack_weight() <= 0) {
continue;
}
battle_context bc = choose_defender_weapon(attacker, defender, i, units, attacker_loc, defender_loc, prev_def);
//choose_defender_weapon will always choose the weapon that disables the attacker's weapon if possible.
if(bc.attacker_stats_->disable) {
continue;
}
choices.emplace_back(std::move(bc));
}
if(choices.empty()) {
return battle_context(attacker, attacker_loc, -1, defender, defender_loc, -1, units);
}
if(choices.size() == 1) {
return std::move(choices[0]);
}
// Multiple options: simulate them, save best.
battle_context* best_choice = nullptr;
for(auto& choice : choices) {
// If choose_defender_weapon didn't simulate, do so now.
choice.simulate(prev_def);
if(!best_choice || choice.better_attack(*best_choice, harm_weight)) {
best_choice = &choice;
}
}
if(best_choice) {
return std::move(*best_choice);
}
else {
return battle_context(attacker, attacker_loc, -1, defender, defender_loc, -1, units);
}
}
/** @todo FIXME: Hand previous defender unit in here. */
battle_context battle_context::choose_defender_weapon(const unit& attacker,
const unit& defender,
unsigned attacker_weapon,
const unit_map& units,
const map_location& attacker_loc,
const map_location& defender_loc,
const combatant* prev_def)
{
log_scope2(log_attack, "choose_defender_weapon");
VALIDATE(attacker_weapon < attacker.attacks().size(), _("An invalid attacker weapon got selected."));
const attack_type& att = attacker.attacks()[attacker_weapon];
auto no_weapon = [&]() { return battle_context(attacker, attacker_loc, attacker_weapon, defender, defender_loc, -1, units); };
std::vector<battle_context> choices;
// What options does defender have?
for(size_t i = 0; i < defender.attacks().size(); ++i) {
const attack_type& def = defender.attacks()[i];
if(def.range() != att.range() || def.defense_weight() <= 0) {
//no need to calculate the battle_context here.
continue;
}
battle_context bc(attacker, attacker_loc, attacker_weapon, defender, defender_loc, i, units);
if(bc.defender_stats_->disable) {
continue;
}
if(bc.attacker_stats_->disable) {
//the defender's attack disables the attacker's attack: always choose this one.
return bc;
}
choices.emplace_back(std::move(bc));
}
if(choices.empty()) {
return no_weapon();
}
if(choices.size() == 1) {
//only one usable weapon, don't simulate
return std::move(choices[0]);
}
// Multiple options:
// First pass: get the best weight and the minimum simple rating for this weight.
// simple rating = number of blows * damage per blow (resistance taken into account) * cth * weight
// Eligible attacks for defense should have a simple rating greater than or equal to this minimum.
int min_rating = 0;
{
double max_weight = 0.0;
for(const auto& choice : choices) {
const attack_type& def = defender.attacks()[choice.defender_stats_->attack_num];
if(def.defense_weight() >= max_weight) {
const battle_context_unit_stats& def_stats = *choice.defender_stats_;
max_weight = def.defense_weight();
int rating = static_cast<int>(
def_stats.num_blows * def_stats.damage * def_stats.chance_to_hit * def.defense_weight());
if(def.defense_weight() > max_weight || rating < min_rating) {
min_rating = rating;
}
}
}
}
battle_context* best_choice = nullptr;
// Multiple options: simulate them, save best.
for(auto& choice : choices) {
const attack_type& def = defender.attacks()[choice.defender_stats_->attack_num];
choice.simulate(prev_def);
int simple_rating = static_cast<int>(
choice.defender_stats_->num_blows * choice.defender_stats_->damage * choice.defender_stats_->chance_to_hit * def.defense_weight());
//FIXME: make sure there is no mistake in the better_combat call.
if(simple_rating >= min_rating && (!best_choice || choice.better_defense(*best_choice, 1.0))) {
best_choice = &choice;
}
}
return best_choice ? std::move(*best_choice) : no_weapon();
}
// ==================================================================================
// HELPERS
// ==================================================================================
namespace
{
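// Keeps weap_index pointing at the attack whose id is weap_id; falls back to -1 when it cannot be resolved.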
void refresh_weapon_index(int& weap_index, const std::string& weap_id, attack_itors attacks)
{
// No attacks to choose from.
if(attacks.empty()) {
weap_index = -1;
return;
}
// The currently selected attack fits.
if(weap_index >= 0 && weap_index < static_cast<int>(attacks.size()) && attacks[weap_index].id() == weap_id) {
return;
}
// Look up the weapon by id.
if(!weap_id.empty()) {
for(int i = 0; i < static_cast<int>(attacks.size()); ++i) {
if(attacks[i].id() == weap_id) {
weap_index = i;
return;
}
}
}
// Lookup has failed.
weap_index = -1;
return;
}
/** Helper class for performing an attack. */
class attack
{
public:
attack(const map_location& attacker,
const map_location& defender,
int attack_with,
int defend_with,
bool update_display = true);
void perform();
private:
class attack_end_exception
{
};
bool perform_hit(bool, statistics::attack_context&);
void fire_event(const std::string& n);
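/** Rebuilds bc_ and the stats pointers after an event may have changed or removed either unit. */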
void refresh_bc();
/** Structure holding unit info used in the attack action. */
struct unit_info
{
const map_location loc_;
int weapon_;
unit_map& units_;
std::size_t id_; /**< unit.underlying_id() */
std::string weap_id_;
int orig_attacks_;
int n_attacks_; /**< Number of attacks left. */
int cth_;
int damage_;
int xp_;
unit_info(const map_location& loc, int weapon, unit_map& units);
unit& get_unit();
bool valid();
std::string dump();
};
/**
* Used in perform_hit to confirm a replay is in sync.
* Check OOS_error_ after this method, true if error detected.
*/
void check_replay_attack_result(bool&, int, int&, config, unit_info&);
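/** Handles a combat death: fires the last_breath/die events, removes the unit and handles plague. */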
void unit_killed(
unit_info&, unit_info&, const battle_context_unit_stats*&, const battle_context_unit_stats*&, bool);
std::unique_ptr<battle_context> bc_;
const battle_context_unit_stats* a_stats_;
const battle_context_unit_stats* d_stats_;
int abs_n_attack_, abs_n_defend_;
// update_att_fog_ is not used, other than making some code simpler.
bool update_att_fog_, update_def_fog_, update_minimap_;
unit_info a_, d_;
unit_map& units_;
std::ostringstream errbuf_;
bool update_display_;
bool OOS_error_;
bool use_prng_;
std::vector<bool> prng_attacker_;
std::vector<bool> prng_defender_;
};
attack::unit_info::unit_info(const map_location& loc, int weapon, unit_map& units)
: loc_(loc)
, weapon_(weapon)
, units_(units)
, id_()
, weap_id_()
, orig_attacks_(0)
, n_attacks_(0)
, cth_(0)
, damage_(0)
, xp_(0)
{
unit_map::iterator i = units_.find(loc_);
if(!i.valid()) {
return;
}
id_ = i->underlying_id();
}
unit& attack::unit_info::get_unit()
{
unit_map::iterator i = units_.find(loc_);
assert(i.valid() && i->underlying_id() == id_);
return *i;
}
bool attack::unit_info::valid()
{
unit_map::iterator i = units_.find(loc_);
return i.valid() && i->underlying_id() == id_;
}
std::string attack::unit_info::dump()
{
std::stringstream s;
s << get_unit().type_id() << " (" << loc_.wml_x() << ',' << loc_.wml_y() << ')';
return s.str();
}
attack::attack(const map_location& attacker,
const map_location& defender,
int attack_with,
int defend_with,
bool update_display)
: bc_(nullptr)
, a_stats_(nullptr)
, d_stats_(nullptr)
, abs_n_attack_(0)
, abs_n_defend_(0)
, update_att_fog_(false)
, update_def_fog_(false)
, update_minimap_(false)
, a_(attacker, attack_with, resources::gameboard->units())
, d_(defender, defend_with, resources::gameboard->units())
, units_(resources::gameboard->units())
, errbuf_()
, update_display_(update_display)
, OOS_error_(false)
//new experimental prng mode.
, use_prng_(preferences::get("use_prng") == "yes" && randomness::generator->is_networked() == false)
{
if(use_prng_) {
std::cerr << "Using experimental PRNG for combat\n";
}
}
void attack::fire_event(const std::string& n)
{
LOG_NG << "attack: firing '" << n << "' event\n";
// prepare the event data for weapon filtering
config ev_data;
config& a_weapon_cfg = ev_data.add_child("first");
config& d_weapon_cfg = ev_data.add_child("second");
// Need these to ensure weapon filters work correctly
boost::optional<attack_type::specials_context_t> a_ctx, d_ctx;
if(a_stats_->weapon != nullptr && a_.valid()) {
if(d_stats_->weapon != nullptr && d_.valid()) {
a_ctx.emplace(a_stats_->weapon->specials_context(nullptr, nullptr, a_.loc_, d_.loc_, true, d_stats_->weapon));
} else {
a_ctx.emplace(a_stats_->weapon->specials_context(nullptr, a_.loc_, true));
}
a_stats_->weapon->write(a_weapon_cfg);
}
if(d_stats_->weapon != nullptr && d_.valid()) {
if(a_stats_->weapon != nullptr && a_.valid()) {
d_ctx.emplace(d_stats_->weapon->specials_context(nullptr, nullptr, d_.loc_, a_.loc_, false, a_stats_->weapon));
} else {
d_ctx.emplace(d_stats_->weapon->specials_context(nullptr, d_.loc_, false));
}
d_stats_->weapon->write(d_weapon_cfg);
}
if(a_weapon_cfg["name"].empty()) {
a_weapon_cfg["name"] = "none";
}
if(d_weapon_cfg["name"].empty()) {
d_weapon_cfg["name"] = "none";
}
if(n == "attack_end") {
// We want to fire attack_end event in any case! Even if one of units was removed by WML.
resources::game_events->pump().fire(n, a_.loc_, d_.loc_, ev_data);
return;
}
// damage_inflicted is set in these two events.
// TODO: should we set this value from unit_info::damage, or continue using the WML variable?
if(n == "attacker_hits" || n == "defender_hits") {
ev_data["damage_inflicted"] = resources::gamedata->get_variable("damage_inflicted");
}
const int defender_side = d_.get_unit().side();
bool wml_aborted;
std::tie(std::ignore, wml_aborted) = resources::game_events->pump().fire(n,
game_events::entity_location(a_.loc_, a_.id_),
game_events::entity_location(d_.loc_, d_.id_), ev_data);
// The event could have killed either the attacker or
// defender, so we have to make sure they still exist.
refresh_bc();
if(wml_aborted || !a_.valid() || !d_.valid()
|| !resources::gameboard->get_team(a_.get_unit().side()).is_enemy(d_.get_unit().side())
) {
actions::recalculate_fog(defender_side);
if(update_display_) {
display::get_singleton()->redraw_minimap();
}
fire_event("attack_end");
throw attack_end_exception();
}
}
void attack::refresh_bc()
{
// Fix index of weapons.
if(a_.valid()) {
refresh_weapon_index(a_.weapon_, a_.weap_id_, a_.get_unit().attacks());
}
if(d_.valid()) {
refresh_weapon_index(d_.weapon_, d_.weap_id_, d_.get_unit().attacks());
}
if(!a_.valid() || !d_.valid()) {
// Fix pointer to weapons.
const_cast<battle_context_unit_stats*>(a_stats_)->weapon
= a_.valid() && a_.weapon_ >= 0 ? a_.get_unit().attacks()[a_.weapon_].shared_from_this() : nullptr;
const_cast<battle_context_unit_stats*>(d_stats_)->weapon
= d_.valid() && d_.weapon_ >= 0 ? d_.get_unit().attacks()[d_.weapon_].shared_from_this() : nullptr;
return;
}
bc_.reset(new battle_context(units_, a_.loc_, d_.loc_, a_.weapon_, d_.weapon_));
a_stats_ = &bc_->get_attacker_stats();
d_stats_ = &bc_->get_defender_stats();
a_.cth_ = a_stats_->chance_to_hit;
d_.cth_ = d_stats_->chance_to_hit;
a_.damage_ = a_stats_->damage;
d_.damage_ = d_stats_->damage;
}
bool attack::perform_hit(bool attacker_turn, statistics::attack_context& stats)
{
unit_info& attacker = attacker_turn ? a_ : d_;
unit_info& defender = attacker_turn ? d_ : a_;
// NOTE: we need to use a reference-to-pointer here so that when a_stats_ and d_stats_ are
// reassigned (e.g. by refresh_bc()), these aliases pick up the new values. A plain pointer
// copy would be left dangling when that happened.
const battle_context_unit_stats*& attacker_stats = attacker_turn ? a_stats_ : d_stats_;
const battle_context_unit_stats*& defender_stats = attacker_turn ? d_stats_ : a_stats_;
int& abs_n = attacker_turn ? abs_n_attack_ : abs_n_defend_;
bool& update_fog = attacker_turn ? update_def_fog_ : update_att_fog_;
int ran_num;
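// Experimental PRNG mode: pre-build a hit/miss sequence whose number of hits matches the
// expected value for this unit's strikes, then consume one entry per swing.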
if(use_prng_) {
std::vector<bool>& prng_seq = attacker_turn ? prng_attacker_ : prng_defender_;
if(prng_seq.empty()) {
const int ntotal = attacker.cth_*attacker.n_attacks_;
int num_hits = ntotal/100;
const int additional_hit_chance = ntotal%100;
if(additional_hit_chance > 0 && randomness::generator->get_random_int(0, 99) < additional_hit_chance) {
++num_hits;
}
std::vector<int> indexes;
for(int i = 0; i != attacker.n_attacks_; ++i) {
prng_seq.push_back(false);
indexes.push_back(i);
}
for(int i = 0; i != num_hits; ++i) {
int n = randomness::generator->get_random_int(0, static_cast<int>(indexes.size())-1);
prng_seq[indexes[n]] = true;
indexes.erase(indexes.begin() + n);
}
}
bool does_hit = prng_seq.back();
prng_seq.pop_back();
ran_num = does_hit ? 0 : 99;
} else {
ran_num = randomness::generator->get_random_int(0, 99);
}
bool hits = (ran_num < attacker.cth_);
int damage = 0;
if(hits) {
damage = attacker.damage_;
resources::gamedata->get_variable("damage_inflicted") = damage;
}
// Make sure that if we're serializing a game here,
// we got the same results as the game did originally.
const config local_results {"chance", attacker.cth_, "hits", hits, "damage", damage};
config replay_results;
bool equals_replay = checkup_instance->local_checkup(local_results, replay_results);
if(!equals_replay) {
check_replay_attack_result(hits, ran_num, damage, replay_results, attacker);
}
// can do no more damage than the defender has hitpoints
int damage_done = std::min<int>(defender.get_unit().hitpoints(), attacker.damage_);
// expected damage = damage potential * chance to hit (as a percentage)
double expected_damage = damage_done * attacker.cth_ * 0.01;
if(attacker_turn) {
stats.attack_expected_damage(expected_damage, 0);
} else {
stats.attack_expected_damage(0, expected_damage);
}
int drains_damage = 0;
if(hits && attacker_stats->drains) {
drains_damage = damage_done * attacker_stats->drain_percent / 100 + attacker_stats->drain_constant;
// don't drain so much that the attacker gets more than his maximum hitpoints
drains_damage =
std::min<int>(drains_damage, attacker.get_unit().max_hitpoints() - attacker.get_unit().hitpoints());
// if drain is negative, don't allow drain to kill the attacker
drains_damage = std::max<int>(drains_damage, 1 - attacker.get_unit().hitpoints());
}
if(update_display_) {
std::ostringstream float_text;
std::vector<std::string> extra_hit_sounds;
if(hits) {
const unit& defender_unit = defender.get_unit();
if(attacker_stats->poisons && !defender_unit.get_state(unit::STATE_POISONED)) {
float_text << (defender_unit.gender() == unit_race::FEMALE ? _("female^poisoned") : _("poisoned"))
<< '\n';
extra_hit_sounds.push_back(game_config::sounds::status::poisoned);
}
if(attacker_stats->slows && !defender_unit.get_state(unit::STATE_SLOWED)) {
float_text << (defender_unit.gender() == unit_race::FEMALE ? _("female^slowed") : _("slowed")) << '\n';
extra_hit_sounds.push_back(game_config::sounds::status::slowed);
}
if(attacker_stats->petrifies) {
float_text << (defender_unit.gender() == unit_race::FEMALE ? _("female^petrified") : _("petrified"))
<< '\n';
extra_hit_sounds.push_back(game_config::sounds::status::petrified);
}
}
unit_display::unit_attack(
game_display::get_singleton(),
*resources::gameboard,
attacker.loc_, defender.loc_,
damage,
*attacker_stats->weapon, defender_stats->weapon,
abs_n, float_text.str(), drains_damage, "",
&extra_hit_sounds
);
}
bool dies = defender.get_unit().take_hit(damage);
LOG_NG << "defender took " << damage << (dies ? " and died\n" : "\n");
if(attacker_turn) {
stats.attack_result(hits
? (dies
? statistics::attack_context::KILLS
: statistics::attack_context::HITS)
: statistics::attack_context::MISSES,
attacker.cth_, damage_done, drains_damage
);
} else {
stats.defend_result(hits
? (dies
? statistics::attack_context::KILLS
: statistics::attack_context::HITS)
: statistics::attack_context::MISSES,
attacker.cth_, damage_done, drains_damage
);
}
replay_results.clear();
// There was also an attribute cfg["unit_hit"] which was never used, so it was deleted.
equals_replay = checkup_instance->local_checkup(config{"dies", dies}, replay_results);
if(!equals_replay) {
bool results_dies = replay_results["dies"].to_bool();
errbuf_ << "SYNC: In attack " << a_.dump() << " vs " << d_.dump() << ": the data source says the "
<< (attacker_turn ? "defender" : "attacker") << ' ' << (results_dies ? "perished" : "survived")
<< " while in-game calculations show it " << (dies ? "perished" : "survived")
<< " (over-riding game calculations with data source results)\n";
dies = results_dies;
// Set hitpoints to 0 so later checks don't invalidate the death.
if(results_dies) {
defender.get_unit().set_hitpoints(0);
}
OOS_error_ = true;
}
if(hits) {
try {
fire_event(attacker_turn ? "attacker_hits" : "defender_hits");
} catch(const attack_end_exception&) {
refresh_bc();
return false;
}
} else {
try {
fire_event(attacker_turn ? "attacker_misses" : "defender_misses");
} catch(const attack_end_exception&) {
refresh_bc();
return false;
}
}
refresh_bc();
bool attacker_dies = false;
if(drains_damage > 0) {
attacker.get_unit().heal(drains_damage);
} else if(drains_damage < 0) {
attacker_dies = attacker.get_unit().take_hit(-drains_damage);
}
if(dies) {
unit_killed(attacker, defender, attacker_stats, defender_stats, false);
update_fog = true;
}
if(attacker_dies) {
unit_killed(defender, attacker, defender_stats, attacker_stats, true);
(attacker_turn ? update_att_fog_ : update_def_fog_) = true;
}
if(dies) {
update_minimap_ = true;
return false;
}
if(hits) {
unit& defender_unit = defender.get_unit();
if(attacker_stats->poisons && !defender_unit.get_state(unit::STATE_POISONED)) {
defender_unit.set_state(unit::STATE_POISONED, true);
LOG_NG << "defender poisoned\n";
}
if(attacker_stats->slows && !defender_unit.get_state(unit::STATE_SLOWED)) {
defender_unit.set_state(unit::STATE_SLOWED, true);
update_fog = true;
defender.damage_ = defender_stats->slow_damage;
LOG_NG << "defender slowed\n";
}
// If the defender is petrified, the fight stops immediately
if(attacker_stats->petrifies) {
defender_unit.set_state(unit::STATE_PETRIFIED, true);
update_fog = true;
attacker.n_attacks_ = 0;
defender.n_attacks_ = -1; // Petrified.
resources::game_events->pump().fire("petrified", defender.loc_, attacker.loc_);
refresh_bc();
}
}
// Delay until here so that poison and slow go through
if(attacker_dies) {
update_minimap_ = true;
return false;
}
--attacker.n_attacks_;
return true;
}
void attack::unit_killed(unit_info& attacker,
unit_info& defender,
const battle_context_unit_stats*& attacker_stats,
const battle_context_unit_stats*& defender_stats,
bool drain_killed)
{
attacker.xp_ = game_config::kill_xp(defender.get_unit().level());
defender.xp_ = 0;
display::get_singleton()->invalidate(attacker.loc_);
game_events::entity_location death_loc(defender.loc_, defender.id_);
game_events::entity_location attacker_loc(attacker.loc_, attacker.id_);
std::string undead_variation = defender.get_unit().undead_variation();
fire_event("attack_end");
refresh_bc();
// Get weapon info for last_breath and die events.
config dat;
config a_weapon_cfg = attacker_stats->weapon && attacker.valid() ? attacker_stats->weapon->to_config() : config();
config d_weapon_cfg = defender_stats->weapon && defender.valid() ? defender_stats->weapon->to_config() : config();
if(a_weapon_cfg["name"].empty()) {
a_weapon_cfg["name"] = "none";
}
if(d_weapon_cfg["name"].empty()) {
d_weapon_cfg["name"] = "none";
}
dat.add_child("first", d_weapon_cfg);
dat.add_child("second", a_weapon_cfg);
resources::game_events->pump().fire("last_breath", death_loc, attacker_loc, dat);
refresh_bc();
// WML has invalidated the dying unit, abort.
if(!defender.valid() || defender.get_unit().hitpoints() > 0) {
return;
}
if(!attacker.valid()) {
unit_display::unit_die(
defender.loc_,
defender.get_unit(),
nullptr,
defender_stats->weapon
);
} else {
unit_display::unit_die(
defender.loc_,
defender.get_unit(),
attacker_stats->weapon,
defender_stats->weapon,
attacker.loc_,
&attacker.get_unit()
);
}
resources::game_events->pump().fire("die", death_loc, attacker_loc, dat);
refresh_bc();
if(!defender.valid() || defender.get_unit().hitpoints() > 0) {
// WML has invalidated the dying unit, abort
return;
}
units_.erase(defender.loc_);
resources::whiteboard->on_kill_unit();
// Plague units make new units on the target hex.
if(attacker.valid() && attacker_stats->plagues && !drain_killed) {
LOG_NG << "trying to reanimate " << attacker_stats->plague_type << '\n';
if(const unit_type* reanimator = unit_types.find(attacker_stats->plague_type)) {
LOG_NG << "found unit type:" << reanimator->id() << '\n';
unit_ptr newunit = unit::create(*reanimator, attacker.get_unit().side(), true, unit_race::MALE);
newunit->set_attacks(0);
newunit->set_movement(0, true);
newunit->set_facing(map_location::get_opposite_dir(attacker.get_unit().facing()));
// Apply variation
if(undead_variation != "null") {
config mod;
config& variation = mod.add_child("effect");
variation["apply_to"] = "variation";
variation["name"] = undead_variation;
newunit->add_modification("variation", mod);
newunit->heal_fully();
}
newunit->set_location(death_loc);
units_.insert(newunit);
game_events::entity_location reanim_loc(defender.loc_, newunit->underlying_id());
resources::game_events->pump().fire("unit_placed", reanim_loc);
preferences::encountered_units().insert(newunit->type_id());
if(update_display_) {
display::get_singleton()->invalidate(death_loc);
}
}
} else {
LOG_NG << "unit not reanimated\n";
}
}
void attack::perform()
{
// Stop the user from issuing any commands while the units are fighting.
const events::command_disabler disable_commands;
if(!a_.valid() || !d_.valid()) {
return;
}
// no attack weapon => stop here and don't attack
if(a_.weapon_ < 0) {
a_.get_unit().set_attacks(a_.get_unit().attacks_left() - 1);
a_.get_unit().set_movement(-1, true);
return;
}
if(a_.get_unit().attacks_left() <= 0) {
LOG_NG << "attack::perform(): not enough ap.\n";
return;
}
a_.get_unit().set_facing(a_.loc_.get_relative_dir(d_.loc_));
d_.get_unit().set_facing(d_.loc_.get_relative_dir(a_.loc_));
a_.get_unit().set_attacks(a_.get_unit().attacks_left() - 1);
VALIDATE(a_.weapon_ < static_cast<int>(a_.get_unit().attacks().size()),
_("An invalid attacker weapon got selected."));
a_.get_unit().set_movement(a_.get_unit().movement_left() - a_.get_unit().attacks()[a_.weapon_].movement_used(), true);
a_.get_unit().set_state(unit::STATE_NOT_MOVED, false);
a_.get_unit().set_resting(false);
d_.get_unit().set_resting(false);
// If the attacker was invisible, she isn't anymore!
a_.get_unit().set_state(unit::STATE_UNCOVERED, true);
bc_.reset(new battle_context(units_, a_.loc_, d_.loc_, a_.weapon_, d_.weapon_));
a_stats_ = &bc_->get_attacker_stats();
d_stats_ = &bc_->get_defender_stats();
if(a_stats_->disable) {
LOG_NG << "attack::perform(): tried to attack with a disabled attack.\n";
return;
}
if(a_stats_->weapon) {
a_.weap_id_ = a_stats_->weapon->id();
}
if(d_stats_->weapon) {
d_.weap_id_ = d_stats_->weapon->id();
}
try {
fire_event("attack");
} catch(const attack_end_exception&) {
return;
}
refresh_bc();
DBG_NG << "getting attack statistics\n";
statistics::attack_context attack_stats(
a_.get_unit(), d_.get_unit(), a_stats_->chance_to_hit, d_stats_->chance_to_hit);
a_.orig_attacks_ = a_stats_->num_blows;
d_.orig_attacks_ = d_stats_->num_blows;
a_.n_attacks_ = a_.orig_attacks_;
d_.n_attacks_ = d_.orig_attacks_;
a_.xp_ = game_config::combat_xp(d_.get_unit().level());
d_.xp_ = game_config::combat_xp(a_.get_unit().level());
bool defender_strikes_first = (d_stats_->firststrike && !a_stats_->firststrike);
unsigned int rounds = std::max<unsigned int>(a_stats_->rounds, d_stats_->rounds) - 1;
const int defender_side = d_.get_unit().side();
LOG_NG << "Fight: (" << a_.loc_ << ") vs (" << d_.loc_ << ") ATT: " << a_stats_->weapon->name() << " "
<< a_stats_->damage << "-" << a_stats_->num_blows << "(" << a_stats_->chance_to_hit
<< "%) vs DEF: " << (d_stats_->weapon ? d_stats_->weapon->name() : "none") << " " << d_stats_->damage << "-"
<< d_stats_->num_blows << "(" << d_stats_->chance_to_hit << "%)"
<< (defender_strikes_first ? " defender first-strike" : "") << "\n";
// Play the pre-fight animation
unit_display::unit_draw_weapon(a_.loc_, a_.get_unit(), a_stats_->weapon, d_stats_->weapon, d_.loc_, &d_.get_unit());
for(;;) {
DBG_NG << "start of attack loop...\n";
++abs_n_attack_;
if(a_.n_attacks_ > 0 && !defender_strikes_first) {
if(!perform_hit(true, attack_stats)) {
DBG_NG << "broke from attack loop on attacker turn\n";
break;
}
}
// If the defender got to strike first, they use it up here.
defender_strikes_first = false;
++abs_n_defend_;
if(d_.n_attacks_ > 0) {
if(!perform_hit(false, attack_stats)) {
DBG_NG << "broke from attack loop on defender turn\n";
break;
}
}
// Continue the fight to death; if one of the units got petrified,
// either n_attacks or n_defends is -1
if(rounds > 0 && d_.n_attacks_ == 0 && a_.n_attacks_ == 0) {
a_.n_attacks_ = a_.orig_attacks_;
d_.n_attacks_ = d_.orig_attacks_;
--rounds;
defender_strikes_first = (d_stats_->firststrike && !a_stats_->firststrike);
}
if(a_.n_attacks_ <= 0 && d_.n_attacks_ <= 0) {
fire_event("attack_end");
refresh_bc();
break;
}
}
// Set by attacker_hits and defender_hits events.
resources::gamedata->clear_variable("damage_inflicted");
if(update_def_fog_) {
actions::recalculate_fog(defender_side);
}
// TODO: if we knew the viewing team, we could skip this display update
if(update_minimap_ && update_display_) {
display::get_singleton()->redraw_minimap();
}
if(a_.valid()) {
unit& u = a_.get_unit();
u.anim_comp().set_standing();
u.set_experience(u.experience() + a_.xp_);
}
if(d_.valid()) {
unit& u = d_.get_unit();
u.anim_comp().set_standing();
u.set_experience(u.experience() + d_.xp_);
}
unit_display::unit_sheath_weapon(a_.loc_, a_.valid() ? &a_.get_unit() : nullptr, a_stats_->weapon, d_stats_->weapon,
d_.loc_, d_.valid() ? &d_.get_unit() : nullptr);
if(update_display_) {
game_display::get_singleton()->invalidate_unit();
display::get_singleton()->invalidate(a_.loc_);
display::get_singleton()->invalidate(d_.loc_);
}
if(OOS_error_) {
replay::process_error(errbuf_.str());
}
}
void attack::check_replay_attack_result(
bool& hits, int ran_num, int& damage, config replay_results, unit_info& attacker)
{
int results_chance = replay_results["chance"];
bool results_hits = replay_results["hits"].to_bool();
int results_damage = replay_results["damage"];
#if 0
errbuf_ << "SYNC: In attack " << a_.dump() << " vs " << d_.dump()
<< " replay data differs from local calculated data:"
<< " chance to hit in data source: " << results_chance
<< " chance to hit in calculated: " << attacker.cth_
<< " chance to hit in data source: " << results_chance
<< " chance to hit in calculated: " << attacker.cth_
;
attacker.cth_ = results_chance;
hits = results_hits;
damage = results_damage;
OOS_error_ = true;
#endif
if(results_chance != attacker.cth_) {
errbuf_ << "SYNC: In attack " << a_.dump() << " vs " << d_.dump()
<< ": chance to hit is inconsistent. Data source: " << results_chance
<< "; Calculation: " << attacker.cth_ << " (over-riding game calculations with data source results)\n";
attacker.cth_ = results_chance;
OOS_error_ = true;
}
if(results_hits != hits) {
errbuf_ << "SYNC: In attack " << a_.dump() << " vs " << d_.dump() << ": the data source says the hit was "
<< (results_hits ? "successful" : "unsuccessful") << ", while in-game calculations say the hit was "
<< (hits ? "successful" : "unsuccessful") << " random number: " << ran_num << " = " << (ran_num % 100)
<< "/" << results_chance << " (over-riding game calculations with data source results)\n";
hits = results_hits;
OOS_error_ = true;
}
if(results_damage != damage) {
errbuf_ << "SYNC: In attack " << a_.dump() << " vs " << d_.dump() << ": the data source says the hit did "
<< results_damage << " damage, while in-game calculations show the hit doing " << damage
<< " damage (over-riding game calculations with data source results)\n";
damage = results_damage;
OOS_error_ = true;
}
}
} // end anonymous namespace
// ==================================================================================
// FREE-STANDING FUNCTIONS
// ==================================================================================
void attack_unit(const map_location& attacker,
const map_location& defender,
int attack_with,
int defend_with,
bool update_display)
{
attack dummy(attacker, defender, attack_with, defend_with, update_display);
dummy.perform();
}
void attack_unit_and_advance(const map_location& attacker,
const map_location& defender,
int attack_with,
int defend_with,
bool update_display,
const ai::unit_advancements_aspect& ai_advancement)
{
attack_unit(attacker, defender, attack_with, defend_with, update_display);
unit_map::const_iterator atku = resources::gameboard->units().find(attacker);
if(atku != resources::gameboard->units().end()) {
advance_unit_at(advance_unit_params(attacker).ai_advancements(ai_advancement));
}
unit_map::const_iterator defu = resources::gameboard->units().find(defender);
if(defu != resources::gameboard->units().end()) {
advance_unit_at(advance_unit_params(defender).ai_advancements(ai_advancement));
}
}
int under_leadership(const unit &u, const map_location& loc, const_attack_ptr weapon, const_attack_ptr opp_weapon)
{
unit_ability_list abil = u.get_abilities("leadership", loc, weapon, opp_weapon);
unit_abilities::effect leader_effect(abil, 0, false);
return leader_effect.get_composite_value();
}
// Begin of the weapon-special emulation functions.
bool unit::abilities_filter_matches(const config& cfg, bool attacker, int res) const
{
if(!(cfg["active_on"].empty() || (attacker && cfg["active_on"] == "offense") || (!attacker && cfg["active_on"] == "defense"))) {
return false;
}
if(!unit_abilities::filter_base_matches(cfg, res)) {
return false;
}
return true;
}
// Functions to emulate weapon specials.
// Filter on the opponent and handle the affects-self/opponent/both option.
bool unit::ability_filter_fighter(const std::string& ability, const std::string& filter_attacker , const config& cfg, const map_location& loc, const unit& u2) const
{
const config &filter = cfg.child(filter_attacker);
if(!filter) {
return true;
}
return unit_filter(vconfig(filter)).set_use_flat_tod(ability == "illuminates").matches(*this, loc, u2);
}
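// Returns true when one of the [filter_*] tags does NOT match, i.e. the ability must be discarded.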
static bool ability_apply_filter(const unit_map::const_iterator un, const unit_map::const_iterator up, const std::string& ability, const config& cfg, const map_location& loc, const map_location& opp_loc, bool attacker )
{
if(!up->ability_filter_fighter(ability, "filter_opponent", cfg, opp_loc, *un)){
return true;
}
if(!un->ability_filter_fighter(ability, "filter_student", cfg, loc, *up)){
return true;
}
if((attacker && !un->ability_filter_fighter(ability, "filter_attacker", cfg, loc, *up)) || (!attacker && !up->ability_filter_fighter(ability, "filter_attacker", cfg, opp_loc, *un))){
return true;
}
if((!attacker && !un->ability_filter_fighter(ability, "filter_defender", cfg, loc, *up)) || (attacker && !up->ability_filter_fighter(ability, "filter_defender", cfg, opp_loc, *un))){
return true;
}
return false;
}
bool leadership_affects_self(const std::string& ability,const unit_map& units, const map_location& loc, bool attacker, const_attack_ptr weapon,const_attack_ptr opp_weapon)
{
const unit_map::const_iterator un = units.find(loc);
if(un == units.end()) {
return false;
}
unit_ability_list abil = un->get_abilities(ability, weapon, opp_weapon);
for(unit_ability_list::iterator i = abil.begin(); i != abil.end();) {
const std::string& apply_to = (*i->first)["apply_to"];
if(apply_to.empty() || apply_to == "both" || apply_to == "self") {
return true;
}
if(attacker && apply_to == "attacker") {
return true;
}
if(!attacker && apply_to == "defender") {
return true;
}
++i;
}
return false;
}
bool leadership_affects_opponent(const std::string& ability,const unit_map& units, const map_location& loc, bool attacker, const_attack_ptr weapon,const_attack_ptr opp_weapon)
{
const unit_map::const_iterator un = units.find(loc);
if(un == units.end()) {
return false;
}
unit_ability_list abil = un->get_abilities(ability, weapon, opp_weapon);
for(unit_ability_list::iterator i = abil.begin(); i != abil.end();) {
const std::string& apply_to = (*i->first)["apply_to"];
if(apply_to == "both" || apply_to == "opponent") {
return true;
}
if(attacker && apply_to == "defender") {
return true;
}
if(!attacker && apply_to == "attacker") {
return true;
}
++i;
}
return false;
}
// Sub-function to emulate the chance_to_hit, damage, drains and attacks specials.
std::pair<int, bool> ability_leadership(const std::string& ability,const unit_map& units, const map_location& loc, const map_location& opp_loc, bool attacker, int abil_value, bool backstab_pos, const_attack_ptr weapon, const_attack_ptr opp_weapon)
{
const unit_map::const_iterator un = units.find(loc);
const unit_map::const_iterator up = units.find(opp_loc);
if(un == units.end()) {
return {abil_value, false};
}
unit_ability_list abil = un->get_abilities(ability, weapon, opp_weapon);
for(unit_ability_list::iterator i = abil.begin(); i != abil.end();) {
const config &filter = (*i->first).child("filter_opponent");
const config &filter_student = (*i->first).child("filter_student");
const config &filter_attacker = (*i->first).child("filter_attacker");
const config &filter_defender = (*i->first).child("filter_defender");
bool show_result = false;
if(up == units.end() && !filter_student && !filter && !filter_attacker && !filter_defender) {
show_result = un->abilities_filter_matches(*i->first, attacker, abil_value);
} else if(up == units.end() && (filter_student || filter || filter_attacker || filter_defender)) {
return {abil_value, false};
} else {
show_result = !(!un->abilities_filter_matches(*i->first, attacker, abil_value) || ability_apply_filter(un, up, ability, *i->first, loc, opp_loc, attacker));
}
if(!show_result) {
i = abil.erase(i);
} else {
++i;
}
}
if(!abil.empty()) {
unit_abilities::effect leader_effect(abil, abil_value, backstab_pos);
return {leader_effect.get_composite_value(), true};
}
return {abil_value, false};
}
// Sub-function to emulate boolean specials (slow, poison, ...).
bool bool_leadership(const std::string& ability,const unit_map& units, const map_location& loc, const map_location& opp_loc, bool attacker, const_attack_ptr weapon, const_attack_ptr opp_weapon)
{
const unit_map::const_iterator un = units.find(loc);
const unit_map::const_iterator up = units.find(opp_loc);
if(un == units.end() || up == units.end()) {
return false;
}
unit_ability_list abil = un->get_abilities(ability, weapon, opp_weapon);
for(unit_ability_list::iterator i = abil.begin(); i != abil.end();) {
const std::string& active_on = (*i->first)["active_on"];
if(!(active_on.empty() || (attacker && active_on == "offense") || (!attacker && active_on == "defense")) || ability_apply_filter(un, up, ability, *i->first, loc, opp_loc, attacker)) {
i = abil.erase(i);
} else {
++i;
}
}
if(!abil.empty()) {
return true;
}
return false;
}
// Emulate a boolean special for self/adjacent and/or opponent.
bool attack_type::bool_ability(const std::string& ability) const
{
bool abil_bool= get_special_bool(ability);
const unit_map& units = display::get_singleton()->get_units();
if(leadership_affects_self(ability, units, self_loc_, is_attacker_, shared_from_this(), other_attack_)) {
abil_bool = get_special_bool(ability) || bool_leadership(ability, units, self_loc_, other_loc_, is_attacker_, shared_from_this(), other_attack_);
}
if(leadership_affects_opponent(ability, units, other_loc_, !is_attacker_, other_attack_, shared_from_this())) {
abil_bool = get_special_bool(ability) || bool_leadership(ability, units, other_loc_, self_loc_, !is_attacker_, other_attack_, shared_from_this());
}
return abil_bool;
}
// Emulate a numerical special for self/adjacent and/or opponent.
std::pair<int, bool> attack_type::combat_ability(const std::string& ability, int abil_value, bool backstab_pos) const
{
const unit_map& units = display::get_singleton()->get_units();
if(leadership_affects_self(ability, units, self_loc_, is_attacker_, shared_from_this(), other_attack_)) {
return ability_leadership(ability, units, self_loc_, other_loc_, is_attacker_, abil_value, backstab_pos, shared_from_this(), other_attack_);
}
if(leadership_affects_opponent(ability, units, other_loc_, !is_attacker_, other_attack_, shared_from_this())) {
return ability_leadership(ability, units, other_loc_,self_loc_, !is_attacker_, abil_value, backstab_pos, other_attack_, shared_from_this());
}
return {abil_value, false};
}
// End of the weapon-special emulation functions.
int combat_modifier(const unit_map& units,
const gamemap& map,
const map_location& loc,
unit_type::ALIGNMENT alignment,
bool is_fearless)
{
const tod_manager& tod_m = *resources::tod_manager;
const time_of_day& effective_tod = tod_m.get_illuminated_time_of_day(units, map, loc);
return combat_modifier(effective_tod, alignment, is_fearless);
}
int combat_modifier(const time_of_day& effective_tod,
unit_type::ALIGNMENT alignment,
bool is_fearless)
{
const tod_manager& tod_m = *resources::tod_manager;
const int lawful_bonus = effective_tod.lawful_bonus;
return generic_combat_modifier(lawful_bonus, alignment, is_fearless, tod_m.get_max_liminal_bonus());
}
int generic_combat_modifier(int lawful_bonus, unit_type::ALIGNMENT alignment, bool is_fearless, int max_liminal_bonus)
{
int bonus;
switch(alignment.v) {
case unit_type::ALIGNMENT::LAWFUL:
bonus = lawful_bonus;
break;
case unit_type::ALIGNMENT::NEUTRAL:
bonus = 0;
break;
case unit_type::ALIGNMENT::CHAOTIC:
bonus = -lawful_bonus;
break;
case unit_type::ALIGNMENT::LIMINAL:
bonus = std::max(0, max_liminal_bonus-std::abs(lawful_bonus));
break;
default:
bonus = 0;
}
if(is_fearless) {
bonus = std::max<int>(bonus, 0);
}
return bonus;
}
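/** True if an attack from attacker_loc on defender_loc qualifies for backstab: a non-incapacitated enemy of the defender stands on the hex directly opposite the attacker. */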
bool backstab_check(const map_location& attacker_loc,
const map_location& defender_loc,
const unit_map& units,
const std::vector<team>& teams)
{
const unit_map::const_iterator defender = units.find(defender_loc);
if(defender == units.end()) {
return false; // No defender
}
adjacent_loc_array_t adj;
get_adjacent_tiles(defender_loc, adj.data());
unsigned i;
for(i = 0; i < adj.size(); ++i) {
if(adj[i] == attacker_loc) {
break;
}
}
if(i >= 6) {
return false; // Attack not from adjacent location
}
const unit_map::const_iterator opp = units.find(adj[(i + 3) % 6]);
// No opposite unit.
if(opp == units.end()) {
return false;
}
if(opp->incapacitated()) {
return false;
}
// If sides aren't valid teams, then they are enemies.
if(std::size_t(defender->side() - 1) >= teams.size() || std::size_t(opp->side() - 1) >= teams.size()) {
return true;
}
// Defender and opposite are enemies.
if(teams[defender->side() - 1].is_enemy(opp->side())) {
return true;
}
// Defender and opposite are friends.
return false;
}<|fim▁end|> |
if(opp_weapon) {
opp_ctx.emplace(opp_weapon->specials_context(&opp, &u, opp_loc, u_loc, !attacking, weapon));
} |
<|file_name|>DataWriter.cpp<|end_file_name|><|fim▁begin|>#include "StdAfx.h"
#include ".\datawriter.h"
using namespace std;
DataWriter::DataWriter(const std::string &fileName)
{
this->fileName = fileName;
fileStream = NULL;
//Initialize the filestream
fileStream = new fstream(fileName.c_str(), ios::out|ios::binary|ios::trunc);
}
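// Writes 'data' as 'size' little-endian bytes, padding the high-order bytes with zeros.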
void DataWriter::Write(int data, const size_t size)
{
if (fileStream)
<|fim▁hole|> int sizeCount = 0;
while (data > 0)
{
fileStream->put(char(data%256));
data /= 256;
++sizeCount;
}
while (sizeCount < size) //Fill the remaining characters
{
fileStream->put(char(0));
++sizeCount;
}
}
}
}
void DataWriter::Write(const char data)
{
if (fileStream)
{
if (fileStream->is_open())
{
fileStream->put(data);
}
}
}
void DataWriter::Write(const char* data, const size_t size)
{
if (!data)
{
std::cout << "Warning: attempted to write null pointer\n";
return;
}
if (fileStream)
{
if (fileStream->is_open())
{
if (strlen(data) > size)
{
cout << "Warning: Attempting to write data to area larger than specified size\n";
return;
}
fileStream->write(data,strlen(data));
if (strlen(data) < size)
{
for (unsigned int i = 0; i < size - strlen(data); ++i)
{
fileStream->put(char(0));//The files we're dealing with are little-endian, so fill after the placement of the data
}
}
}
}
}<|fim▁end|> | {
if (fileStream->is_open())
{
|
<|file_name|>htmlbaseelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::Attr;
use dom::bindings::codegen::Bindings::HTMLBaseElementBinding;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::element::{AttributeMutation, Element};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, document_from_node};
use dom::virtualmethods::VirtualMethods;
use url::{Url, UrlParser};
use util::str::DOMString;
#[dom_struct]
pub struct HTMLBaseElement {
htmlelement: HTMLElement
}
impl HTMLBaseElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: &Document) -> HTMLBaseElement {
HTMLBaseElement {
htmlelement: HTMLElement::new_inherited(localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLBaseElement> {
let element = HTMLBaseElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLBaseElementBinding::Wrap)
}
/// https://html.spec.whatwg.org/multipage/#frozen-base-url
pub fn frozen_base_url(&self) -> Url {
let href = self.upcast::<Element>().get_attribute(&ns!(""), &atom!("href"))
.expect("The frozen base url is only defined for base elements \<|fim▁hole|> let base = document.fallback_base_url();
let parsed = UrlParser::new().base_url(&base).parse(&href.value());
parsed.unwrap_or(base)
}
/// Update the cached base element in response to binding or unbinding from
/// a tree.
pub fn bind_unbind(&self, tree_in_doc: bool) {
if !tree_in_doc {
return;
}
if self.upcast::<Element>().has_attribute(&atom!("href")) {
let document = document_from_node(self);
document.refresh_base_element();
}
}
}
impl VirtualMethods for HTMLBaseElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
if *attr.local_name() == atom!(href) {
document_from_node(self).refresh_base_element();
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
self.super_type().unwrap().bind_to_tree(tree_in_doc);
self.bind_unbind(tree_in_doc);
}
fn unbind_from_tree(&self, tree_in_doc: bool) {
self.super_type().unwrap().unbind_from_tree(tree_in_doc);
self.bind_unbind(tree_in_doc);
}
}<|fim▁end|> | that have a base url.");
let document = document_from_node(self); |
<|file_name|>debugging.js<|end_file_name|><|fim▁begin|>/*
* Flocking Debugging Unit Generators
* http://github.com/colinbdclark/flocking
*
* Copyright 2011-2014, Colin Clark
* Dual licensed under the MIT and GPL Version 2 licenses.
*/
/*global require*/
/*jshint white: false, newcap: true, regexp: true, browser: true,
forin: false, nomen: true, bitwise: false, maxerr: 100,
indent: 4, plusplus: false, curly: true, eqeqeq: true,
freeze: true, latedef: true, noarg: true, nonew: true, quotmark: double, undef: true,
unused: true, strict: true, asi: false, boss: false, evil: false, expr: false,
funcscope: false*/
var fluid = fluid || require("infusion"),
flock = fluid.registerNamespace("flock");
(function () {
"use strict";
// TODO: Unit tests.
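// Logs the source input's value, either when the trigger input rises or periodically at the rate given by "freq".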
flock.ugen.print = function (input, output, options) {
var that = flock.ugen(input, output, options);
that.gen = function (numSamps) {
var inputs = that.inputs,
out = that.output,
m = that.model,
label = m.label,
chan = inputs.channel,
// Basic multichannel support. This should be improved
// by factoring the multichannel input code out of flock.ugen.out.
source = chan ? inputs.source.output[chan.output[0]] : inputs.source.output,
trig = inputs.trigger.output[0],
freq = inputs.freq.output[0],
i,
j,
val;
if (trig > 0.0 && m.prevTrig <= 0.0) {
fluid.log(fluid.logLevel.IMPORTANT, label + source);
}
m.prevTrig = trig;
if (m.freq !== freq) {
m.sampInterval = Math.round(m.sampleRate / freq);
m.freq = freq;
m.counter = m.sampInterval;
}
for (i = 0, j = 0 ; i < numSamps; i++, j += m.strides.source) {
if (m.counter >= m.sampInterval) {
fluid.log(fluid.logLevel.IMPORTANT, label + source[j]);
m.counter = 0;<|fim▁hole|> }
m.counter++;
out[i] = val = source[i];
}
m.value = m.unscaledValue = val;
};
that.init = function () {
var o = that.options;
that.model.label = o.label ? o.label + ": " : "";
that.onInputChanged();
};
that.init();
return that;
};
flock.ugenDefaults("flock.ugen.print", {
rate: "audio",
inputs: {
source: null,
trigger: 0.0,
freq: 1.0
},
ugenOptions: {
model: {
unscaledValue: 0.0,
value: 0.0,
counter: 0,
prevTrig: 0.0
},
strideInputs: ["source"]
}
});
}());<|fim▁end|> | |
<|file_name|>type_allocation_places.rs<|end_file_name|><|fim▁begin|>#![allow(dead_code)]
use crate::config::MovableTypesHookOutput;
use crate::cpp_data::{CppItem, CppPath};
use crate::cpp_type::{CppPointerLikeTypeKind, CppType};
use crate::processor::ProcessorData;
use log::{info, trace};
use ritual_common::errors::Result;
use std::collections::HashMap;
#[derive(Default, Debug)]
struct TypeStats {
virtual_functions: Vec<String>,
pointer_encounters: Vec<String>,
non_pointer_encounters: Vec<String>,
}
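/// Logs the collected per-type statistics together with a suggested allocation place for each type.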
fn log_results(data_map: &HashMap<CppPath, TypeStats>) {
for (name, stats) in data_map {
trace!("type = {}; stats = {:?}", name.to_cpp_pseudo_code(), stats);
}
for (path, stats) in data_map {
let suggestion = if stats.virtual_functions.is_empty() {
if stats.pointer_encounters.is_empty() {
if stats.non_pointer_encounters.len() == MAX_ITEMS {
"movable (no pointers, no virtual functions)"<|fim▁hole|> && stats.non_pointer_encounters.len() == MAX_ITEMS
{
"probably movable (few pointers)"
} else if stats.pointer_encounters.len() == MAX_ITEMS {
"immovable (many pointers)"
} else {
"unknown (too few items)"
}
} else {
"immovable (has virtual functions)"
};
info!("{:?} is {}", path.to_templateless_string(), suggestion);
info!("path = {}", path.to_cpp_pseudo_code());
info!("* virtual_functions ({}):", stats.virtual_functions.len());
for item in &stats.virtual_functions {
info!("* * {}", item);
}
info!("* pointer_encounters ({}):", stats.pointer_encounters.len());
for item in &stats.pointer_encounters {
info!("* * {}", item);
}
info!(
"* non_pointer_encounters ({}):",
stats.non_pointer_encounters.len()
);
for item in &stats.non_pointer_encounters {
info!("* * {}", item);
}
}
}
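/// Recursively records whether each tracked class appears behind a pointer or by value in `cpp_type`.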
fn check_type(
cpp_type: &CppType,
is_behind_pointer: bool,
data_map: &mut HashMap<CppPath, TypeStats>,
item_text: &str,
) {
match cpp_type {
CppType::Class(path) => {
let good_path = path.deinstantiate();
if let Some(stats) = data_map.get_mut(&good_path) {
if is_behind_pointer {
if stats.pointer_encounters.len() < MAX_ITEMS {
stats.pointer_encounters.push(item_text.to_string());
}
} else if stats.non_pointer_encounters.len() < MAX_ITEMS {
stats.non_pointer_encounters.push(item_text.to_string());
}
}
if let Some(args) = &path.last().template_arguments {
for arg in args {
check_type(arg, false, data_map, item_text);
}
}
}
CppType::PointerLike { kind, target, .. } => {
check_type(
target,
*kind == CppPointerLikeTypeKind::Pointer,
data_map,
item_text,
);
}
_ => {}
}
}
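/// Maximum number of example items recorded per category.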
const MAX_ITEMS: usize = 10;
/// Detects the preferred type allocation place for each type based on
/// API of all known methods. Doesn't actually change the data,
/// only suggests stack allocated types for manual configuration.
pub fn suggest_allocation_places(data: &mut ProcessorData<'_>) -> Result<()> {
let mut data_map = HashMap::new();
for item in data.db.cpp_items() {
if item.source_id.is_some() {
continue;
}
if let CppItem::Type(type1) = &item.item {
if !type1.kind.is_class() {
continue;
}
if let Some(hook) = data.config.movable_types_hook() {
if hook(&type1.path)? != MovableTypesHookOutput::Unknown {
continue;
}
}
let good_path = type1.path.deinstantiate();
data_map.insert(good_path, Default::default());
}
}
for item in data.db.cpp_items() {
if item.source_id.is_some() {
continue;
}
if let CppItem::Function(function) = &item.item {
if function.is_private() {
continue;
}
let item_text = function.short_text();
for t in &function.arguments {
check_type(&t.argument_type, false, &mut data_map, &item_text);
}
check_type(&function.return_type, false, &mut data_map, &item_text);
if function.is_virtual() {
let type1 = function.class_path()?;
let good_path = type1.deinstantiate();
if let Some(stats) = data_map.get_mut(&good_path) {
if stats.virtual_functions.len() < MAX_ITEMS {
stats.virtual_functions.push(item_text);
}
}
}
}
}
log_results(&data_map);
Ok(())
}<|fim▁end|> | } else {
"probably movable (no pointers, no virtual functions, but too few items)"
}
} else if stats.pointer_encounters.len() < 5 |
<|file_name|>moment.min.js<|end_file_name|><|fim▁begin|>//! moment.js
//! version : 2.15.1
//! authors : Tim Wood, Iskren Chernev, Moment.js contributors
//! license : MIT
//! momentjs.com
;(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
global.moment = factory()
}(this, function () { 'use strict';
var hookCallback;
function utils_hooks__hooks () {
return hookCallback.apply(null, arguments);
}
// This is done to register the method called with moment()
// without creating circular dependencies.
function setHookCallback (callback) {
hookCallback = callback;
}
function isArray(input) {
return input instanceof Array || Object.prototype.toString.call(input) === '[object Array]';
}
function isObject(input) {
// IE8 will treat undefined and null as object if it wasn't for
// input != null
return input != null && Object.prototype.toString.call(input) === '[object Object]';
}
function isObjectEmpty(obj) {
var k;
for (k in obj) {
// even if its not own property I'd still call it non-empty
return false;
}
return true;
}
function isDate(input) {
return input instanceof Date || Object.prototype.toString.call(input) === '[object Date]';
}
function map(arr, fn) {
var res = [], i;
for (i = 0; i < arr.length; ++i) {
res.push(fn(arr[i], i));
}
return res;
}
function hasOwnProp(a, b) {
return Object.prototype.hasOwnProperty.call(a, b);
}
function extend(a, b) {
for (var i in b) {
if (hasOwnProp(b, i)) {
a[i] = b[i];
}
}
if (hasOwnProp(b, 'toString')) {
a.toString = b.toString;
}
if (hasOwnProp(b, 'valueOf')) {
a.valueOf = b.valueOf;
}
return a;
}
function create_utc__createUTC (input, format, locale, strict) {
return createLocalOrUTC(input, format, locale, strict, true).utc();
}
function defaultParsingFlags() {
// We need to deep clone this object.
return {
empty : false,
unusedTokens : [],
unusedInput : [],
overflow : -2,
charsLeftOver : 0,
nullInput : false,
invalidMonth : null,
invalidFormat : false,
userInvalidated : false,
iso : false,
parsedDateParts : [],
meridiem : null
};
}
function getParsingFlags(m) {
if (m._pf == null) {
m._pf = defaultParsingFlags();
}
return m._pf;
}
var some;
if (Array.prototype.some) {
some = Array.prototype.some;
} else {
some = function (fun) {
var t = Object(this);
var len = t.length >>> 0;
for (var i = 0; i < len; i++) {
if (i in t && fun.call(this, t[i], i, t)) {
return true;
}
}
return false;
};
}
function valid__isValid(m) {
if (m._isValid == null) {
var flags = getParsingFlags(m);
var parsedParts = some.call(flags.parsedDateParts, function (i) {
return i != null;
});
var isNowValid = !isNaN(m._d.getTime()) &&
flags.overflow < 0 &&
!flags.empty &&
!flags.invalidMonth &&
!flags.invalidWeekday &&
!flags.nullInput &&
!flags.invalidFormat &&
!flags.userInvalidated &&
(!flags.meridiem || (flags.meridiem && parsedParts));
if (m._strict) {
isNowValid = isNowValid &&
flags.charsLeftOver === 0 &&
flags.unusedTokens.length === 0 &&
flags.bigHour === undefined;
}
if (Object.isFrozen == null || !Object.isFrozen(m)) {
m._isValid = isNowValid;
}
else {
return isNowValid;
}
}
return m._isValid;
}
function valid__createInvalid (flags) {
var m = create_utc__createUTC(NaN);
if (flags != null) {
extend(getParsingFlags(m), flags);
}
else {
getParsingFlags(m).userInvalidated = true;
}
return m;
}
function isUndefined(input) {
return input === void 0;
}
// Plugins that add properties should also add the key here (null value),
// so we can properly clone ourselves.
var momentProperties = utils_hooks__hooks.momentProperties = [];
function copyConfig(to, from) {
var i, prop, val;
if (!isUndefined(from._isAMomentObject)) {
to._isAMomentObject = from._isAMomentObject;
}
if (!isUndefined(from._i)) {
to._i = from._i;
}
if (!isUndefined(from._f)) {
to._f = from._f;
}
if (!isUndefined(from._l)) {
to._l = from._l;
}
if (!isUndefined(from._strict)) {
to._strict = from._strict;
}
if (!isUndefined(from._tzm)) {
to._tzm = from._tzm;
}
if (!isUndefined(from._isUTC)) {
to._isUTC = from._isUTC;
}
if (!isUndefined(from._offset)) {
to._offset = from._offset;
}
if (!isUndefined(from._pf)) {
to._pf = getParsingFlags(from);
}
if (!isUndefined(from._locale)) {
to._locale = from._locale;
}
if (momentProperties.length > 0) {
for (i in momentProperties) {
prop = momentProperties[i];
val = from[prop];
if (!isUndefined(val)) {
to[prop] = val;
}
}
}
return to;
}
var updateInProgress = false;
// Moment prototype object
function Moment(config) {
copyConfig(this, config);
this._d = new Date(config._d != null ? config._d.getTime() : NaN);
// Prevent infinite loop in case updateOffset creates new moment
// objects.
if (updateInProgress === false) {
updateInProgress = true;
utils_hooks__hooks.updateOffset(this);
updateInProgress = false;
}
}
function isMoment (obj) {
return obj instanceof Moment || (obj != null && obj._isAMomentObject != null);
}
function absFloor (number) {
if (number < 0) {
// -0 -> 0
return Math.ceil(number) || 0;
} else {
return Math.floor(number);
}
}
function toInt(argumentForCoercion) {
var coercedNumber = +argumentForCoercion,
value = 0;
if (coercedNumber !== 0 && isFinite(coercedNumber)) {
value = absFloor(coercedNumber);
}
return value;
}
// compare two arrays, return the number of differences
function compareArrays(array1, array2, dontConvert) {
var len = Math.min(array1.length, array2.length),
lengthDiff = Math.abs(array1.length - array2.length),
diffs = 0,
i;
for (i = 0; i < len; i++) {
if ((dontConvert && array1[i] !== array2[i]) ||
(!dontConvert && toInt(array1[i]) !== toInt(array2[i]))) {
diffs++;
}
}
return diffs + lengthDiff;
}
function warn(msg) {
if (utils_hooks__hooks.suppressDeprecationWarnings === false &&
(typeof console !== 'undefined') && console.warn) {
console.warn('Deprecation warning: ' + msg);
}
}
function deprecate(msg, fn) {
var firstTime = true;
return extend(function () {
if (utils_hooks__hooks.deprecationHandler != null) {
utils_hooks__hooks.deprecationHandler(null, msg);
}
if (firstTime) {
var args = [];
var arg;
for (var i = 0; i < arguments.length; i++) {
arg = '';
if (typeof arguments[i] === 'object') {
arg += '\n[' + i + '] ';
for (var key in arguments[0]) {
arg += key + ': ' + arguments[0][key] + ', ';
}
arg = arg.slice(0, -2); // Remove trailing comma and space
} else {
arg = arguments[i];
}
args.push(arg);
}
warn(msg + '\nArguments: ' + Array.prototype.slice.call(args).join('') + '\n' + (new Error()).stack);
firstTime = false;
}
return fn.apply(this, arguments);
}, fn);
}
var deprecations = {};
function deprecateSimple(name, msg) {
if (utils_hooks__hooks.deprecationHandler != null) {
utils_hooks__hooks.deprecationHandler(name, msg);
}
if (!deprecations[name]) {
warn(msg);
deprecations[name] = true;
}
}
utils_hooks__hooks.suppressDeprecationWarnings = false;
utils_hooks__hooks.deprecationHandler = null;
function isFunction(input) {
return input instanceof Function || Object.prototype.toString.call(input) === '[object Function]';
}
function locale_set__set (config) {
var prop, i;
for (i in config) {
prop = config[i];
if (isFunction(prop)) {
this[i] = prop;
} else {
this['_' + i] = prop;
}
}
this._config = config;
// Lenient ordinal parsing accepts just a number in addition to
// number + (possibly) stuff coming from _ordinalParseLenient.
this._ordinalParseLenient = new RegExp(this._ordinalParse.source + '|' + (/\d{1,2}/).source);
}
function mergeConfigs(parentConfig, childConfig) {
var res = extend({}, parentConfig), prop;
for (prop in childConfig) {
if (hasOwnProp(childConfig, prop)) {
if (isObject(parentConfig[prop]) && isObject(childConfig[prop])) {
res[prop] = {};
extend(res[prop], parentConfig[prop]);
extend(res[prop], childConfig[prop]);
} else if (childConfig[prop] != null) {
res[prop] = childConfig[prop];
} else {
delete res[prop];
}
}
}
for (prop in parentConfig) {
if (hasOwnProp(parentConfig, prop) &&
!hasOwnProp(childConfig, prop) &&
isObject(parentConfig[prop])) {
// make sure changes to properties don't modify parent config
res[prop] = extend({}, res[prop]);
}
}
return res;
}
function Locale(config) {
if (config != null) {
this.set(config);
}
}
var keys;
if (Object.keys) {
keys = Object.keys;
} else {
keys = function (obj) {
var i, res = [];
for (i in obj) {
if (hasOwnProp(obj, i)) {
res.push(i);
}
}
return res;
};
}
var defaultCalendar = {
sameDay : '[Today at] LT',
nextDay : '[Tomorrow at] LT',
nextWeek : 'dddd [at] LT',
lastDay : '[Yesterday at] LT',
lastWeek : '[Last] dddd [at] LT',
sameElse : 'L'
};
function locale_calendar__calendar (key, mom, now) {
var output = this._calendar[key] || this._calendar['sameElse'];
return isFunction(output) ? output.call(mom, now) : output;
}
var defaultLongDateFormat = {
LTS : 'h:mm:ss A',
LT : 'h:mm A',
L : 'MM/DD/YYYY',
LL : 'MMMM D, YYYY',
LLL : 'MMMM D, YYYY h:mm A',
LLLL : 'dddd, MMMM D, YYYY h:mm A'
};
function longDateFormat (key) {
var format = this._longDateFormat[key],
formatUpper = this._longDateFormat[key.toUpperCase()];
if (format || !formatUpper) {
return format;
}
this._longDateFormat[key] = formatUpper.replace(/MMMM|MM|DD|dddd/g, function (val) {
return val.slice(1);
});
return this._longDateFormat[key];
}
var defaultInvalidDate = 'Invalid date';
function invalidDate () {
return this._invalidDate;
}
var defaultOrdinal = '%d';
var defaultOrdinalParse = /\d{1,2}/;
function ordinal (number) {
return this._ordinal.replace('%d', number);
}
var defaultRelativeTime = {
future : 'in %s',
past : '%s ago',
s : 'a few seconds',
m : 'a minute',
mm : '%d minutes',
h : 'an hour',
hh : '%d hours',
d : 'a day',
dd : '%d days',
M : 'a month',
MM : '%d months',
y : 'a year',
yy : '%d years'
};
function relative__relativeTime (number, withoutSuffix, string, isFuture) {
var output = this._relativeTime[string];
return (isFunction(output)) ?
output(number, withoutSuffix, string, isFuture) :
output.replace(/%d/i, number);
}
function pastFuture (diff, output) {
var format = this._relativeTime[diff > 0 ? 'future' : 'past'];
return isFunction(format) ? format(output) : format.replace(/%s/i, output);
}
var aliases = {};
function addUnitAlias (unit, shorthand) {
var lowerCase = unit.toLowerCase();
aliases[lowerCase] = aliases[lowerCase + 's'] = aliases[shorthand] = unit;
}
function normalizeUnits(units) {
return typeof units === 'string' ? aliases[units] || aliases[units.toLowerCase()] : undefined;
}
function normalizeObjectUnits(inputObject) {
var normalizedInput = {},
normalizedProp,
prop;
for (prop in inputObject) {
if (hasOwnProp(inputObject, prop)) {
normalizedProp = normalizeUnits(prop);
if (normalizedProp) {
normalizedInput[normalizedProp] = inputObject[prop];
}
}
}
return normalizedInput;
}
var priorities = {};
function addUnitPriority(unit, priority) {
priorities[unit] = priority;
}
function getPrioritizedUnits(unitsObj) {
var units = [];
for (var u in unitsObj) {
units.push({unit: u, priority: priorities[u]});
}
units.sort(function (a, b) {
return a.priority - b.priority;
});
return units;
}
function makeGetSet (unit, keepTime) {
return function (value) {
if (value != null) {
get_set__set(this, unit, value);
utils_hooks__hooks.updateOffset(this, keepTime);
return this;
} else {
return get_set__get(this, unit);
}
};
}
function get_set__get (mom, unit) {
return mom.isValid() ?
mom._d['get' + (mom._isUTC ? 'UTC' : '') + unit]() : NaN;
}
function get_set__set (mom, unit, value) {
if (mom.isValid()) {
mom._d['set' + (mom._isUTC ? 'UTC' : '') + unit](value);
}
}
// MOMENTS
function stringGet (units) {
units = normalizeUnits(units);
if (isFunction(this[units])) {
return this[units]();
}
return this;
}
function stringSet (units, value) {
if (typeof units === 'object') {
units = normalizeObjectUnits(units);
var prioritized = getPrioritizedUnits(units);
for (var i = 0; i < prioritized.length; i++) {
this[prioritized[i].unit](units[prioritized[i].unit]);
}
} else {
units = normalizeUnits(units);
if (isFunction(this[units])) {
return this[units](value);
}
}
return this;
}
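// Left-pads the absolute value of `number` with zeros to `targetLength`, keeping the sign ('+' forced when requested).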
function zeroFill(number, targetLength, forceSign) {
var absNumber = '' + Math.abs(number),
zerosToFill = targetLength - absNumber.length,
sign = number >= 0;
return (sign ? (forceSign ? '+' : '') : '-') +
Math.pow(10, Math.max(0, zerosToFill)).toString().substr(1) + absNumber;
}
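// Illustrative examples: zeroFill pads the absolute value and restores the sign, so
//   zeroFill(5, 2)        -> '05'
//   zeroFill(5, 2, true)  -> '+05'
//   zeroFill(-7, 3)       -> '-007'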
var formattingTokens = /(\[[^\[]*\])|(\\)?([Hh]mm(ss)?|Mo|MM?M?M?|Do|DDDo|DD?D?D?|ddd?d?|do?|w[o|w]?|W[o|W]?|Qo?|YYYYYY|YYYYY|YYYY|YY|gg(ggg?)?|GG(GGG?)?|e|E|a|A|hh?|HH?|kk?|mm?|ss?|S{1,9}|x|X|zz?|ZZ?|.)/g;
var localFormattingTokens = /(\[[^\[]*\])|(\\)?(LTS|LT|LL?L?L?|l{1,4})/g;
var formatFunctions = {};
var formatTokenFunctions = {};
// token: 'M'
// padded: ['MM', 2]
// ordinal: 'Mo'
// callback: function () { this.month() + 1 }
function addFormatToken (token, padded, ordinal, callback) {
var func = callback;
if (typeof callback === 'string') {
func = function () {
return this[callback]();
};
}
if (token) {
formatTokenFunctions[token] = func;
}
if (padded) {
formatTokenFunctions[padded[0]] = function () {
return zeroFill(func.apply(this, arguments), padded[1], padded[2]);
};
}
if (ordinal) {
formatTokenFunctions[ordinal] = function () {
return this.localeData().ordinal(func.apply(this, arguments), token);
};
}
}
function removeFormattingTokens(input) {
if (input.match(/\[[\s\S]/)) {
return input.replace(/^\[|\]$/g, '');
}
return input.replace(/\\/g, '');
}
function makeFormatFunction(format) {
var array = format.match(formattingTokens), i, length;
for (i = 0, length = array.length; i < length; i++) {
if (formatTokenFunctions[array[i]]) {
array[i] = formatTokenFunctions[array[i]];
} else {
array[i] = removeFormattingTokens(array[i]);
}
}
return function (mom) {
var output = '', i;
for (i = 0; i < length; i++) {
output += array[i] instanceof Function ? array[i].call(mom, format) : array[i];
}
return output;
};
}
// format date using native date object
function formatMoment(m, format) {
if (!m.isValid()) {
return m.localeData().invalidDate();
}
format = expandFormat(format, m.localeData());
formatFunctions[format] = formatFunctions[format] || makeFormatFunction(format);
return formatFunctions[format](m);
}
function expandFormat(format, locale) {
var i = 5;
function replaceLongDateFormatTokens(input) {
return locale.longDateFormat(input) || input;
}
localFormattingTokens.lastIndex = 0;
while (i >= 0 && localFormattingTokens.test(format)) {
format = format.replace(localFormattingTokens, replaceLongDateFormatTokens);
localFormattingTokens.lastIndex = 0;
i -= 1;
}
return format;
}
var match1 = /\d/; // 0 - 9
var match2 = /\d\d/; // 00 - 99
var match3 = /\d{3}/; // 000 - 999
var match4 = /\d{4}/; // 0000 - 9999
var match6 = /[+-]?\d{6}/; // -999999 - 999999
var match1to2 = /\d\d?/; // 0 - 99
var match3to4 = /\d\d\d\d?/; // 999 - 9999
var match5to6 = /\d\d\d\d\d\d?/; // 99999 - 999999
var match1to3 = /\d{1,3}/; // 0 - 999
var match1to4 = /\d{1,4}/; // 0 - 9999
var match1to6 = /[+-]?\d{1,6}/; // -999999 - 999999
var matchUnsigned = /\d+/; // 0 - inf
var matchSigned = /[+-]?\d+/; // -inf - inf
var matchOffset = /Z|[+-]\d\d:?\d\d/gi; // +00:00 -00:00 +0000 -0000 or Z
var matchShortOffset = /Z|[+-]\d\d(?::?\d\d)?/gi; // +00 -00 +00:00 -00:00 +0000 -0000 or Z
var matchTimestamp = /[+-]?\d+(\.\d{1,3})?/; // 123456789 123456789.123
// any word (or two) characters or numbers, including two/three-word months in Arabic.
// includes Scottish Gaelic two-word and hyphenated months
var matchWord = /[0-9]*['a-z\u00A0-\u05FF\u0700-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]+|[\u0600-\u06FF\/]+(\s*?[\u0600-\u06FF]+){1,2}/i;
var regexes = {};
function addRegexToken (token, regex, strictRegex) {
regexes[token] = isFunction(regex) ? regex : function (isStrict, localeData) {
return (isStrict && strictRegex) ? strictRegex : regex;
};
}
function getParseRegexForToken (token, config) {
if (!hasOwnProp(regexes, token)) {
return new RegExp(unescapeFormat(token));
}
return regexes[token](config._strict, config._locale);
}
// Code from http://stackoverflow.com/questions/3561493/is-there-a-regexp-escape-function-in-javascript
function unescapeFormat(s) {
return regexEscape(s.replace('\\', '').replace(/\\(\[)|\\(\])|\[([^\]\[]*)\]|\\(.)/g, function (matched, p1, p2, p3, p4) {
return p1 || p2 || p3 || p4;
}));
}
function regexEscape(s) {
return s.replace(/[-\/\\^$*+?.()|[\]{}]/g, '\\$&');
}
var tokens = {};
function addParseToken (token, callback) {
var i, func = callback;
if (typeof token === 'string') {
token = [token];
}
if (typeof callback === 'number') {
func = function (input, array) {
array[callback] = toInt(input);
};
}
for (i = 0; i < token.length; i++) {
tokens[token[i]] = func;
}
}
function addWeekParseToken (token, callback) {
addParseToken(token, function (input, array, config, token) {
config._w = config._w || {};
callback(input, config._w, config, token);
});
}
function addTimeToArrayFromToken(token, input, config) {
if (input != null && hasOwnProp(tokens, token)) {
tokens[token](input, config._a, config, token);
}
}
var YEAR = 0;
var MONTH = 1;
var DATE = 2;
var HOUR = 3;
var MINUTE = 4;
var SECOND = 5;
var MILLISECOND = 6;
var WEEK = 7;
var WEEKDAY = 8;
var indexOf;
if (Array.prototype.indexOf) {
indexOf = Array.prototype.indexOf;
} else {
indexOf = function (o) {
// fallback for environments without a native Array.prototype.indexOf
var i;
for (i = 0; i < this.length; ++i) {
if (this[i] === o) {
return i;
}
}
return -1;
};
}
function daysInMonth(year, month) {
return new Date(Date.UTC(year, month + 1, 0)).getUTCDate();
}
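// Illustrative examples: day 0 of the following month is the last day of the
// requested (zero-indexed) month, so
//   daysInMonth(2016, 1) -> 29   (February 2016, a leap year)
//   daysInMonth(2017, 1) -> 28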
// FORMATTING
addFormatToken('M', ['MM', 2], 'Mo', function () {
return this.month() + 1;
});
addFormatToken('MMM', 0, 0, function (format) {
return this.localeData().monthsShort(this, format);
});
addFormatToken('MMMM', 0, 0, function (format) {
return this.localeData().months(this, format);
});
// ALIASES
addUnitAlias('month', 'M');
// PRIORITY
addUnitPriority('month', 8);
// PARSING
addRegexToken('M', match1to2);
addRegexToken('MM', match1to2, match2);
addRegexToken('MMM', function (isStrict, locale) {
return locale.monthsShortRegex(isStrict);
});
addRegexToken('MMMM', function (isStrict, locale) {
return locale.monthsRegex(isStrict);
});
addParseToken(['M', 'MM'], function (input, array) {
array[MONTH] = toInt(input) - 1;
});
addParseToken(['MMM', 'MMMM'], function (input, array, config, token) {
var month = config._locale.monthsParse(input, token, config._strict);
// if we didn't find a month name, mark the date as invalid.
if (month != null) {
array[MONTH] = month;
} else {
getParsingFlags(config).invalidMonth = input;
}
});
// LOCALES
var MONTHS_IN_FORMAT = /D[oD]?(\[[^\[\]]*\]|\s+)+MMMM?/;
var defaultLocaleMonths = 'January_February_March_April_May_June_July_August_September_October_November_December'.split('_');
function localeMonths (m, format) {
if (!m) {
return this._months;
}
return isArray(this._months) ? this._months[m.month()] :
this._months[(this._months.isFormat || MONTHS_IN_FORMAT).test(format) ? 'format' : 'standalone'][m.month()];
}
var defaultLocaleMonthsShort = 'Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec'.split('_');
function localeMonthsShort (m, format) {
if (!m) {
return this._monthsShort;
}
return isArray(this._monthsShort) ? this._monthsShort[m.month()] :
this._monthsShort[MONTHS_IN_FORMAT.test(format) ? 'format' : 'standalone'][m.month()];
}
function units_month__handleStrictParse(monthName, format, strict) {
var i, ii, mom, llc = monthName.toLocaleLowerCase();
if (!this._monthsParse) {
// this._monthsParse itself is not used by this strict code path; it is only initialized for consistency
this._monthsParse = [];
this._longMonthsParse = [];
this._shortMonthsParse = [];
for (i = 0; i < 12; ++i) {
mom = create_utc__createUTC([2000, i]);
this._shortMonthsParse[i] = this.monthsShort(mom, '').toLocaleLowerCase();
this._longMonthsParse[i] = this.months(mom, '').toLocaleLowerCase();
}
}
if (strict) {
if (format === 'MMM') {
ii = indexOf.call(this._shortMonthsParse, llc);
return ii !== -1 ? ii : null;
} else {
ii = indexOf.call(this._longMonthsParse, llc);
return ii !== -1 ? ii : null;
}
} else {
if (format === 'MMM') {
ii = indexOf.call(this._shortMonthsParse, llc);
if (ii !== -1) {
return ii;
}
ii = indexOf.call(this._longMonthsParse, llc);
return ii !== -1 ? ii : null;
} else {
ii = indexOf.call(this._longMonthsParse, llc);
if (ii !== -1) {
return ii;
}
ii = indexOf.call(this._shortMonthsParse, llc);
return ii !== -1 ? ii : null;
}
}
}
function localeMonthsParse (monthName, format, strict) {
var i, mom, regex;
if (this._monthsParseExact) {
return units_month__handleStrictParse.call(this, monthName, format, strict);
}
if (!this._monthsParse) {
this._monthsParse = [];
this._longMonthsParse = [];
this._shortMonthsParse = [];
}
// TODO: add sorting
// Sorting makes sure that if one month (or abbr) is a prefix of another, the longer one matches first;
// see sorting in computeMonthsParse
for (i = 0; i < 12; i++) {
// make the regex if we don't have it already
mom = create_utc__createUTC([2000, i]);
if (strict && !this._longMonthsParse[i]) {
this._longMonthsParse[i] = new RegExp('^' + this.months(mom, '').replace('.', '') + '$', 'i');
this._shortMonthsParse[i] = new RegExp('^' + this.monthsShort(mom, '').replace('.', '') + '$', 'i');
}
if (!strict && !this._monthsParse[i]) {
regex = '^' + this.months(mom, '') + '|^' + this.monthsShort(mom, '');
this._monthsParse[i] = new RegExp(regex.replace('.', ''), 'i');
}
// test the regex
if (strict && format === 'MMMM' && this._longMonthsParse[i].test(monthName)) {
return i;
} else if (strict && format === 'MMM' && this._shortMonthsParse[i].test(monthName)) {
return i;
} else if (!strict && this._monthsParse[i].test(monthName)) {
return i;
}
}
}
// MOMENTS
function setMonth (mom, value) {
var dayOfMonth;
if (!mom.isValid()) {
// No op
return mom;
}
if (typeof value === 'string') {
if (/^\d+$/.test(value)) {
value = toInt(value);
} else {
value = mom.localeData().monthsParse(value);
// TODO: Another silent failure?
if (typeof value !== 'number') {
return mom;
}
}
}
dayOfMonth = Math.min(mom.date(), daysInMonth(mom.year(), value));
mom._d['set' + (mom._isUTC ? 'UTC' : '') + 'Month'](value, dayOfMonth);
return mom;
}
function getSetMonth (value) {
if (value != null) {
setMonth(this, value);
utils_hooks__hooks.updateOffset(this, true);
return this;
} else {
return get_set__get(this, 'Month');
}
}
function getDaysInMonth () {
return daysInMonth(this.year(), this.month());
}
var defaultMonthsShortRegex = matchWord;
function monthsShortRegex (isStrict) {
if (this._monthsParseExact) {
if (!hasOwnProp(this, '_monthsRegex')) {
computeMonthsParse.call(this);
}
if (isStrict) {
return this._monthsShortStrictRegex;
} else {
return this._monthsShortRegex;
}
} else {
if (!hasOwnProp(this, '_monthsShortRegex')) {
this._monthsShortRegex = defaultMonthsShortRegex;
}
return this._monthsShortStrictRegex && isStrict ?
this._monthsShortStrictRegex : this._monthsShortRegex;
}
}
var defaultMonthsRegex = matchWord;
function monthsRegex (isStrict) {
if (this._monthsParseExact) {
if (!hasOwnProp(this, '_monthsRegex')) {
computeMonthsParse.call(this);
}
if (isStrict) {
return this._monthsStrictRegex;
} else {
return this._monthsRegex;
}
} else {
if (!hasOwnProp(this, '_monthsRegex')) {
this._monthsRegex = defaultMonthsRegex;
}
return this._monthsStrictRegex && isStrict ?
this._monthsStrictRegex : this._monthsRegex;
}
}
function computeMonthsParse () {
function cmpLenRev(a, b) {
return b.length - a.length;
}
var shortPieces = [], longPieces = [], mixedPieces = [],
i, mom;
for (i = 0; i < 12; i++) {
// make the regex if we don't have it already
mom = create_utc__createUTC([2000, i]);
shortPieces.push(this.monthsShort(mom, ''));
longPieces.push(this.months(mom, ''));
mixedPieces.push(this.months(mom, ''));
mixedPieces.push(this.monthsShort(mom, ''));
}
// Sorting makes sure if one month (or abbr) is a prefix of another it
// will match the longer piece.
shortPieces.sort(cmpLenRev);
longPieces.sort(cmpLenRev);
mixedPieces.sort(cmpLenRev);
for (i = 0; i < 12; i++) {
shortPieces[i] = regexEscape(shortPieces[i]);
longPieces[i] = regexEscape(longPieces[i]);
}
for (i = 0; i < 24; i++) {
mixedPieces[i] = regexEscape(mixedPieces[i]);
}
this._monthsRegex = new RegExp('^(' + mixedPieces.join('|') + ')', 'i');
this._monthsShortRegex = this._monthsRegex;
this._monthsStrictRegex = new RegExp('^(' + longPieces.join('|') + ')', 'i');
this._monthsShortStrictRegex = new RegExp('^(' + shortPieces.join('|') + ')', 'i');
}
// FORMATTING
addFormatToken('Y', 0, 0, function () {
var y = this.year();
return y <= 9999 ? '' + y : '+' + y;
});
addFormatToken(0, ['YY', 2], 0, function () {
return this.year() % 100;
});
addFormatToken(0, ['YYYY', 4], 0, 'year');
addFormatToken(0, ['YYYYY', 5], 0, 'year');
addFormatToken(0, ['YYYYYY', 6, true], 0, 'year');
// ALIASES
addUnitAlias('year', 'y');
// PRIORITIES
addUnitPriority('year', 1);
// PARSING
addRegexToken('Y', matchSigned);
addRegexToken('YY', match1to2, match2);
addRegexToken('YYYY', match1to4, match4);
addRegexToken('YYYYY', match1to6, match6);
addRegexToken('YYYYYY', match1to6, match6);
addParseToken(['YYYYY', 'YYYYYY'], YEAR);
addParseToken('YYYY', function (input, array) {
array[YEAR] = input.length === 2 ? utils_hooks__hooks.parseTwoDigitYear(input) : toInt(input);
});
addParseToken('YY', function (input, array) {
array[YEAR] = utils_hooks__hooks.parseTwoDigitYear(input);
});
addParseToken('Y', function (input, array) {
array[YEAR] = parseInt(input, 10);
});
// HELPERS
function daysInYear(year) {
return isLeapYear(year) ? 366 : 365;
}
function isLeapYear(year) {
return (year % 4 === 0 && year % 100 !== 0) || year % 400 === 0;
}
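// Illustrative examples: the Gregorian leap-year rule above gives
//   isLeapYear(2016) -> true
//   isLeapYear(1900) -> false   (divisible by 100 but not by 400)
//   isLeapYear(2000) -> true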
// HOOKS
utils_hooks__hooks.parseTwoDigitYear = function (input) {
return toInt(input) + (toInt(input) > 68 ? 1900 : 2000);
};
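// Illustrative examples: two-digit years above 68 map to the 1900s, the rest to the 2000s, so
//   utils_hooks__hooks.parseTwoDigitYear('69') -> 1969
//   utils_hooks__hooks.parseTwoDigitYear('68') -> 2068
//   utils_hooks__hooks.parseTwoDigitYear('14') -> 2014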
// MOMENTS
var getSetYear = makeGetSet('FullYear', true);
function getIsLeapYear () {
return isLeapYear(this.year());
}
function createDate (y, m, d, h, M, s, ms) {
//can't just apply() to create a date:
//http://stackoverflow.com/questions/181348/instantiating-a-javascript-object-by-calling-prototype-constructor-apply
var date = new Date(y, m, d, h, M, s, ms);
//the date constructor remaps years 0-99 to 1900-1999
if (y < 100 && y >= 0 && isFinite(date.getFullYear())) {
date.setFullYear(y);
}
return date;
}
function createUTCDate (y) {
var date = new Date(Date.UTC.apply(null, arguments));
//the Date.UTC function remaps years 0-99 to 1900-1999
if (y < 100 && y >= 0 && isFinite(date.getUTCFullYear())) {
date.setUTCFullYear(y);
}
return date;
}
// start-of-first-week - start-of-year
function firstWeekOffset(year, dow, doy) {
var // first-week day -- which january is always in the first week (4 for iso, 1 for other)
fwd = 7 + dow - doy,
// first-week day local weekday -- which local weekday is fwd
fwdlw = (7 + createUTCDate(year, 0, fwd).getUTCDay() - dow) % 7;
return -fwdlw + fwd - 1;
}
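// Worked example (ISO week rules, dow = 1, doy = 4): for 2016, fwd = 7 + 1 - 4 = 4 and
// January 4th 2016 is a Monday, so firstWeekOffset(2016, 1, 4) === 3, i.e. ISO week 1
// of 2016 starts on day-of-year 4 (January 4th).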
//http://en.wikipedia.org/wiki/ISO_week_date#Calculating_a_date_given_the_year.2C_week_number_and_weekday
function dayOfYearFromWeeks(year, week, weekday, dow, doy) {
var localWeekday = (7 + weekday - dow) % 7,
weekOffset = firstWeekOffset(year, dow, doy),
dayOfYear = 1 + 7 * (week - 1) + localWeekday + weekOffset,
resYear, resDayOfYear;
if (dayOfYear <= 0) {
resYear = year - 1;
resDayOfYear = daysInYear(resYear) + dayOfYear;
} else if (dayOfYear > daysInYear(year)) {
resYear = year + 1;
resDayOfYear = dayOfYear - daysInYear(year);
} else {
resYear = year;
resDayOfYear = dayOfYear;
}
return {
year: resYear,
dayOfYear: resDayOfYear
};
}
function weekOfYear(mom, dow, doy) {
var weekOffset = firstWeekOffset(mom.year(), dow, doy),
week = Math.floor((mom.dayOfYear() - weekOffset - 1) / 7) + 1,
resWeek, resYear;
if (week < 1) {
resYear = mom.year() - 1;
resWeek = week + weeksInYear(resYear, dow, doy);
} else if (week > weeksInYear(mom.year(), dow, doy)) {
resWeek = week - weeksInYear(mom.year(), dow, doy);
resYear = mom.year() + 1;
} else {
resYear = mom.year();
resWeek = week;
}
return {
week: resWeek,
year: resYear
};
}
function weeksInYear(year, dow, doy) {
var weekOffset = firstWeekOffset(year, dow, doy),
weekOffsetNext = firstWeekOffset(year + 1, dow, doy);
return (daysInYear(year) - weekOffset + weekOffsetNext) / 7;
}
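// Illustrative examples (ISO week rules, dow = 1, doy = 4):
//   weeksInYear(2015, 1, 4) -> 53
//   weeksInYear(2016, 1, 4) -> 52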
// FORMATTING
addFormatToken('w', ['ww', 2], 'wo', 'week');
addFormatToken('W', ['WW', 2], 'Wo', 'isoWeek');
// ALIASES
addUnitAlias('week', 'w');
addUnitAlias('isoWeek', 'W');
// PRIORITIES
addUnitPriority('week', 5);
addUnitPriority('isoWeek', 5);
// PARSING
addRegexToken('w', match1to2);
addRegexToken('ww', match1to2, match2);
addRegexToken('W', match1to2);
addRegexToken('WW', match1to2, match2);
addWeekParseToken(['w', 'ww', 'W', 'WW'], function (input, week, config, token) {
week[token.substr(0, 1)] = toInt(input);
});
// HELPERS
// LOCALES
function localeWeek (mom) {
return weekOfYear(mom, this._week.dow, this._week.doy).week;
}
var defaultLocaleWeek = {
dow : 0, // Sunday is the first day of the week.
doy : 6 // The week that contains Jan 1st is the first week of the year.
};
function localeFirstDayOfWeek () {
return this._week.dow;
}
function localeFirstDayOfYear () {
return this._week.doy;
}
// MOMENTS
function getSetWeek (input) {
var week = this.localeData().week(this);
return input == null ? week : this.add((input - week) * 7, 'd');
}
function getSetISOWeek (input) {
var week = weekOfYear(this, 1, 4).week;
return input == null ? week : this.add((input - week) * 7, 'd');
}
// FORMATTING
addFormatToken('d', 0, 'do', 'day');
addFormatToken('dd', 0, 0, function (format) {
return this.localeData().weekdaysMin(this, format);
});
addFormatToken('ddd', 0, 0, function (format) {
return this.localeData().weekdaysShort(this, format);
});
addFormatToken('dddd', 0, 0, function (format) {
return this.localeData().weekdays(this, format);
});
addFormatToken('e', 0, 0, 'weekday');
addFormatToken('E', 0, 0, 'isoWeekday');
// ALIASES
addUnitAlias('day', 'd');
addUnitAlias('weekday', 'e');
addUnitAlias('isoWeekday', 'E');
// PRIORITY
addUnitPriority('day', 11);
addUnitPriority('weekday', 11);
addUnitPriority('isoWeekday', 11);
// PARSING
addRegexToken('d', match1to2);
addRegexToken('e', match1to2);
addRegexToken('E', match1to2);
addRegexToken('dd', function (isStrict, locale) {
return locale.weekdaysMinRegex(isStrict);
});
addRegexToken('ddd', function (isStrict, locale) {
return locale.weekdaysShortRegex(isStrict);
});
addRegexToken('dddd', function (isStrict, locale) {
return locale.weekdaysRegex(isStrict);
});
addWeekParseToken(['dd', 'ddd', 'dddd'], function (input, week, config, token) {
var weekday = config._locale.weekdaysParse(input, token, config._strict);
// if we didn't get a weekday name, mark the date as invalid
if (weekday != null) {
week.d = weekday;
} else {
getParsingFlags(config).invalidWeekday = input;
}
});
addWeekParseToken(['d', 'e', 'E'], function (input, week, config, token) {
week[token] = toInt(input);
});
// HELPERS
function parseWeekday(input, locale) {
if (typeof input !== 'string') {
return input;
}
if (!isNaN(input)) {
return parseInt(input, 10);
}
input = locale.weekdaysParse(input);
if (typeof input === 'number') {
return input;
}
return null;
}
function parseIsoWeekday(input, locale) {
if (typeof input === 'string') {
return locale.weekdaysParse(input) % 7 || 7;
}
return isNaN(input) ? null : input;
}
// LOCALES
var defaultLocaleWeekdays = 'Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday'.split('_');
function localeWeekdays (m, format) {
if (!m) {
return this._weekdays;
}
return isArray(this._weekdays) ? this._weekdays[m.day()] :
this._weekdays[this._weekdays.isFormat.test(format) ? 'format' : 'standalone'][m.day()];
}
var defaultLocaleWeekdaysShort = 'Sun_Mon_Tue_Wed_Thu_Fri_Sat'.split('_');
function localeWeekdaysShort (m) {
return (m) ? this._weekdaysShort[m.day()] : this._weekdaysShort;
}
var defaultLocaleWeekdaysMin = 'Su_Mo_Tu_We_Th_Fr_Sa'.split('_');
function localeWeekdaysMin (m) {
return (m) ? this._weekdaysMin[m.day()] : this._weekdaysMin;
}
function day_of_week__handleStrictParse(weekdayName, format, strict) {
var i, ii, mom, llc = weekdayName.toLocaleLowerCase();
if (!this._weekdaysParse) {
this._weekdaysParse = [];
this._shortWeekdaysParse = [];
this._minWeekdaysParse = [];
for (i = 0; i < 7; ++i) {
mom = create_utc__createUTC([2000, 1]).day(i);
this._minWeekdaysParse[i] = this.weekdaysMin(mom, '').toLocaleLowerCase();
this._shortWeekdaysParse[i] = this.weekdaysShort(mom, '').toLocaleLowerCase();
this._weekdaysParse[i] = this.weekdays(mom, '').toLocaleLowerCase();
}
}
if (strict) {
if (format === 'dddd') {
ii = indexOf.call(this._weekdaysParse, llc);
return ii !== -1 ? ii : null;
} else if (format === 'ddd') {
ii = indexOf.call(this._shortWeekdaysParse, llc);
return ii !== -1 ? ii : null;
} else {
ii = indexOf.call(this._minWeekdaysParse, llc);
return ii !== -1 ? ii : null;
}
} else {
if (format === 'dddd') {
ii = indexOf.call(this._weekdaysParse, llc);
if (ii !== -1) {
return ii;
}
ii = indexOf.call(this._shortWeekdaysParse, llc);
if (ii !== -1) {
return ii;
}
ii = indexOf.call(this._minWeekdaysParse, llc);
return ii !== -1 ? ii : null;
} else if (format === 'ddd') {
ii = indexOf.call(this._shortWeekdaysParse, llc);
if (ii !== -1) {
return ii;
}
ii = indexOf.call(this._weekdaysParse, llc);
if (ii !== -1) {
return ii;
}
ii = indexOf.call(this._minWeekdaysParse, llc);
return ii !== -1 ? ii : null;
} else {
ii = indexOf.call(this._minWeekdaysParse, llc);
if (ii !== -1) {
return ii;
}
ii = indexOf.call(this._weekdaysParse, llc);
if (ii !== -1) {
return ii;
}
ii = indexOf.call(this._shortWeekdaysParse, llc);
return ii !== -1 ? ii : null;
}
}
}
function localeWeekdaysParse (weekdayName, format, strict) {
var i, mom, regex;
if (this._weekdaysParseExact) {
return day_of_week__handleStrictParse.call(this, weekdayName, format, strict);
}
if (!this._weekdaysParse) {
this._weekdaysParse = [];
this._minWeekdaysParse = [];
this._shortWeekdaysParse = [];
this._fullWeekdaysParse = [];
}
for (i = 0; i < 7; i++) {
// make the regex if we don't have it already
mom = create_utc__createUTC([2000, 1]).day(i);
if (strict && !this._fullWeekdaysParse[i]) {
this._fullWeekdaysParse[i] = new RegExp('^' + this.weekdays(mom, '').replace('.', '\.?') + '$', 'i');
this._shortWeekdaysParse[i] = new RegExp('^' + this.weekdaysShort(mom, '').replace('.', '\.?') + '$', 'i');
this._minWeekdaysParse[i] = new RegExp('^' + this.weekdaysMin(mom, '').replace('.', '\.?') + '$', 'i');
}
if (!this._weekdaysParse[i]) {
regex = '^' + this.weekdays(mom, '') + '|^' + this.weekdaysShort(mom, '') + '|^' + this.weekdaysMin(mom, '');
this._weekdaysParse[i] = new RegExp(regex.replace('.', ''), 'i');
}
// test the regex
if (strict && format === 'dddd' && this._fullWeekdaysParse[i].test(weekdayName)) {
return i;
} else if (strict && format === 'ddd' && this._shortWeekdaysParse[i].test(weekdayName)) {
return i;
} else if (strict && format === 'dd' && this._minWeekdaysParse[i].test(weekdayName)) {
return i;
} else if (!strict && this._weekdaysParse[i].test(weekdayName)) {
return i;
}
}
}
// MOMENTS
function getSetDayOfWeek (input) {
if (!this.isValid()) {
return input != null ? this : NaN;
}
var day = this._isUTC ? this._d.getUTCDay() : this._d.getDay();
if (input != null) {
input = parseWeekday(input, this.localeData());
return this.add(input - day, 'd');
} else {
return day;
}
}
function getSetLocaleDayOfWeek (input) {
if (!this.isValid()) {
return input != null ? this : NaN;
}
var weekday = (this.day() + 7 - this.localeData()._week.dow) % 7;
return input == null ? weekday : this.add(input - weekday, 'd');
}
function getSetISODayOfWeek (input) {
if (!this.isValid()) {
return input != null ? this : NaN;
}
// behaves the same as moment#day except
// as a getter, returns 7 instead of 0 (1-7 range instead of 0-6)
// as a setter, sunday should belong to the previous week.
if (input != null) {
var weekday = parseIsoWeekday(input, this.localeData());
return this.day(this.day() % 7 ? weekday : weekday - 7);
} else {
return this.day() || 7;
}
}
var defaultWeekdaysRegex = matchWord;
function weekdaysRegex (isStrict) {
if (this._weekdaysParseExact) {
if (!hasOwnProp(this, '_weekdaysRegex')) {
computeWeekdaysParse.call(this);
}
if (isStrict) {
return this._weekdaysStrictRegex;
} else {
return this._weekdaysRegex;
}
} else {
if (!hasOwnProp(this, '_weekdaysRegex')) {
this._weekdaysRegex = defaultWeekdaysRegex;
}
return this._weekdaysStrictRegex && isStrict ?
this._weekdaysStrictRegex : this._weekdaysRegex;
}
}
var defaultWeekdaysShortRegex = matchWord;
function weekdaysShortRegex (isStrict) {
if (this._weekdaysParseExact) {
if (!hasOwnProp(this, '_weekdaysRegex')) {
computeWeekdaysParse.call(this);
}
if (isStrict) {
return this._weekdaysShortStrictRegex;
} else {
return this._weekdaysShortRegex;
}
} else {
if (!hasOwnProp(this, '_weekdaysShortRegex')) {
this._weekdaysShortRegex = defaultWeekdaysShortRegex;
}
return this._weekdaysShortStrictRegex && isStrict ?
this._weekdaysShortStrictRegex : this._weekdaysShortRegex;
}
}
var defaultWeekdaysMinRegex = matchWord;
function weekdaysMinRegex (isStrict) {
if (this._weekdaysParseExact) {
if (!hasOwnProp(this, '_weekdaysRegex')) {
computeWeekdaysParse.call(this);
}
if (isStrict) {
return this._weekdaysMinStrictRegex;
} else {
return this._weekdaysMinRegex;
}
} else {
if (!hasOwnProp(this, '_weekdaysMinRegex')) {
this._weekdaysMinRegex = defaultWeekdaysMinRegex;
}
return this._weekdaysMinStrictRegex && isStrict ?
this._weekdaysMinStrictRegex : this._weekdaysMinRegex;
}
}
function computeWeekdaysParse () {
function cmpLenRev(a, b) {
return b.length - a.length;
}
var minPieces = [], shortPieces = [], longPieces = [], mixedPieces = [],
i, mom, minp, shortp, longp;
for (i = 0; i < 7; i++) {
// make the regex if we don't have it already
mom = create_utc__createUTC([2000, 1]).day(i);
minp = this.weekdaysMin(mom, '');
shortp = this.weekdaysShort(mom, '');
longp = this.weekdays(mom, '');
minPieces.push(minp);
shortPieces.push(shortp);
longPieces.push(longp);
mixedPieces.push(minp);
mixedPieces.push(shortp);
mixedPieces.push(longp);
}
// Sorting makes sure if one weekday (or abbr) is a prefix of another it
// will match the longer piece.
minPieces.sort(cmpLenRev);
shortPieces.sort(cmpLenRev);
longPieces.sort(cmpLenRev);
mixedPieces.sort(cmpLenRev);
for (i = 0; i < 7; i++) {
shortPieces[i] = regexEscape(shortPieces[i]);
longPieces[i] = regexEscape(longPieces[i]);
mixedPieces[i] = regexEscape(mixedPieces[i]);
}
this._weekdaysRegex = new RegExp('^(' + mixedPieces.join('|') + ')', 'i');
this._weekdaysShortRegex = this._weekdaysRegex;
this._weekdaysMinRegex = this._weekdaysRegex;
this._weekdaysStrictRegex = new RegExp('^(' + longPieces.join('|') + ')', 'i');
this._weekdaysShortStrictRegex = new RegExp('^(' + shortPieces.join('|') + ')', 'i');
this._weekdaysMinStrictRegex = new RegExp('^(' + minPieces.join('|') + ')', 'i');
}
// FORMATTING
function hFormat() {
return this.hours() % 12 || 12;
}
function kFormat() {
return this.hours() || 24;
}
addFormatToken('H', ['HH', 2], 0, 'hour');
addFormatToken('h', ['hh', 2], 0, hFormat);
addFormatToken('k', ['kk', 2], 0, kFormat);
addFormatToken('hmm', 0, 0, function () {
return '' + hFormat.apply(this) + zeroFill(this.minutes(), 2);
});
addFormatToken('hmmss', 0, 0, function () {
return '' + hFormat.apply(this) + zeroFill(this.minutes(), 2) +
zeroFill(this.seconds(), 2);
});
addFormatToken('Hmm', 0, 0, function () {
return '' + this.hours() + zeroFill(this.minutes(), 2);
});
addFormatToken('Hmmss', 0, 0, function () {
return '' + this.hours() + zeroFill(this.minutes(), 2) +
zeroFill(this.seconds(), 2);
});
function meridiem (token, lowercase) {
addFormatToken(token, 0, 0, function () {
return this.localeData().meridiem(this.hours(), this.minutes(), lowercase);
});
}
meridiem('a', true);
meridiem('A', false);
// ALIASES
addUnitAlias('hour', 'h');
// PRIORITY
addUnitPriority('hour', 13);
// PARSING
function matchMeridiem (isStrict, locale) {
return locale._meridiemParse;
}
addRegexToken('a', matchMeridiem);
addRegexToken('A', matchMeridiem);
addRegexToken('H', match1to2);
addRegexToken('h', match1to2);
addRegexToken('HH', match1to2, match2);
addRegexToken('hh', match1to2, match2);
addRegexToken('hmm', match3to4);
addRegexToken('hmmss', match5to6);
addRegexToken('Hmm', match3to4);
addRegexToken('Hmmss', match5to6);
addParseToken(['H', 'HH'], HOUR);
addParseToken(['a', 'A'], function (input, array, config) {
config._isPm = config._locale.isPM(input);
config._meridiem = input;
});
addParseToken(['h', 'hh'], function (input, array, config) {
array[HOUR] = toInt(input);
getParsingFlags(config).bigHour = true;
});
addParseToken('hmm', function (input, array, config) {
var pos = input.length - 2;
array[HOUR] = toInt(input.substr(0, pos));
array[MINUTE] = toInt(input.substr(pos));
getParsingFlags(config).bigHour = true;
});
addParseToken('hmmss', function (input, array, config) {
var pos1 = input.length - 4;
var pos2 = input.length - 2;
array[HOUR] = toInt(input.substr(0, pos1));
array[MINUTE] = toInt(input.substr(pos1, 2));
array[SECOND] = toInt(input.substr(pos2));
getParsingFlags(config).bigHour = true;
});
addParseToken('Hmm', function (input, array, config) {
var pos = input.length - 2;
array[HOUR] = toInt(input.substr(0, pos));
array[MINUTE] = toInt(input.substr(pos));
});
addParseToken('Hmmss', function (input, array, config) {
var pos1 = input.length - 4;
var pos2 = input.length - 2;
array[HOUR] = toInt(input.substr(0, pos1));
array[MINUTE] = toInt(input.substr(pos1, 2));
array[SECOND] = toInt(input.substr(pos2));
});
// LOCALES
function localeIsPM (input) {
// IE8 Quirks Mode & IE7 Standards Mode do not allow accessing strings like arrays
// Using charAt should be more compatible.
return ((input + '').toLowerCase().charAt(0) === 'p');
}
var defaultLocaleMeridiemParse = /[ap]\.?m?\.?/i;
function localeMeridiem (hours, minutes, isLower) {
if (hours > 11) {
return isLower ? 'pm' : 'PM';
} else {
return isLower ? 'am' : 'AM';
}
}
// MOMENTS
// Setting the hour should keep the time, because the user explicitly
// specified which hour he wants. So trying to maintain the same hour (in
// a new timezone) makes sense. Adding/subtracting hours does not follow
// this rule.
var getSetHour = makeGetSet('Hours', true);
var baseConfig = {
calendar: defaultCalendar,
longDateFormat: defaultLongDateFormat,
invalidDate: defaultInvalidDate,
ordinal: defaultOrdinal,
ordinalParse: defaultOrdinalParse,
relativeTime: defaultRelativeTime,
months: defaultLocaleMonths,
monthsShort: defaultLocaleMonthsShort,
week: defaultLocaleWeek,
weekdays: defaultLocaleWeekdays,
weekdaysMin: defaultLocaleWeekdaysMin,
weekdaysShort: defaultLocaleWeekdaysShort,
meridiemParse: defaultLocaleMeridiemParse
};
// internal storage for locale config files
var locales = {};
var globalLocale;
function normalizeLocale(key) {
return key ? key.toLowerCase().replace('_', '-') : key;
}
// pick the locale from the array
// try ['en-au', 'en-gb'] as 'en-au', 'en-gb', 'en': move through the list trying each
// substring from most specific to least, but move on to the next array item if it is a more specific variant than the current root
function chooseLocale(names) {
var i = 0, j, next, locale, split;
while (i < names.length) {
split = normalizeLocale(names[i]).split('-');
j = split.length;
next = normalizeLocale(names[i + 1]);
next = next ? next.split('-') : null;
while (j > 0) {
locale = loadLocale(split.slice(0, j).join('-'));
if (locale) {
return locale;
}
if (next && next.length >= j && compareArrays(split, next, true) >= j - 1) {
//the next array item is better than a shallower substring of this one
break;
}
j--;
}
i++;
}
return null;
}
function loadLocale(name) {
var oldLocale = null;
// TODO: Find a better way to register and load all the locales in Node
if (!locales[name] && (typeof module !== 'undefined') &&
module && module.exports) {
try {
oldLocale = globalLocale._abbr;
require('./locale/' + name);
// because defineLocale currently also sets the global locale, we
// want to undo that for lazy loaded locales
locale_locales__getSetGlobalLocale(oldLocale);
} catch (e) { }
}
return locales[name];
}
// This function will load locale and then set the global locale. If
// no arguments are passed in, it will simply return the current global
// locale key.
function locale_locales__getSetGlobalLocale (key, values) {
var data;
if (key) {
if (isUndefined(values)) {
data = locale_locales__getLocale(key);
}
else {
data = defineLocale(key, values);
}
if (data) {
// moment.duration._locale = moment._locale = data;
globalLocale = data;
}
}
return globalLocale._abbr;
}
function defineLocale (name, config) {
if (config !== null) {
var parentConfig = baseConfig;
config.abbr = name;
if (locales[name] != null) {
deprecateSimple('defineLocaleOverride',
'use moment.updateLocale(localeName, config) to change ' +
'an existing locale. moment.defineLocale(localeName, ' +
'config) should only be used for creating a new locale. ' +
'See http://momentjs.com/guides/#/warnings/define-locale/ for more info.');
parentConfig = locales[name]._config;
} else if (config.parentLocale != null) {
if (locales[config.parentLocale] != null) {
parentConfig = locales[config.parentLocale]._config;
} else {
// treat as if there is no base config
deprecateSimple('parentLocaleUndefined',
'specified parentLocale is not defined yet. See http://momentjs.com/guides/#/warnings/parent-locale/');
}
}
locales[name] = new Locale(mergeConfigs(parentConfig, config));
// backwards compat for now: also set the locale
locale_locales__getSetGlobalLocale(name);
return locales[name];
} else {
// useful for testing
delete locales[name];
return null;
}
}
function updateLocale(name, config) {
if (config != null) {
var locale, parentConfig = baseConfig;
// MERGE
if (locales[name] != null) {
parentConfig = locales[name]._config;
}
config = mergeConfigs(parentConfig, config);
locale = new Locale(config);
locale.parentLocale = locales[name];
locales[name] = locale;
// backwards compat for now: also set the locale
locale_locales__getSetGlobalLocale(name);
} else {
// pass null for config to unupdate, useful for tests
if (locales[name] != null) {
if (locales[name].parentLocale != null) {
locales[name] = locales[name].parentLocale;
} else if (locales[name] != null) {
delete locales[name];
}
}
}
return locales[name];
}
// returns locale data
function locale_locales__getLocale (key) {
var locale;
if (key && key._locale && key._locale._abbr) {
key = key._locale._abbr;
}
if (!key) {
return globalLocale;
}
if (!isArray(key)) {
//short-circuit everything else
locale = loadLocale(key);
if (locale) {
return locale;
}
key = [key];
}
return chooseLocale(key);
}
function locale_locales__listLocales() {
return keys(locales);
}
function checkOverflow (m) {
var overflow;
var a = m._a;
if (a && getParsingFlags(m).overflow === -2) {
overflow =
a[MONTH] < 0 || a[MONTH] > 11 ? MONTH :
a[DATE] < 1 || a[DATE] > daysInMonth(a[YEAR], a[MONTH]) ? DATE :
a[HOUR] < 0 || a[HOUR] > 24 || (a[HOUR] === 24 && (a[MINUTE] !== 0 || a[SECOND] !== 0 || a[MILLISECOND] !== 0)) ? HOUR :
a[MINUTE] < 0 || a[MINUTE] > 59 ? MINUTE :
a[SECOND] < 0 || a[SECOND] > 59 ? SECOND :
a[MILLISECOND] < 0 || a[MILLISECOND] > 999 ? MILLISECOND :
-1;
if (getParsingFlags(m)._overflowDayOfYear && (overflow < YEAR || overflow > DATE)) {
overflow = DATE;
}
if (getParsingFlags(m)._overflowWeeks && overflow === -1) {
overflow = WEEK;
}
if (getParsingFlags(m)._overflowWeekday && overflow === -1) {
overflow = WEEKDAY;
}
getParsingFlags(m).overflow = overflow;
}
return m;
}
// iso 8601 regex
// 0000-00-00 0000-W00 or 0000-W00-0 + T + 00 or 00:00 or 00:00:00 or 00:00:00.000 + +00:00 or +0000 or +00
var extendedIsoRegex = /^\s*((?:[+-]\d{6}|\d{4})-(?:\d\d-\d\d|W\d\d-\d|W\d\d|\d\d\d|\d\d))(?:(T| )(\d\d(?::\d\d(?::\d\d(?:[.,]\d+)?)?)?)([\+\-]\d\d(?::?\d\d)?|\s*Z)?)?/;
var basicIsoRegex = /^\s*((?:[+-]\d{6}|\d{4})(?:\d\d\d\d|W\d\d\d|W\d\d|\d\d\d|\d\d))(?:(T| )(\d\d(?:\d\d(?:\d\d(?:[.,]\d+)?)?)?)([\+\-]\d\d(?::?\d\d)?|\s*Z)?)?/;
var tzRegex = /Z|[+-]\d\d(?::?\d\d)?/;
var isoDates = [
['YYYYYY-MM-DD', /[+-]\d{6}-\d\d-\d\d/],
['YYYY-MM-DD', /\d{4}-\d\d-\d\d/],
['GGGG-[W]WW-E', /\d{4}-W\d\d-\d/],
['GGGG-[W]WW', /\d{4}-W\d\d/, false],
['YYYY-DDD', /\d{4}-\d{3}/],
['YYYY-MM', /\d{4}-\d\d/, false],
['YYYYYYMMDD', /[+-]\d{10}/],
['YYYYMMDD', /\d{8}/],
// YYYYMM is NOT allowed by the standard
['GGGG[W]WWE', /\d{4}W\d{3}/],
['GGGG[W]WW', /\d{4}W\d{2}/, false],
['YYYYDDD', /\d{7}/]
];
// iso time formats and regexes
var isoTimes = [
['HH:mm:ss.SSSS', /\d\d:\d\d:\d\d\.\d+/],
['HH:mm:ss,SSSS', /\d\d:\d\d:\d\d,\d+/],
['HH:mm:ss', /\d\d:\d\d:\d\d/],
['HH:mm', /\d\d:\d\d/],
['HHmmss.SSSS', /\d\d\d\d\d\d\.\d+/],
['HHmmss,SSSS', /\d\d\d\d\d\d,\d+/],
['HHmmss', /\d\d\d\d\d\d/],
['HHmm', /\d\d\d\d/],
['HH', /\d\d/]
];
var aspNetJsonRegex = /^\/?Date\((\-?\d+)/i;
// date from iso format
function configFromISO(config) {
var i, l,
string = config._i,
match = extendedIsoRegex.exec(string) || basicIsoRegex.exec(string),
allowTime, dateFormat, timeFormat, tzFormat;
if (match) {
getParsingFlags(config).iso = true;
for (i = 0, l = isoDates.length; i < l; i++) {
if (isoDates[i][1].exec(match[1])) {
dateFormat = isoDates[i][0];
allowTime = isoDates[i][2] !== false;
break;
}
}
if (dateFormat == null) {
config._isValid = false;
return;
}
if (match[3]) {
for (i = 0, l = isoTimes.length; i < l; i++) {
if (isoTimes[i][1].exec(match[3])) {
// match[2] should be 'T' or space
timeFormat = (match[2] || ' ') + isoTimes[i][0];
break;
}
}
if (timeFormat == null) {
config._isValid = false;
return;
}
}
if (!allowTime && timeFormat != null) {
config._isValid = false;
return;
}
if (match[4]) {
if (tzRegex.exec(match[4])) {
tzFormat = 'Z';
} else {
config._isValid = false;
return;
}
}
config._f = dateFormat + (timeFormat || '') + (tzFormat || '');
configFromStringAndFormat(config);
} else {
config._isValid = false;
}
}
// date from iso format or fallback
function configFromString(config) {
var matched = aspNetJsonRegex.exec(config._i);
if (matched !== null) {
config._d = new Date(+matched[1]);
return;
}
configFromISO(config);
if (config._isValid === false) {
delete config._isValid;
utils_hooks__hooks.createFromInputFallback(config);
}
}
utils_hooks__hooks.createFromInputFallback = deprecate(
'value provided is not in a recognized ISO format. moment construction falls back to js Date(), ' +
'which is not reliable across all browsers and versions. Non ISO date formats are ' +
'discouraged and will be removed in an upcoming major release. Please refer to ' +
'http://momentjs.com/guides/#/warnings/js-date/ for more info.',
function (config) {
config._d = new Date(config._i + (config._useUTC ? ' UTC' : ''));
}
);
// Pick the first defined of two or three arguments.
function defaults(a, b, c) {
if (a != null) {
return a;
}
if (b != null) {
return b;
}
return c;
}
function currentDateArray(config) {
// hooks is actually the exported moment object
var nowValue = new Date(utils_hooks__hooks.now());
if (config._useUTC) {
return [nowValue.getUTCFullYear(), nowValue.getUTCMonth(), nowValue.getUTCDate()];
}
return [nowValue.getFullYear(), nowValue.getMonth(), nowValue.getDate()];
}
// convert an array to a date.
// the array should mirror the parameters below
// note: all values past the year are optional and will default to the lowest possible value.
// [year, month, day, hour, minute, second, millisecond]
function configFromArray (config) {
var i, date, input = [], currentDate, yearToUse;
if (config._d) {
return;
}
currentDate = currentDateArray(config);
//compute day of the year from weeks and weekdays
if (config._w && config._a[DATE] == null && config._a[MONTH] == null) {
dayOfYearFromWeekInfo(config);
}
//if the day of the year is set, figure out what it is
if (config._dayOfYear) {
yearToUse = defaults(config._a[YEAR], currentDate[YEAR]);
if (config._dayOfYear > daysInYear(yearToUse)) {
getParsingFlags(config)._overflowDayOfYear = true;
}
date = createUTCDate(yearToUse, 0, config._dayOfYear);
config._a[MONTH] = date.getUTCMonth();
config._a[DATE] = date.getUTCDate();
}
// Default to current date.
// * if no year, month, day of month are given, default to today
// * if day of month is given, default month and year
// * if month is given, default only year
// * if year is given, don't default anything
for (i = 0; i < 3 && config._a[i] == null; ++i) {
config._a[i] = input[i] = currentDate[i];
}
// Zero out whatever was not defaulted, including time
for (; i < 7; i++) {
config._a[i] = input[i] = (config._a[i] == null) ? (i === 2 ? 1 : 0) : config._a[i];
}
// Check for 24:00:00.000
if (config._a[HOUR] === 24 &&
config._a[MINUTE] === 0 &&
config._a[SECOND] === 0 &&
config._a[MILLISECOND] === 0) {
config._nextDay = true;
config._a[HOUR] = 0;
}
config._d = (config._useUTC ? createUTCDate : createDate).apply(null, input);
// Apply timezone offset from input. The actual utcOffset can be changed
// with parseZone.
if (config._tzm != null) {
config._d.setUTCMinutes(config._d.getUTCMinutes() - config._tzm);
}
if (config._nextDay) {
config._a[HOUR] = 24;
}
}
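// Illustrative note: the defaulting rules above mean that parsing only a month
// (for example with the single token 'MM') yields the first day of that month in
// the current year at 00:00:00.000.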
function dayOfYearFromWeekInfo(config) {
var w, weekYear, week, weekday, dow, doy, temp, weekdayOverflow;
w = config._w;
if (w.GG != null || w.W != null || w.E != null) {
dow = 1;
doy = 4;
// TODO: We need to take the current isoWeekYear, but that depends on
// how we interpret now (local, utc, fixed offset). So create
// a now version of current config (take local/utc/offset flags, and
// create now).
weekYear = defaults(w.GG, config._a[YEAR], weekOfYear(local__createLocal(), 1, 4).year);
week = defaults(w.W, 1);
weekday = defaults(w.E, 1);
if (weekday < 1 || weekday > 7) {
weekdayOverflow = true;
}
} else {
dow = config._locale._week.dow;
doy = config._locale._week.doy;
weekYear = defaults(w.gg, config._a[YEAR], weekOfYear(local__createLocal(), dow, doy).year);
week = defaults(w.w, 1);
if (w.d != null) {
// weekday -- low day numbers are considered next week
weekday = w.d;
if (weekday < 0 || weekday > 6) {
weekdayOverflow = true;
}
} else if (w.e != null) {
// local weekday -- counting starts from beginning of week
weekday = w.e + dow;
if (w.e < 0 || w.e > 6) {
weekdayOverflow = true;
}
} else {
// default to beginning of week
weekday = dow;
}
}
if (week < 1 || week > weeksInYear(weekYear, dow, doy)) {
getParsingFlags(config)._overflowWeeks = true;
} else if (weekdayOverflow != null) {
getParsingFlags(config)._overflowWeekday = true;
} else {
temp = dayOfYearFromWeeks(weekYear, week, weekday, dow, doy);
config._a[YEAR] = temp.year;
config._dayOfYear = temp.dayOfYear;
}
}
// constant that refers to the ISO standard
utils_hooks__hooks.ISO_8601 = function () {};
// date from string and format string
function configFromStringAndFormat(config) {
// TODO: Move this to another part of the creation flow to prevent circular deps
if (config._f === utils_hooks__hooks.ISO_8601) {
configFromISO(config);
return;
}
config._a = [];
getParsingFlags(config).empty = true;
// This array is used to make a Date, either with `new Date` or `Date.UTC`
var string = '' + config._i,
i, parsedInput, tokens, token, skipped,
stringLength = string.length,
totalParsedInputLength = 0;
tokens = expandFormat(config._f, config._locale).match(formattingTokens) || [];
for (i = 0; i < tokens.length; i++) {
token = tokens[i];
parsedInput = (string.match(getParseRegexForToken(token, config)) || [])[0];
// console.log('token', token, 'parsedInput', parsedInput,
// 'regex', getParseRegexForToken(token, config));
if (parsedInput) {
skipped = string.substr(0, string.indexOf(parsedInput));
if (skipped.length > 0) {
getParsingFlags(config).unusedInput.push(skipped);
}
string = string.slice(string.indexOf(parsedInput) + parsedInput.length);
totalParsedInputLength += parsedInput.length;
}
// don't parse if it's not a known token
if (formatTokenFunctions[token]) {
if (parsedInput) {
getParsingFlags(config).empty = false;
}
else {
getParsingFlags(config).unusedTokens.push(token);
}
addTimeToArrayFromToken(token, parsedInput, config);
}
else if (config._strict && !parsedInput) {
getParsingFlags(config).unusedTokens.push(token);
}
}
// add remaining unparsed input length to the string
getParsingFlags(config).charsLeftOver = stringLength - totalParsedInputLength;
if (string.length > 0) {
getParsingFlags(config).unusedInput.push(string);
}
// clear _12h flag if hour is <= 12
if (config._a[HOUR] <= 12 &&
getParsingFlags(config).bigHour === true &&
config._a[HOUR] > 0) {
getParsingFlags(config).bigHour = undefined;
}
getParsingFlags(config).parsedDateParts = config._a.slice(0);
getParsingFlags(config).meridiem = config._meridiem;
// handle meridiem
config._a[HOUR] = meridiemFixWrap(config._locale, config._a[HOUR], config._meridiem);
configFromArray(config);
checkOverflow(config);
}
function meridiemFixWrap (locale, hour, meridiem) {
var isPm;
if (meridiem == null) {
// nothing to do
return hour;
}
if (locale.meridiemHour != null) {
return locale.meridiemHour(hour, meridiem);
} else if (locale.isPM != null) {
// Fallback
isPm = locale.isPM(meridiem);
if (isPm && hour < 12) {
hour += 12;
}
if (!isPm && hour === 12) {
hour = 0;
}
return hour;
} else {
// this is not supposed to happen
return hour;
}
}
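// Illustrative examples (assuming the default English isPM defined above):
//   meridiemFixWrap(locale, 7, 'pm')  -> 19
//   meridiemFixWrap(locale, 12, 'am') -> 0
//   meridiemFixWrap(locale, 12, 'pm') -> 12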
// date from string and array of format strings
function configFromStringAndArray(config) {
var tempConfig,
bestMoment,
scoreToBeat,
i,
currentScore;
if (config._f.length === 0) {
getParsingFlags(config).invalidFormat = true;
config._d = new Date(NaN);
return;
}
for (i = 0; i < config._f.length; i++) {
currentScore = 0;
tempConfig = copyConfig({}, config);
if (config._useUTC != null) {
tempConfig._useUTC = config._useUTC;
}
tempConfig._f = config._f[i];
configFromStringAndFormat(tempConfig);
if (!valid__isValid(tempConfig)) {
continue;
}
// if there is any input that was not parsed add a penalty for that format
currentScore += getParsingFlags(tempConfig).charsLeftOver;
//or tokens
currentScore += getParsingFlags(tempConfig).unusedTokens.length * 10;
getParsingFlags(tempConfig).score = currentScore;
if (scoreToBeat == null || currentScore < scoreToBeat) {
scoreToBeat = currentScore;
bestMoment = tempConfig;
}
}
extend(config, bestMoment || tempConfig);
}
function configFromObject(config) {
if (config._d) {
return;
}
var i = normalizeObjectUnits(config._i);
config._a = map([i.year, i.month, i.day || i.date, i.hour, i.minute, i.second, i.millisecond], function (obj) {
return obj && parseInt(obj, 10);
});
configFromArray(config);
}
function createFromConfig (config) {
var res = new Moment(checkOverflow(prepareConfig(config)));
if (res._nextDay) {
// Adding is smart enough around DST
res.add(1, 'd');
res._nextDay = undefined;
}
return res;
}
function prepareConfig (config) {
var input = config._i,
format = config._f;
config._locale = config._locale || locale_locales__getLocale(config._l);
if (input === null || (format === undefined && input === '')) {
return valid__createInvalid({nullInput: true});
}
if (typeof input === 'string') {
config._i = input = config._locale.preparse(input);
}
if (isMoment(input)) {
return new Moment(checkOverflow(input));
} else if (isArray(format)) {
configFromStringAndArray(config);
} else if (isDate(input)) {
config._d = input;
} else if (format) {
configFromStringAndFormat(config);
} else {
configFromInput(config);
}
if (!valid__isValid(config)) {
config._d = null;
}
return config;
}
function configFromInput(config) {
var input = config._i;
if (input === undefined) {
config._d = new Date(utils_hooks__hooks.now());
} else if (isDate(input)) {
config._d = new Date(input.valueOf());
} else if (typeof input === 'string') {
configFromString(config);
} else if (isArray(input)) {
config._a = map(input.slice(0), function (obj) {
return parseInt(obj, 10);
});
configFromArray(config);
} else if (typeof(input) === 'object') {
configFromObject(config);
} else if (typeof(input) === 'number') {
// from milliseconds
config._d = new Date(input);
} else {
utils_hooks__hooks.createFromInputFallback(config);
}
}
function createLocalOrUTC (input, format, locale, strict, isUTC) {
var c = {};
if (typeof(locale) === 'boolean') {
strict = locale;
locale = undefined;
}
if ((isObject(input) && isObjectEmpty(input)) ||
(isArray(input) && input.length === 0)) {
input = undefined;
}
// object construction must be done this way.
// https://github.com/moment/moment/issues/1423
c._isAMomentObject = true;
c._useUTC = c._isUTC = isUTC;
c._l = locale;
c._i = input;
c._f = format;
c._strict = strict;
return createFromConfig(c);
}
function local__createLocal (input, format, locale, strict) {
return createLocalOrUTC(input, format, locale, strict, false);
}
var prototypeMin = deprecate(
'moment().min is deprecated, use moment.max instead. http://momentjs.com/guides/#/warnings/min-max/',
function () {
var other = local__createLocal.apply(null, arguments);
if (this.isValid() && other.isValid()) {
return other < this ? this : other;
} else {
return valid__createInvalid();
}
}
);
var prototypeMax = deprecate(
'moment().max is deprecated, use moment.min instead. http://momentjs.com/guides/#/warnings/min-max/',
function () {
var other = local__createLocal.apply(null, arguments);
if (this.isValid() && other.isValid()) {
return other > this ? this : other;
} else {
return valid__createInvalid();
}
}
);
// Pick a moment m from moments so that m[fn](other) is true for all
// other. This relies on the function fn to be transitive.
//
// moments should either be an array of moment objects or an array, whose
// first element is an array of moment objects.
function pickBy(fn, moments) {
var res, i;
if (moments.length === 1 && isArray(moments[0])) {
moments = moments[0];
}
if (!moments.length) {
return local__createLocal();
}
res = moments[0];
for (i = 1; i < moments.length; ++i) {
if (!moments[i].isValid() || moments[i][fn](res)) {
res = moments[i];
}
}
return res;
}
// TODO: Use [].sort instead?
function min () {
var args = [].slice.call(arguments, 0);
return pickBy('isBefore', args);
}
function max () {
var args = [].slice.call(arguments, 0);
return pickBy('isAfter', args);
}
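// Illustrative usage (assuming these are exposed as moment.min / moment.max, as in
// the public API):
//   moment.min(moment('2016-01-01'), moment('2016-06-01'))   // -> the January moment
//   moment.max([moment('2016-01-01'), moment('2016-06-01')]) // -> the June moment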
var now = function () {
return Date.now ? Date.now() : +(new Date());
};
function Duration (duration) {
var normalizedInput = normalizeObjectUnits(duration),
years = normalizedInput.year || 0,
quarters = normalizedInput.quarter || 0,
months = normalizedInput.month || 0,
weeks = normalizedInput.week || 0,
days = normalizedInput.day || 0,
hours = normalizedInput.hour || 0,
minutes = normalizedInput.minute || 0,
seconds = normalizedInput.second || 0,
milliseconds = normalizedInput.millisecond || 0;
// representation for dateAddRemove
this._milliseconds = +milliseconds +
seconds * 1e3 + // 1000
minutes * 6e4 + // 1000 * 60
hours * 1000 * 60 * 60; //using 1000 * 60 * 60 instead of 36e5 to avoid floating point rounding errors https://github.com/moment/moment/issues/2978
// Because dateAddRemove treats 24 hours as different from a
// day when working around DST, we need to store them separately
this._days = +days +
weeks * 7;
// It is impossible to translate months into days without knowing
// which months you are talking about, so we have to store
// it separately.
this._months = +months +
quarters * 3 +
years * 12;
this._data = {};
this._locale = locale_locales__getLocale();
this._bubble();
}
function isDuration (obj) {
return obj instanceof Duration;
}
function absRound (number) {
if (number < 0) {
return Math.round(-1 * number) * -1;
} else {
return Math.round(number);
}
}
// FORMATTING
function offset (token, separator) {
addFormatToken(token, 0, 0, function () {
var offset = this.utcOffset();
var sign = '+';
if (offset < 0) {
offset = -offset;
sign = '-';
}
return sign + zeroFill(~~(offset / 60), 2) + separator + zeroFill(~~(offset) % 60, 2);
});
}
offset('Z', ':');
offset('ZZ', '');
// PARSING
addRegexToken('Z', matchShortOffset);
addRegexToken('ZZ', matchShortOffset);
addParseToken(['Z', 'ZZ'], function (input, array, config) {
config._useUTC = true;
config._tzm = offsetFromString(matchShortOffset, input);
});
// HELPERS
// timezone chunker
// '+10:00' > ['10', '00']
// '-1530' > ['-15', '30']
var chunkOffset = /([\+\-]|\d\d)/gi;
function offsetFromString(matcher, string) {
var matches = ((string || '').match(matcher) || []);
var chunk = matches[matches.length - 1] || [];
var parts = (chunk + '').match(chunkOffset) || ['-', 0, 0];
var minutes = +(parts[1] * 60) + toInt(parts[2]);
return parts[0] === '+' ? minutes : -minutes;
}
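// Illustrative examples, with matchShortOffset as the matcher:
//   offsetFromString(matchShortOffset, '+10:00') -> 600
//   offsetFromString(matchShortOffset, '-1530')  -> -930
//   offsetFromString(matchShortOffset, 'Z')      -> 0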
// Return a moment from input, that is local/utc/zone equivalent to model.
function cloneWithOffset(input, model) {
var res, diff;
if (model._isUTC) {
res = model.clone();
diff = (isMoment(input) || isDate(input) ? input.valueOf() : local__createLocal(input).valueOf()) - res.valueOf();
// Use low-level api, because this fn is low-level api.
res._d.setTime(res._d.valueOf() + diff);
utils_hooks__hooks.updateOffset(res, false);
return res;
} else {
return local__createLocal(input).local();
}
}
function getDateOffset (m) {
// On Firefox 24, Date#getTimezoneOffset returns a floating point.
// https://github.com/moment/moment/pull/1871
return -Math.round(m._d.getTimezoneOffset() / 15) * 15;
}
// HOOKS
// This function will be called whenever a moment is mutated.
// It is intended to keep the offset in sync with the timezone.
utils_hooks__hooks.updateOffset = function () {};
// MOMENTS
// keepLocalTime = true means only change the timezone, without
// affecting the local hour. So 5:31:26 +0300 --[utcOffset(2, true)]-->
// 5:31:26 +0200 It is possible that 5:31:26 doesn't exist with offset
// +0200, so we adjust the time as needed, to be valid.
//
// Keeping the time actually adds/subtracts (one hour)
// from the actual represented time. That is why we call updateOffset
// a second time. In case it wants us to change the offset again (the
// _changeInProgress == true case), we have to adjust, because
// there is no such time in the given timezone.
function getSetOffset (input, keepLocalTime) {
var offset = this._offset || 0,
localAdjust;
if (!this.isValid()) {
return input != null ? this : NaN;
}
if (input != null) {
if (typeof input === 'string') {
input = offsetFromString(matchShortOffset, input);
} else if (Math.abs(input) < 16) {
input = input * 60;
}
if (!this._isUTC && keepLocalTime) {
localAdjust = getDateOffset(this);
}
this._offset = input;
this._isUTC = true;
if (localAdjust != null) {
this.add(localAdjust, 'm');
}
if (offset !== input) {
if (!keepLocalTime || this._changeInProgress) {
add_subtract__addSubtract(this, create__createDuration(input - offset, 'm'), 1, false);
} else if (!this._changeInProgress) {
this._changeInProgress = true;
utils_hooks__hooks.updateOffset(this, true);
this._changeInProgress = null;
}
}
return this;
} else {
return this._isUTC ? offset : getDateOffset(this);
}
}
function getSetZone (input, keepLocalTime) {
if (input != null) {
if (typeof input !== 'string') {
input = -input;
}
this.utcOffset(input, keepLocalTime);
return this;
} else {
return -this.utcOffset();
}
}
function setOffsetToUTC (keepLocalTime) {
return this.utcOffset(0, keepLocalTime);
}
function setOffsetToLocal (keepLocalTime) {
if (this._isUTC) {
this.utcOffset(0, keepLocalTime);
this._isUTC = false;
if (keepLocalTime) {
this.subtract(getDateOffset(this), 'm');
}
}
return this;
}
function setOffsetToParsedOffset () {
if (this._tzm) {
this.utcOffset(this._tzm);
} else if (typeof this._i === 'string') {
var tZone = offsetFromString(matchOffset, this._i);
if (tZone === 0) {
this.utcOffset(0, true);
} else {
this.utcOffset(offsetFromString(matchOffset, this._i));
}
}
return this;
}
function hasAlignedHourOffset (input) {
if (!this.isValid()) {
return false;
}
input = input ? local__createLocal(input).utcOffset() : 0;
return (this.utcOffset() - input) % 60 === 0;
}
function isDaylightSavingTime () {
return (
this.utcOffset() > this.clone().month(0).utcOffset() ||
this.utcOffset() > this.clone().month(5).utcOffset()
);
}
function isDaylightSavingTimeShifted () {
if (!isUndefined(this._isDSTShifted)) {
return this._isDSTShifted;
}
var c = {};
copyConfig(c, this);
c = prepareConfig(c);
if (c._a) {
var other = c._isUTC ? create_utc__createUTC(c._a) : local__createLocal(c._a);
this._isDSTShifted = this.isValid() &&
compareArrays(c._a, other.toArray()) > 0;
} else {
this._isDSTShifted = false;
}
return this._isDSTShifted;
}
function isLocal () {
return this.isValid() ? !this._isUTC : false;
}
function isUtcOffset () {
return this.isValid() ? this._isUTC : false;
}
function isUtc () {
return this.isValid() ? this._isUTC && this._offset === 0 : false;
}
// ASP.NET json date format regex
var aspNetRegex = /^(\-)?(?:(\d*)[. ])?(\d+)\:(\d+)(?:\:(\d+)(\.\d*)?)?$/;
// from http://docs.closure-library.googlecode.com/git/closure_goog_date_date.js.source.html
// somewhat more in line with 4.4.3.2 2004 spec, but allows decimal anywhere
// and further modified to allow for strings containing both week and day
var isoRegex = /^(-)?P(?:(-?[0-9,.]*)Y)?(?:(-?[0-9,.]*)M)?(?:(-?[0-9,.]*)W)?(?:(-?[0-9,.]*)D)?(?:T(?:(-?[0-9,.]*)H)?(?:(-?[0-9,.]*)M)?(?:(-?[0-9,.]*)S)?)?$/;
function create__createDuration (input, key) {
var duration = input,
// matching against regexp is expensive, do it on demand
match = null,
sign,
ret,
diffRes;
if (isDuration(input)) {
duration = {
ms : input._milliseconds,
d : input._days,
M : input._months
};
} else if (typeof input === 'number') {
duration = {};
if (key) {
duration[key] = input;
} else {
duration.milliseconds = input;
}
} else if (!!(match = aspNetRegex.exec(input))) {
sign = (match[1] === '-') ? -1 : 1;
duration = {
y : 0,
d : toInt(match[DATE]) * sign,
h : toInt(match[HOUR]) * sign,
m : toInt(match[MINUTE]) * sign,
s : toInt(match[SECOND]) * sign,
ms : toInt(absRound(match[MILLISECOND] * 1000)) * sign // the millisecond decimal point is included in the match
};
} else if (!!(match = isoRegex.exec(input))) {
sign = (match[1] === '-') ? -1 : 1;
duration = {
y : parseIso(match[2], sign),
M : parseIso(match[3], sign),
w : parseIso(match[4], sign),
d : parseIso(match[5], sign),
h : parseIso(match[6], sign),
m : parseIso(match[7], sign),
s : parseIso(match[8], sign)
};
} else if (duration == null) {// checks for null or undefined
duration = {};
} else if (typeof duration === 'object' && ('from' in duration || 'to' in duration)) {
diffRes = momentsDifference(local__createLocal(duration.from), local__createLocal(duration.to));
duration = {};
duration.ms = diffRes.milliseconds;
duration.M = diffRes.months;
}
ret = new Duration(duration);
if (isDuration(input) && hasOwnProp(input, '_locale')) {
ret._locale = input._locale;
}
return ret;
}
create__createDuration.fn = Duration.prototype;
function parseIso (inp, sign) {
// We'd normally use ~~inp for this, but unfortunately it also
// converts floats to ints.
// inp may be undefined, so careful calling replace on it.
var res = inp && parseFloat(inp.replace(',', '.'));
// apply sign while we're at it
return (isNaN(res) ? 0 : res) * sign;
}
function positiveMomentsDifference(base, other) {
var res = {milliseconds: 0, months: 0};
res.months = other.month() - base.month() +
(other.year() - base.year()) * 12;
if (base.clone().add(res.months, 'M').isAfter(other)) {
--res.months;
}
res.milliseconds = +other - +(base.clone().add(res.months, 'M'));
return res;
}
function momentsDifference(base, other) {
var res;
if (!(base.isValid() && other.isValid())) {
return {milliseconds: 0, months: 0};
}
other = cloneWithOffset(other, base);
if (base.isBefore(other)) {
res = positiveMomentsDifference(base, other);
} else {
res = positiveMomentsDifference(other, base);
res.milliseconds = -res.milliseconds;
res.months = -res.months;
}
return res;
}
// TODO: remove 'name' arg after deprecation is removed
function createAdder(direction, name) {
return function (val, period) {
var dur, tmp;
//invert the arguments, but complain about it
if (period !== null && !isNaN(+period)) {
deprecateSimple(name, 'moment().' + name + '(period, number) is deprecated. Please use moment().' + name + '(number, period). ' +
'See http://momentjs.com/guides/#/warnings/add-inverted-param/ for more info.');
tmp = val; val = period; period = tmp;
}
val = typeof val === 'string' ? +val : val;
dur = create__createDuration(val, period);
add_subtract__addSubtract(this, dur, direction);
return this;
};
}
function add_subtract__addSubtract (mom, duration, isAdding, updateOffset) {
var milliseconds = duration._milliseconds,
days = absRound(duration._days),
months = absRound(duration._months);
if (!mom.isValid()) {
// No op
return;
}
updateOffset = updateOffset == null ? true : updateOffset;
if (milliseconds) {
mom._d.setTime(mom._d.valueOf() + milliseconds * isAdding);
}
if (days) {
get_set__set(mom, 'Date', get_set__get(mom, 'Date') + days * isAdding);
}
if (months) {
setMonth(mom, get_set__get(mom, 'Month') + months * isAdding);
}
if (updateOffset) {
utils_hooks__hooks.updateOffset(mom, days || months);
}
}
var add_subtract__add = createAdder(1, 'add');
var add_subtract__subtract = createAdder(-1, 'subtract');
function getCalendarFormat(myMoment, now) {
var diff = myMoment.diff(now, 'days', true);
return diff < -6 ? 'sameElse' :
diff < -1 ? 'lastWeek' :
diff < 0 ? 'lastDay' :
diff < 1 ? 'sameDay' :
diff < 2 ? 'nextDay' :
diff < 7 ? 'nextWeek' : 'sameElse';
}
function moment_calendar__calendar (time, formats) {
// We want to compare the start of today, vs this.
// Getting start-of-today depends on whether we're local/utc/offset or not.
var now = time || local__createLocal(),
sod = cloneWithOffset(now, this).startOf('day'),
format = utils_hooks__hooks.calendarFormat(this, sod) || 'sameElse';
var output = formats && (isFunction(formats[format]) ? formats[format].call(this, now) : formats[format]);
return this.format(output || this.localeData().calendar(format, this, local__createLocal(now)));
}
function clone () {
return new Moment(this);
}
function isAfter (input, units) {
var localInput = isMoment(input) ? input : local__createLocal(input);
if (!(this.isValid() && localInput.isValid())) {
return false;
}
units = normalizeUnits(!isUndefined(units) ? units : 'millisecond');
if (units === 'millisecond') {
return this.valueOf() > localInput.valueOf();
} else {
return localInput.valueOf() < this.clone().startOf(units).valueOf();
}
}
function isBefore (input, units) {
var localInput = isMoment(input) ? input : local__createLocal(input);
if (!(this.isValid() && localInput.isValid())) {
return false;
}
units = normalizeUnits(!isUndefined(units) ? units : 'millisecond');
if (units === 'millisecond') {
return this.valueOf() < localInput.valueOf();
} else {
return this.clone().endOf(units).valueOf() < localInput.valueOf();
}
}
function isBetween (from, to, units, inclusivity) {
inclusivity = inclusivity || '()';
return (inclusivity[0] === '(' ? this.isAfter(from, units) : !this.isBefore(from, units)) &&
(inclusivity[1] === ')' ? this.isBefore(to, units) : !this.isAfter(to, units));
}
function isSame (input, units) {
var localInput = isMoment(input) ? input : local__createLocal(input),
inputMs;
if (!(this.isValid() && localInput.isValid())) {
return false;
}
units = normalizeUnits(units || 'millisecond');
if (units === 'millisecond') {
return this.valueOf() === localInput.valueOf();
} else {
inputMs = localInput.valueOf();
return this.clone().startOf(units).valueOf() <= inputMs && inputMs <= this.clone().endOf(units).valueOf();
}
}
function isSameOrAfter (input, units) {
return this.isSame(input, units) || this.isAfter(input,units);
}
function isSameOrBefore (input, units) {
return this.isSame(input, units) || this.isBefore(input,units);
}
function diff (input, units, asFloat) {
var that,
zoneDelta,
delta, output;
if (!this.isValid()) {
return NaN;
}
that = cloneWithOffset(input, this);
if (!that.isValid()) {
return NaN;
}
zoneDelta = (that.utcOffset() - this.utcOffset()) * 6e4;
units = normalizeUnits(units);
if (units === 'year' || units === 'month' || units === 'quarter') {
output = monthDiff(this, that);
if (units === 'quarter') {
output = output / 3;
} else if (units === 'year') {
output = output / 12;
}
} else {
delta = this - that;
output = units === 'second' ? delta / 1e3 : // 1000
units === 'minute' ? delta / 6e4 : // 1000 * 60
units === 'hour' ? delta / 36e5 : // 1000 * 60 * 60
units === 'day' ? (delta - zoneDelta) / 864e5 : // 1000 * 60 * 60 * 24, negate dst
units === 'week' ? (delta - zoneDelta) / 6048e5 : // 1000 * 60 * 60 * 24 * 7, negate dst
delta;
}
return asFloat ? output : absFloor(output);
}
function monthDiff (a, b) {
// difference in months
var wholeMonthDiff = ((b.year() - a.year()) * 12) + (b.month() - a.month()),
// b is in (anchor - 1 month, anchor + 1 month)
anchor = a.clone().add(wholeMonthDiff, 'months'),
anchor2, adjust;
if (b - anchor < 0) {
anchor2 = a.clone().add(wholeMonthDiff - 1, 'months');
// linear across the month
adjust = (b - anchor) / (anchor - anchor2);
} else {
anchor2 = a.clone().add(wholeMonthDiff + 1, 'months');
// linear across the month
adjust = (b - anchor) / (anchor2 - anchor);
}
//check for negative zero, return zero if negative zero
return -(wholeMonthDiff + adjust) || 0;
}
utils_hooks__hooks.defaultFormat = 'YYYY-MM-DDTHH:mm:ssZ';
utils_hooks__hooks.defaultFormatUtc = 'YYYY-MM-DDTHH:mm:ss[Z]';
function toString () {
return this.clone().locale('en').format('ddd MMM DD YYYY HH:mm:ss [GMT]ZZ');
}
function moment_format__toISOString () {
var m = this.clone().utc();
if (0 < m.year() && m.year() <= 9999) {
if (isFunction(Date.prototype.toISOString)) {
// native implementation is ~50x faster, use it when we can
return this.toDate().toISOString();
} else {
return formatMoment(m, 'YYYY-MM-DD[T]HH:mm:ss.SSS[Z]');
}
} else {
return formatMoment(m, 'YYYYYY-MM-DD[T]HH:mm:ss.SSS[Z]');
}
}
function format (inputString) {
if (!inputString) {
inputString = this.isUtc() ? utils_hooks__hooks.defaultFormatUtc : utils_hooks__hooks.defaultFormat;
}
var output = formatMoment(this, inputString);
return this.localeData().postformat(output);
}
function from (time, withoutSuffix) {
if (this.isValid() &&
((isMoment(time) && time.isValid()) ||
local__createLocal(time).isValid())) {
return create__createDuration({to: this, from: time}).locale(this.locale()).humanize(!withoutSuffix);
} else {
return this.localeData().invalidDate();
}
}
function fromNow (withoutSuffix) {
return this.from(local__createLocal(), withoutSuffix);
}
function to (time, withoutSuffix) {
if (this.isValid() &&
((isMoment(time) && time.isValid()) ||
local__createLocal(time).isValid())) {
return create__createDuration({from: this, to: time}).locale(this.locale()).humanize(!withoutSuffix);
} else {
return this.localeData().invalidDate();
}
}
function toNow (withoutSuffix) {
return this.to(local__createLocal(), withoutSuffix);
}
// If passed a locale key, it will set the locale for this
// instance. Otherwise, it will return the locale configuration
// variables for this instance.
function locale (key) {
var newLocaleData;
if (key === undefined) {
return this._locale._abbr;
} else {
newLocaleData = locale_locales__getLocale(key);
if (newLocaleData != null) {
this._locale = newLocaleData;
}
return this;
}
}
var lang = deprecate(
'moment().lang() is deprecated. Instead, use moment().localeData() to get the language configuration. Use moment().locale() to change languages.',
function (key) {
if (key === undefined) {
return this.localeData();
} else {
return this.locale(key);
}
}
);
function localeData () {
return this._locale;
}
function startOf (units) {
units = normalizeUnits(units);
// the following switch intentionally omits break keywords
// to utilize falling through the cases.
switch (units) {
case 'year':
this.month(0);
/* falls through */
case 'quarter':
case 'month':
this.date(1);
/* falls through */
case 'week':
case 'isoWeek':
case 'day':
case 'date':
this.hours(0);
/* falls through */
case 'hour':
this.minutes(0);
/* falls through */
case 'minute':
this.seconds(0);
/* falls through */
case 'second':
this.milliseconds(0);
}
// weeks are a special case
if (units === 'week') {
this.weekday(0);
}
if (units === 'isoWeek') {
this.isoWeekday(1);
}
// quarters are also special
if (units === 'quarter') {
this.month(Math.floor(this.month() / 3) * 3);
}
return this;
}
function endOf (units) {
units = normalizeUnits(units);
if (units === undefined || units === 'millisecond') {
return this;
}
// 'date' is an alias for 'day', so it should be considered as such.
if (units === 'date') {
units = 'day';
}
return this.startOf(units).add(1, (units === 'isoWeek' ? 'week' : units)).subtract(1, 'ms');
}
function to_type__valueOf () {
return this._d.valueOf() - ((this._offset || 0) * 60000);
}
function unix () {
return Math.floor(this.valueOf() / 1000);
}
function toDate () {
return new Date(this.valueOf());
}
function toArray () {
var m = this;
return [m.year(), m.month(), m.date(), m.hour(), m.minute(), m.second(), m.millisecond()];
}
function toObject () {
var m = this;
return {
years: m.year(),
months: m.month(),
date: m.date(),
hours: m.hours(),
minutes: m.minutes(),
seconds: m.seconds(),
milliseconds: m.milliseconds()
};
}
function toJSON () {
// new Date(NaN).toJSON() === null
return this.isValid() ? this.toISOString() : null;
}
function moment_valid__isValid () {
return valid__isValid(this);
}
function parsingFlags () {
return extend({}, getParsingFlags(this));
}
function invalidAt () {
return getParsingFlags(this).overflow;
}
function creationData() {
return {
input: this._i,
format: this._f,
locale: this._locale,
isUTC: this._isUTC,
strict: this._strict
};
}
// FORMATTING
addFormatToken(0, ['gg', 2], 0, function () {
return this.weekYear() % 100;
});
addFormatToken(0, ['GG', 2], 0, function () {
return this.isoWeekYear() % 100;
});
function addWeekYearFormatToken (token, getter) {
addFormatToken(0, [token, token.length], 0, getter);
}
addWeekYearFormatToken('gggg', 'weekYear');
addWeekYearFormatToken('ggggg', 'weekYear');
addWeekYearFormatToken('GGGG', 'isoWeekYear');
addWeekYearFormatToken('GGGGG', 'isoWeekYear');
// ALIASES
addUnitAlias('weekYear', 'gg');
addUnitAlias('isoWeekYear', 'GG');
// PRIORITY
addUnitPriority('weekYear', 1);
addUnitPriority('isoWeekYear', 1);
// PARSING
addRegexToken('G', matchSigned);
addRegexToken('g', matchSigned);
addRegexToken('GG', match1to2, match2);
addRegexToken('gg', match1to2, match2);
addRegexToken('GGGG', match1to4, match4);
addRegexToken('gggg', match1to4, match4);
addRegexToken('GGGGG', match1to6, match6);
addRegexToken('ggggg', match1to6, match6);
addWeekParseToken(['gggg', 'ggggg', 'GGGG', 'GGGGG'], function (input, week, config, token) {
week[token.substr(0, 2)] = toInt(input);
});
addWeekParseToken(['gg', 'GG'], function (input, week, config, token) {
week[token] = utils_hooks__hooks.parseTwoDigitYear(input);
});
// MOMENTS
function getSetWeekYear (input) {
return getSetWeekYearHelper.call(this,
input,
this.week(),
this.weekday(),
this.localeData()._week.dow,
this.localeData()._week.doy);
}
function getSetISOWeekYear (input) {
return getSetWeekYearHelper.call(this,
input, this.isoWeek(), this.isoWeekday(), 1, 4);
}
function getISOWeeksInYear () {
return weeksInYear(this.year(), 1, 4);
}
function getWeeksInYear () {
var weekInfo = this.localeData()._week;
return weeksInYear(this.year(), weekInfo.dow, weekInfo.doy);
}
function getSetWeekYearHelper(input, week, weekday, dow, doy) {
var weeksTarget;
if (input == null) {
return weekOfYear(this, dow, doy).year;
} else {
weeksTarget = weeksInYear(input, dow, doy);
if (week > weeksTarget) {
week = weeksTarget;
}
return setWeekAll.call(this, input, week, weekday, dow, doy);
}
}
function setWeekAll(weekYear, week, weekday, dow, doy) {
var dayOfYearData = dayOfYearFromWeeks(weekYear, week, weekday, dow, doy),
date = createUTCDate(dayOfYearData.year, 0, dayOfYearData.dayOfYear);
this.year(date.getUTCFullYear());
this.month(date.getUTCMonth());
this.date(date.getUTCDate());
return this;
}
// FORMATTING
addFormatToken('Q', 0, 'Qo', 'quarter');
// ALIASES
addUnitAlias('quarter', 'Q');
// PRIORITY
addUnitPriority('quarter', 7);
// PARSING
addRegexToken('Q', match1);
addParseToken('Q', function (input, array) {
array[MONTH] = (toInt(input) - 1) * 3;
});
// MOMENTS
function getSetQuarter (input) {
return input == null ? Math.ceil((this.month() + 1) / 3) : this.month((input - 1) * 3 + this.month() % 3);
}
// FORMATTING
addFormatToken('D', ['DD', 2], 'Do', 'date');
// ALIASES
addUnitAlias('date', 'D');
// PRIORITY
addUnitPriority('date', 9);
// PARSING
addRegexToken('D', match1to2);
addRegexToken('DD', match1to2, match2);
addRegexToken('Do', function (isStrict, locale) {
return isStrict ? locale._ordinalParse : locale._ordinalParseLenient;
});
addParseToken(['D', 'DD'], DATE);
addParseToken('Do', function (input, array) {
array[DATE] = toInt(input.match(match1to2)[0], 10);
});
// MOMENTS
var getSetDayOfMonth = makeGetSet('Date', true);
// FORMATTING
addFormatToken('DDD', ['DDDD', 3], 'DDDo', 'dayOfYear');
// ALIASES
addUnitAlias('dayOfYear', 'DDD');
// PRIORITY
addUnitPriority('dayOfYear', 4);
// PARSING
addRegexToken('DDD', match1to3);
addRegexToken('DDDD', match3);
addParseToken(['DDD', 'DDDD'], function (input, array, config) {
config._dayOfYear = toInt(input);
});
// HELPERS
// MOMENTS
function getSetDayOfYear (input) {
var dayOfYear = Math.round((this.clone().startOf('day') - this.clone().startOf('year')) / 864e5) + 1;
return input == null ? dayOfYear : this.add((input - dayOfYear), 'd');
}
// FORMATTING
addFormatToken('m', ['mm', 2], 0, 'minute');
// ALIASES
addUnitAlias('minute', 'm');
// PRIORITY
addUnitPriority('minute', 14);
// PARSING
addRegexToken('m', match1to2);
addRegexToken('mm', match1to2, match2);
addParseToken(['m', 'mm'], MINUTE);
// MOMENTS
var getSetMinute = makeGetSet('Minutes', false);
// FORMATTING
addFormatToken('s', ['ss', 2], 0, 'second');
// ALIASES
addUnitAlias('second', 's');
// PRIORITY
addUnitPriority('second', 15);
// PARSING
addRegexToken('s', match1to2);
addRegexToken('ss', match1to2, match2);
addParseToken(['s', 'ss'], SECOND);
// MOMENTS
var getSetSecond = makeGetSet('Seconds', false);
// FORMATTING
addFormatToken('S', 0, 0, function () {
return ~~(this.millisecond() / 100);
});
addFormatToken(0, ['SS', 2], 0, function () {
return ~~(this.millisecond() / 10);
});
addFormatToken(0, ['SSS', 3], 0, 'millisecond');
addFormatToken(0, ['SSSS', 4], 0, function () {
return this.millisecond() * 10;
});
addFormatToken(0, ['SSSSS', 5], 0, function () {
return this.millisecond() * 100;
});
addFormatToken(0, ['SSSSSS', 6], 0, function () {
return this.millisecond() * 1000;
});
addFormatToken(0, ['SSSSSSS', 7], 0, function () {
return this.millisecond() * 10000;
});
addFormatToken(0, ['SSSSSSSS', 8], 0, function () {
return this.millisecond() * 100000;
});
addFormatToken(0, ['SSSSSSSSS', 9], 0, function () {
return this.millisecond() * 1000000;
});
// ALIASES
addUnitAlias('millisecond', 'ms');
// PRIORITY
addUnitPriority('millisecond', 16);
// PARSING
addRegexToken('S', match1to3, match1);
addRegexToken('SS', match1to3, match2);
addRegexToken('SSS', match1to3, match3);
var token;
for (token = 'SSSS'; token.length <= 9; token += 'S') {
addRegexToken(token, matchUnsigned);
}
function parseMs(input, array) {
array[MILLISECOND] = toInt(('0.' + input) * 1000);
}
for (token = 'S'; token.length <= 9; token += 'S') {
addParseToken(token, parseMs);
}
// MOMENTS
var getSetMillisecond = makeGetSet('Milliseconds', false);
// FORMATTING
addFormatToken('z', 0, 0, 'zoneAbbr');
addFormatToken('zz', 0, 0, 'zoneName');
// MOMENTS
function getZoneAbbr () {
return this._isUTC ? 'UTC' : '';
}
function getZoneName () {
return this._isUTC ? 'Coordinated Universal Time' : '';
}
var momentPrototype__proto = Moment.prototype;
momentPrototype__proto.add = add_subtract__add;
momentPrototype__proto.calendar = moment_calendar__calendar;
momentPrototype__proto.clone = clone;
momentPrototype__proto.diff = diff;
momentPrototype__proto.endOf = endOf;
momentPrototype__proto.format = format;
momentPrototype__proto.from = from;
momentPrototype__proto.fromNow = fromNow;
momentPrototype__proto.to = to;
momentPrototype__proto.toNow = toNow;
momentPrototype__proto.get = stringGet;
momentPrototype__proto.invalidAt = invalidAt;
momentPrototype__proto.isAfter = isAfter;
momentPrototype__proto.isBefore = isBefore;
momentPrototype__proto.isBetween = isBetween;
momentPrototype__proto.isSame = isSame;
momentPrototype__proto.isSameOrAfter = isSameOrAfter;
momentPrototype__proto.isSameOrBefore = isSameOrBefore;
momentPrototype__proto.isValid = moment_valid__isValid;
momentPrototype__proto.lang = lang;
momentPrototype__proto.locale = locale;
momentPrototype__proto.localeData = localeData;
momentPrototype__proto.max = prototypeMax;
momentPrototype__proto.min = prototypeMin;
momentPrototype__proto.parsingFlags = parsingFlags;
momentPrototype__proto.set = stringSet;
momentPrototype__proto.startOf = startOf;
momentPrototype__proto.subtract = add_subtract__subtract;
momentPrototype__proto.toArray = toArray;
momentPrototype__proto.toObject = toObject;
momentPrototype__proto.toDate = toDate;
momentPrototype__proto.toISOString = moment_format__toISOString;
momentPrototype__proto.toJSON = toJSON;
momentPrototype__proto.toString = toString;
momentPrototype__proto.unix = unix;
momentPrototype__proto.valueOf = to_type__valueOf;
momentPrototype__proto.creationData = creationData;
// Year
momentPrototype__proto.year = getSetYear;
momentPrototype__proto.isLeapYear = getIsLeapYear;
// Week Year
momentPrototype__proto.weekYear = getSetWeekYear;
momentPrototype__proto.isoWeekYear = getSetISOWeekYear;
// Quarter
momentPrototype__proto.quarter = momentPrototype__proto.quarters = getSetQuarter;
// Month
momentPrototype__proto.month = getSetMonth;
momentPrototype__proto.daysInMonth = getDaysInMonth;
// Week
momentPrototype__proto.week = momentPrototype__proto.weeks = getSetWeek;
momentPrototype__proto.isoWeek = momentPrototype__proto.isoWeeks = getSetISOWeek;
momentPrototype__proto.weeksInYear = getWeeksInYear;
momentPrototype__proto.isoWeeksInYear = getISOWeeksInYear;
// Day
momentPrototype__proto.date = getSetDayOfMonth;
momentPrototype__proto.day = momentPrototype__proto.days = getSetDayOfWeek;
momentPrototype__proto.weekday = getSetLocaleDayOfWeek;
momentPrototype__proto.isoWeekday = getSetISODayOfWeek;
momentPrototype__proto.dayOfYear = getSetDayOfYear;
// Hour
momentPrototype__proto.hour = momentPrototype__proto.hours = getSetHour;
// Minute
momentPrototype__proto.minute = momentPrototype__proto.minutes = getSetMinute;
// Second
momentPrototype__proto.second = momentPrototype__proto.seconds = getSetSecond;
// Millisecond
momentPrototype__proto.millisecond = momentPrototype__proto.milliseconds = getSetMillisecond;
// Offset
momentPrototype__proto.utcOffset = getSetOffset;
momentPrototype__proto.utc = setOffsetToUTC;
momentPrototype__proto.local = setOffsetToLocal;
momentPrototype__proto.parseZone = setOffsetToParsedOffset;
momentPrototype__proto.hasAlignedHourOffset = hasAlignedHourOffset;
momentPrototype__proto.isDST = isDaylightSavingTime;
momentPrototype__proto.isLocal = isLocal;
momentPrototype__proto.isUtcOffset = isUtcOffset;
momentPrototype__proto.isUtc = isUtc;
momentPrototype__proto.isUTC = isUtc;
// Timezone
momentPrototype__proto.zoneAbbr = getZoneAbbr;
momentPrototype__proto.zoneName = getZoneName;
// Deprecations
momentPrototype__proto.dates = deprecate('dates accessor is deprecated. Use date instead.', getSetDayOfMonth);
momentPrototype__proto.months = deprecate('months accessor is deprecated. Use month instead', getSetMonth);
momentPrototype__proto.years = deprecate('years accessor is deprecated. Use year instead', getSetYear);
momentPrototype__proto.zone = deprecate('moment().zone is deprecated, use moment().utcOffset instead. http://momentjs.com/guides/#/warnings/zone/', getSetZone);
momentPrototype__proto.isDSTShifted = deprecate('isDSTShifted is deprecated. See http://momentjs.com/guides/#/warnings/dst-shifted/ for more information', isDaylightSavingTimeShifted);
var momentPrototype = momentPrototype__proto;
function moment__createUnix (input) {
return local__createLocal(input * 1000);
}
function moment__createInZone () {
return local__createLocal.apply(null, arguments).parseZone();
}
function preParsePostFormat (string) {
return string;
}
var prototype__proto = Locale.prototype;
prototype__proto.calendar = locale_calendar__calendar;
prototype__proto.longDateFormat = longDateFormat;
prototype__proto.invalidDate = invalidDate;
prototype__proto.ordinal = ordinal;
prototype__proto.preparse = preParsePostFormat;
prototype__proto.postformat = preParsePostFormat;
prototype__proto.relativeTime = relative__relativeTime;
prototype__proto.pastFuture = pastFuture;
prototype__proto.set = locale_set__set;
// Month
prototype__proto.months = localeMonths;
prototype__proto.monthsShort = localeMonthsShort;
prototype__proto.monthsParse = localeMonthsParse;
prototype__proto.monthsRegex = monthsRegex;
prototype__proto.monthsShortRegex = monthsShortRegex;
// Week
prototype__proto.week = localeWeek;
prototype__proto.firstDayOfYear = localeFirstDayOfYear;
prototype__proto.firstDayOfWeek = localeFirstDayOfWeek;
// Day of Week
prototype__proto.weekdays = localeWeekdays;
prototype__proto.weekdaysMin = localeWeekdaysMin;
prototype__proto.weekdaysShort = localeWeekdaysShort;
prototype__proto.weekdaysParse = localeWeekdaysParse;
prototype__proto.weekdaysRegex = weekdaysRegex;
prototype__proto.weekdaysShortRegex = weekdaysShortRegex;
prototype__proto.weekdaysMinRegex = weekdaysMinRegex;
// Hours
prototype__proto.isPM = localeIsPM;
prototype__proto.meridiem = localeMeridiem;
function lists__get (format, index, field, setter) {
var locale = locale_locales__getLocale();
var utc = create_utc__createUTC().set(setter, index);
return locale[field](utc, format);
}
function listMonthsImpl (format, index, field) {
if (typeof format === 'number') {
index = format;
format = undefined;
}
format = format || '';
if (index != null) {
return lists__get(format, index, field, 'month');
}
var i;
var out = [];
for (i = 0; i < 12; i++) {
out[i] = lists__get(format, i, field, 'month');
}
return out;
}
// ()
// (5)
// (fmt, 5)
// (fmt)
// (true)
// (true, 5)
// (true, fmt, 5)
// (true, fmt)
function listWeekdaysImpl (localeSorted, format, index, field) {
if (typeof localeSorted === 'boolean') {
if (typeof format === 'number') {
index = format;
format = undefined;
}
format = format || '';
} else {
format = localeSorted;
index = format;
localeSorted = false;
if (typeof format === 'number') {
index = format;
format = undefined;
}
format = format || '';
}
var locale = locale_locales__getLocale(),
shift = localeSorted ? locale._week.dow : 0;
if (index != null) {
return lists__get(format, (index + shift) % 7, field, 'day');
}
var i;
var out = [];
for (i = 0; i < 7; i++) {
out[i] = lists__get(format, (i + shift) % 7, field, 'day');
}
return out;
}
function lists__listMonths (format, index) {
return listMonthsImpl(format, index, 'months');
}
function lists__listMonthsShort (format, index) {
return listMonthsImpl(format, index, 'monthsShort');
}
function lists__listWeekdays (localeSorted, format, index) {
return listWeekdaysImpl(localeSorted, format, index, 'weekdays');
}
function lists__listWeekdaysShort (localeSorted, format, index) {
return listWeekdaysImpl(localeSorted, format, index, 'weekdaysShort');
}
function lists__listWeekdaysMin (localeSorted, format, index) {
return listWeekdaysImpl(localeSorted, format, index, 'weekdaysMin');
}
locale_locales__getSetGlobalLocale('en', {
ordinalParse: /\d{1,2}(th|st|nd|rd)/,
ordinal : function (number) {
var b = number % 10,
output = (toInt(number % 100 / 10) === 1) ? 'th' :
(b === 1) ? 'st' :
(b === 2) ? 'nd' :
(b === 3) ? 'rd' : 'th';
return number + output;
}
});
// Side effect imports
utils_hooks__hooks.lang = deprecate('moment.lang is deprecated. Use moment.locale instead.', locale_locales__getSetGlobalLocale);
utils_hooks__hooks.langData = deprecate('moment.langData is deprecated. Use moment.localeData instead.', locale_locales__getLocale);
var mathAbs = Math.abs;
function duration_abs__abs () {
var data = this._data;
this._milliseconds = mathAbs(this._milliseconds);
this._days = mathAbs(this._days);
this._months = mathAbs(this._months);
data.milliseconds = mathAbs(data.milliseconds);
data.seconds = mathAbs(data.seconds);
data.minutes = mathAbs(data.minutes);
data.hours = mathAbs(data.hours);
data.months = mathAbs(data.months);
data.years = mathAbs(data.years);
return this;
}
function duration_add_subtract__addSubtract (duration, input, value, direction) {
var other = create__createDuration(input, value);
duration._milliseconds += direction * other._milliseconds;
duration._days += direction * other._days;
duration._months += direction * other._months;
return duration._bubble();
}
// supports only 2.0-style add(1, 's') or add(duration)
function duration_add_subtract__add (input, value) {
return duration_add_subtract__addSubtract(this, input, value, 1);
}
// supports only 2.0-style subtract(1, 's') or subtract(duration)
function duration_add_subtract__subtract (input, value) {
return duration_add_subtract__addSubtract(this, input, value, -1);
}
function absCeil (number) {
if (number < 0) {
return Math.floor(number);
} else {
return Math.ceil(number);
}
}
function bubble () {
var milliseconds = this._milliseconds;
var days = this._days;
var months = this._months;
var data = this._data;
var seconds, minutes, hours, years, monthsFromDays;
// if we have a mix of positive and negative values, bubble down first
// check: https://github.com/moment/moment/issues/2166
if (!((milliseconds >= 0 && days >= 0 && months >= 0) ||
(milliseconds <= 0 && days <= 0 && months <= 0))) {
milliseconds += absCeil(monthsToDays(months) + days) * 864e5;
days = 0;
months = 0;
}
// The following code bubbles up values, see the tests for
// examples of what that means.
data.milliseconds = milliseconds % 1000;
seconds = absFloor(milliseconds / 1000);
data.seconds = seconds % 60;
minutes = absFloor(seconds / 60);
data.minutes = minutes % 60;
hours = absFloor(minutes / 60);
data.hours = hours % 24;
days += absFloor(hours / 24);
// convert days to months
monthsFromDays = absFloor(daysToMonths(days));
months += monthsFromDays;
days -= absCeil(monthsToDays(monthsFromDays));
// 12 months -> 1 year
years = absFloor(months / 12);
months %= 12;
data.days = days;
data.months = months;
data.years = years;
return this;
}
function daysToMonths (days) {
// 400 years have 146097 days (taking into account leap year rules)
// 400 years have 12 months === 4800
return days * 4800 / 146097;
}
function monthsToDays (months) {
// the reverse of daysToMonths
return months * 146097 / 4800;
}
function as (units) {
var days;
var months;
var milliseconds = this._milliseconds;
units = normalizeUnits(units);
if (units === 'month' || units === 'year') {
days = this._days + milliseconds / 864e5;
months = this._months + daysToMonths(days);
return units === 'month' ? months : months / 12;
} else {
// handle milliseconds separately because of floating point math errors (issue #1867)
days = this._days + Math.round(monthsToDays(this._months));
switch (units) {
case 'week' : return days / 7 + milliseconds / 6048e5;
case 'day' : return days + milliseconds / 864e5;
case 'hour' : return days * 24 + milliseconds / 36e5;
case 'minute' : return days * 1440 + milliseconds / 6e4;
case 'second' : return days * 86400 + milliseconds / 1000;
// Math.floor prevents floating point math errors here
case 'millisecond': return Math.floor(days * 864e5) + milliseconds;
default: throw new Error('Unknown unit ' + units);
}
}
}
// TODO: Use this.as('ms')?
function duration_as__valueOf () {
return (
this._milliseconds +
this._days * 864e5 +
(this._months % 12) * 2592e6 +
toInt(this._months / 12) * 31536e6
);
}
function makeAs (alias) {
return function () {
return this.as(alias);
};
}
var asMilliseconds = makeAs('ms');
var asSeconds = makeAs('s');
var asMinutes = makeAs('m');
var asHours = makeAs('h');
var asDays = makeAs('d');
var asWeeks = makeAs('w');
var asMonths = makeAs('M');
var asYears = makeAs('y');
function duration_get__get (units) {
units = normalizeUnits(units);
return this[units + 's']();
}
function makeGetter(name) {
return function () {
return this._data[name];
};
}
var milliseconds = makeGetter('milliseconds');
var seconds = makeGetter('seconds');
var minutes = makeGetter('minutes');
var hours = makeGetter('hours');
var days = makeGetter('days');
var months = makeGetter('months');
var years = makeGetter('years');
function weeks () {
return absFloor(this.days() / 7);
}
var round = Math.round;
var thresholds = {
s: 45, // seconds to minute
m: 45, // minutes to hour
h: 22, // hours to day
d: 26, // days to month
M: 11 // months to year
};
// helper function for moment.fn.from, moment.fn.fromNow, and moment.duration.fn.humanize
function substituteTimeAgo(string, number, withoutSuffix, isFuture, locale) {
return locale.relativeTime(number || 1, !!withoutSuffix, string, isFuture);
}
function duration_humanize__relativeTime (posNegDuration, withoutSuffix, locale) {
var duration = create__createDuration(posNegDuration).abs();
var seconds = round(duration.as('s'));
var minutes = round(duration.as('m'));
var hours = round(duration.as('h'));
var days = round(duration.as('d'));
var months = round(duration.as('M'));
var years = round(duration.as('y'));
var a = seconds < thresholds.s && ['s', seconds] ||
minutes <= 1 && ['m'] ||
minutes < thresholds.m && ['mm', minutes] ||
hours <= 1 && ['h'] ||
hours < thresholds.h && ['hh', hours] ||
days <= 1 && ['d'] ||
days < thresholds.d && ['dd', days] ||
months <= 1 && ['M'] ||
months < thresholds.M && ['MM', months] ||
years <= 1 && ['y'] || ['yy', years];
a[2] = withoutSuffix;
a[3] = +posNegDuration > 0;
a[4] = locale;
return substituteTimeAgo.apply(null, a);
}
// This function allows you to set the rounding function for relative time strings
function duration_humanize__getSetRelativeTimeRounding (roundingFunction) {
if (roundingFunction === undefined) {
return round;
}
if (typeof(roundingFunction) === 'function') {
round = roundingFunction;
return true;
}
return false;
}
// This function allows you to set a threshold for relative time strings
function duration_humanize__getSetRelativeTimeThreshold (threshold, limit) {
if (thresholds[threshold] === undefined) {
return false;
}
if (limit === undefined) {
return thresholds[threshold];
}
thresholds[threshold] = limit;
return true;
}
function humanize (withSuffix) {
var locale = this.localeData();
var output = duration_humanize__relativeTime(this, !withSuffix, locale);
if (withSuffix) {
output = locale.pastFuture(+this, output);
}
return locale.postformat(output);
}
var iso_string__abs = Math.abs;
function iso_string__toISOString() {
// for ISO strings we do not use the normal bubbling rules:
// * milliseconds bubble up until they become hours
// * days do not bubble at all
// * months bubble up until they become years
// This is because there is no context-free conversion between hours and days
// (think of clock changes)
// and also not between days and months (28-31 days per month)
var seconds = iso_string__abs(this._milliseconds) / 1000;
var days = iso_string__abs(this._days);
var months = iso_string__abs(this._months);
var minutes, hours, years;
// 3600 seconds -> 60 minutes -> 1 hour
minutes = absFloor(seconds / 60);
hours = absFloor(minutes / 60);
seconds %= 60;
minutes %= 60;
// 12 months -> 1 year
years = absFloor(months / 12);
months %= 12;
// inspired by https://github.com/dordille/moment-isoduration/blob/master/moment.isoduration.js
var Y = years;
var M = months;
var D = days;
var h = hours;
var m = minutes;
var s = seconds;
var total = this.asSeconds();
if (!total) {
// this is the same as C#'s (Noda) and python (isodate)...
// but not other JS (goog.date)
return 'P0D';
}
return (total < 0 ? '-' : '') +
'P' +
(Y ? Y + 'Y' : '') +
(M ? M + 'M' : '') +
(D ? D + 'D' : '') +
((h || m || s) ? 'T' : '') +
(h ? h + 'H' : '') +
(m ? m + 'M' : '') +
(s ? s + 'S' : '');
}
var duration_prototype__proto = Duration.prototype;
duration_prototype__proto.abs = duration_abs__abs;
duration_prototype__proto.add = duration_add_subtract__add;
duration_prototype__proto.subtract = duration_add_subtract__subtract;
duration_prototype__proto.as = as;
duration_prototype__proto.asMilliseconds = asMilliseconds;
duration_prototype__proto.asSeconds = asSeconds;
duration_prototype__proto.asMinutes = asMinutes;
duration_prototype__proto.asHours = asHours;
duration_prototype__proto.asDays = asDays;
duration_prototype__proto.asWeeks = asWeeks;
duration_prototype__proto.asMonths = asMonths;
duration_prototype__proto.asYears = asYears;
duration_prototype__proto.valueOf = duration_as__valueOf;
duration_prototype__proto._bubble = bubble;
duration_prototype__proto.get = duration_get__get;
duration_prototype__proto.milliseconds = milliseconds;
duration_prototype__proto.seconds = seconds;
duration_prototype__proto.minutes = minutes;
duration_prototype__proto.hours = hours;
duration_prototype__proto.days = days;
duration_prototype__proto.weeks = weeks;
duration_prototype__proto.months = months;
duration_prototype__proto.years = years;
duration_prototype__proto.humanize = humanize;
duration_prototype__proto.toISOString = iso_string__toISOString;
duration_prototype__proto.toString = iso_string__toISOString;
duration_prototype__proto.toJSON = iso_string__toISOString;
duration_prototype__proto.locale = locale;
duration_prototype__proto.localeData = localeData;
// Deprecations
duration_prototype__proto.toIsoString = deprecate('toIsoString() is deprecated. Please use toISOString() instead (notice the capitals)', iso_string__toISOString);
duration_prototype__proto.lang = lang;
// Side effect imports
// FORMATTING
addFormatToken('X', 0, 0, 'unix');
addFormatToken('x', 0, 0, 'valueOf');
// PARSING
addRegexToken('x', matchSigned);
addRegexToken('X', matchTimestamp);
addParseToken('X', function (input, array, config) {
config._d = new Date(parseFloat(input, 10) * 1000);
});
addParseToken('x', function (input, array, config) {
config._d = new Date(toInt(input));
});
// Side effect imports
utils_hooks__hooks.version = '2.15.1';
setHookCallback(local__createLocal);
utils_hooks__hooks.fn = momentPrototype;
utils_hooks__hooks.min = min;
utils_hooks__hooks.max = max;
utils_hooks__hooks.now = now;
utils_hooks__hooks.utc = create_utc__createUTC;
utils_hooks__hooks.unix = moment__createUnix;
utils_hooks__hooks.months = lists__listMonths;
utils_hooks__hooks.isDate = isDate;
utils_hooks__hooks.locale = locale_locales__getSetGlobalLocale;
utils_hooks__hooks.invalid = valid__createInvalid;
utils_hooks__hooks.duration = create__createDuration;
utils_hooks__hooks.isMoment = isMoment;
utils_hooks__hooks.weekdays = lists__listWeekdays;
utils_hooks__hooks.parseZone = moment__createInZone;
utils_hooks__hooks.localeData = locale_locales__getLocale;
utils_hooks__hooks.isDuration = isDuration;
utils_hooks__hooks.monthsShort = lists__listMonthsShort;
utils_hooks__hooks.weekdaysMin = lists__listWeekdaysMin;
utils_hooks__hooks.defineLocale = defineLocale;
utils_hooks__hooks.updateLocale = updateLocale;
utils_hooks__hooks.locales = locale_locales__listLocales;
utils_hooks__hooks.weekdaysShort = lists__listWeekdaysShort;
utils_hooks__hooks.normalizeUnits = normalizeUnits;
utils_hooks__hooks.relativeTimeRounding = duration_humanize__getSetRelativeTimeRounding;
utils_hooks__hooks.relativeTimeThreshold = duration_humanize__getSetRelativeTimeThreshold;
utils_hooks__hooks.calendarFormat = getCalendarFormat;
utils_hooks__hooks.prototype = momentPrototype;
var _moment = utils_hooks__hooks;
return _moment;
}));<|fim▁end|> | });
// ALIASES |
<|file_name|>test-utils.js<|end_file_name|><|fim▁begin|>import { expect } from "chai";
import { createStore, applyMiddleware } from 'redux';
import Immutable from 'immutable';
import reducers from '../src/lib/reducers';
import * as Utils from '../src/lib/utils';
describe('utils', function () {
beforeEach(function () {
this.state = Immutable.fromJS([
{title: "alpha"},
{title: "beta", children: [
{title: "foo"},
{title: "bar", children: [
{title: "quux"},
{title: "xyzzy"}
]},
{title: "baz"}
]},
{title: "gamma"},
{title: "level1", children: [
{title: 'level2', children: [
{title: 'level3', children: [
{title: 'level4'}
]}
]}
]},
{title: 'thud'}
]);
this.store = createStore(reducers, {
nodes: this.state
});
this.expectedSeries = [
'0',
'1',
'1.children.0',<|fim▁hole|> '1.children.2',
'2',
'3',
'3.children.0',
'3.children.0.children.0',
'3.children.0.children.0.children.0',
'4'
];
this.expectedSiblings = [
['0', '1', '2', '3', '4'],
['1.children.0', '1.children.1', '1.children.2'],
['1.children.1.children.0', '1.children.1.children.1'],
            ['3.children.0'],
['3.children.0.children.0'],
['3.children.0.children.0.children.0'],
];
});
describe('splitPath', function () {
it('should split a dot-delimited path into a key array', function () {
expect(Utils.splitPath('1.children.1.children.2'))
.to.deep.equal(['1', 'children', '1', 'children', '2']);
})
})
describe('getNodeContext', function () {
it('should construct contextual information for a node path', function () {
var path = '1.children.1.children.0';
var expected = {
key: ['1', 'children', '1', 'children', '0'],
parentKey: ['1', 'children', '1'],
index: 0,
value: this.state.getIn(['1', 'children', '1', 'children', '0']),
siblings: this.state.getIn(['1', 'children', '1', 'children'])
};
var result = Utils.getNodeContext(this.state, path);
expect(result).to.deep.equal(expected);
})
});
function commonSiblingPathTest (inReverse, state, expectedSiblings) {
var traversal = inReverse ?
Utils.getPreviousSiblingPath :
Utils.getNextSiblingPath;
var siblingList;
while (siblingList = expectedSiblings.shift()) {
if (inReverse) { siblingList.reverse(); }
var current, expected, result;
current = siblingList.shift();
while (siblingList.length) {
expected = siblingList.shift();
result = traversal(state, current);
expect(result).to.equal(expected);
current = expected;
}
result = traversal(state, current);
expect(result).to.equal(null);
}
}
describe('getNextSiblingPath', function () {
it('should find the path to the next sibling', function () {
commonSiblingPathTest(false, this.state, this.expectedSiblings);
});
});
describe('getPreviousSiblingPath', function () {
it('should find the path to the previous sibling', function () {
commonSiblingPathTest(true, this.state, this.expectedSiblings);
});
});
function commonNodePathTest (inReverse, state, expectedSeries) {
var current, expected, result;
var traversal = (inReverse) ?
Utils.getPreviousNodePath : Utils.getNextNodePath;
if (inReverse) { expectedSeries.reverse(); }
current = expectedSeries.shift();
while (expectedSeries.length) {
expected = expectedSeries.shift();
result = traversal(state, current);
expect(result).to.equal(expected);
current = expected;
}
}
describe('getNextNodePath', function () {
it('should find the path to the next node', function () {
commonNodePathTest(false, this.state, this.expectedSeries);
})
it('should skip children of collapsed nodes', function () {
let state = this.state;
['1', '3.children.0'].forEach(path => {
state = state.updateIn(
path.split('.'),
n => n.set('collapsed', true));
});
commonNodePathTest(false, state,
['0', '1', '2', '3', '3.children.0', '4']);
});
});
describe('getPreviousNodePath', function () {
it('should find the path to the previous node', function () {
commonNodePathTest(true, this.state, this.expectedSeries);
});
it('should skip children of collapsed nodes', function () {
let state = this.state;
['1', '3.children.0'].forEach(path => {
state = state.updateIn(
path.split('.'),
n => n.set('collapsed', true));
});
commonNodePathTest(true, state,
['0', '1', '2', '3', '3.children.0', '4']);
});
});
});<|fim▁end|> | '1.children.1',
'1.children.1.children.0',
'1.children.1.children.1', |
<|file_name|>clear_bits_geq.rs<|end_file_name|><|fim▁begin|>use word::{Word, ToWord, UnsignedWord};
/// Clears all bits of `x` at position >= `bit`.
///
/// # Panics
///
/// If `bit >= bit_size()`.
///
/// # Intrinsics:
/// - BMI 2.0: bzhi.
///
/// # Examples
///
/// ```
/// use bitwise::word::*;
///
/// assert_eq!(0b1111_0010u8.clear_bits_geq(5u8), 0b0001_0010u8);
/// assert_eq!(clear_bits_geq(0b1111_0010u8, 5u8), 0b0001_0010u8);
/// ```
#[inline]
pub fn clear_bits_geq<T: Word, U: UnsignedWord>(x: T, bit: U) -> T {
debug_assert!(T::bit_size() > bit.to());
x.bzhi(bit.to())<|fim▁hole|>/// Method version of [`clear_bits_geq`](fn.clear_bits_geq.html).
pub trait ClearBitsGeq {
#[inline]
fn clear_bits_geq<U: UnsignedWord>(self, n: U) -> Self;
}
impl<T: Word> ClearBitsGeq for T {
#[inline]
fn clear_bits_geq<U: UnsignedWord>(self, n: U) -> Self {
clear_bits_geq(self, n)
}
}<|fim▁end|> | }
|
<|file_name|>issue-23898.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
<|fim▁hole|>// run-pass
#![allow(non_camel_case_types)]
// Note: This test was used to demonstrate #5873 (now #23898).
enum State { ST_NULL, ST_WHITESPACE }
fn main() {
[State::ST_NULL; (State::ST_WHITESPACE as usize)];
}<|fim▁end|> | |
<|file_name|>GroupedByTimeOperator.java<|end_file_name|><|fim▁begin|>package com.oath.cyclops.internal.stream.spliterators.push;
import com.oath.cyclops.types.persistent.PersistentCollection;
import java.util.Collection;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
/**
* Created by johnmcclean on 12/01/2017.
*/
public class GroupedByTimeOperator<T,C extends PersistentCollection<? super T>,R> extends BaseOperator<T,R> {
private final Supplier<? extends C> factory;
private final Function<? super C, ? extends R> finalizer;
private final long time;
private final TimeUnit t;
public GroupedByTimeOperator(Operator<T> source, Supplier<? extends C> factory,
Function<? super C, ? extends R> finalizer,long time,
TimeUnit t){
super(source);
this.factory = factory;
this.finalizer = finalizer;
this.time = time;
this.t = t;
<|fim▁hole|>
@Override
public StreamSubscription subscribe(Consumer<? super R> onNext, Consumer<? super Throwable> onError, Runnable onComplete) {
long toRun = t.toNanos(time);
PersistentCollection[] next = {factory.get()};
long[] start ={System.nanoTime()};
StreamSubscription[] upstream = {null};
StreamSubscription sub = new StreamSubscription(){
@Override
public void request(long n) {
if(n<=0) {
onError.accept(new IllegalArgumentException("3.9 While the Subscription is not cancelled, Subscription.request(long n) MUST throw a java.lang.IllegalArgumentException if the argument is <= 0."));
return;
}
if(!isOpen)
return;
super.request(n);
upstream[0].request(n);
}
@Override
public void cancel() {
upstream[0].cancel();
super.cancel();
}
};
upstream[0] = source.subscribe(e-> {
try {
next[0] = next[0].plus(e);
if(System.nanoTime()-start[0] > toRun){
onNext.accept(finalizer.apply((C)next[0]));
sub.requested.decrementAndGet();
next[0] = factory.get();
start[0] = System.nanoTime();
}
else{
request( upstream,1l);
}
} catch (Throwable t) {
onError.accept(t);
}
}
,t->{onError.accept(t);
sub.requested.decrementAndGet();
if(sub.isActive())
request( upstream,1);
},()->{
if(next[0].size()>0) {
try {
onNext.accept(finalizer.apply((C) next[0]));
} catch(Throwable t){
onError.accept(t);
}
sub.requested.decrementAndGet();
}
sub.cancel();
onComplete.run();
});
return sub;
}
@Override
public void subscribeAll(Consumer<? super R> onNext, Consumer<? super Throwable> onError, Runnable onCompleteDs) {
long toRun = t.toNanos(time);
PersistentCollection[] next = {factory.get()};
long[] start ={System.nanoTime()};
source.subscribeAll(e-> {
try {
next[0] = next[0].plus(e);
if(System.nanoTime()-start[0] > toRun){
onNext.accept(finalizer.apply((C)next[0]));
next[0] = factory.get();
start[0] = System.nanoTime();
}
} catch (Throwable t) {
onError.accept(t);
}
}
,onError,()->{
if(next[0].size()>0) {
try {
onNext.accept(finalizer.apply((C) next[0]));
} catch(Throwable t){
onError.accept(t);
}
}
onCompleteDs.run();
});
}
}<|fim▁end|> |
}
|
<|file_name|>test_utils.py<|end_file_name|><|fim▁begin|>"""
Test rest_framework_json_api's utils functions.
"""
from rest_framework_json_api import utils
from ..serializers import EntrySerializer
from ..tests import TestBase
class GetRelatedResourceTests(TestBase):
"""
Ensure the `get_related_resource_type` function returns correct types.
"""
def test_reverse_relation(self):
"""
Ensure reverse foreign keys have their types identified correctly.
"""
serializer = EntrySerializer()
field = serializer.fields['comments']
self.assertEqual(utils.get_related_resource_type(field), 'comments')<|fim▁hole|> def test_m2m_relation(self):
"""
Ensure m2ms have their types identified correctly.
"""
serializer = EntrySerializer()
field = serializer.fields['authors']
self.assertEqual(utils.get_related_resource_type(field), 'authors')<|fim▁end|> | |
<|file_name|>StateManager.cpp<|end_file_name|><|fim▁begin|>#include <cstdlib> // srand()
#include <ctime> // time()
#include "StateManager.hpp"
#include "SDL.hpp"
#include "Log.hpp"
#include "Config.hpp"
#include "GameStateMainMenu.hpp"
#include "GameStateGame.hpp"<|fim▁hole|>
StateManager::StateManager(int width, int height)
{
SDL::init(30);
Window::init(width, height, "Prototype", "yes");
Graphics::init(Window::screen);
Config::load("config.ini");
if (Config::debugMode)
Log::debugMode(true);
// Here we start the game!
this->currentState = new GameStateMainMenu();
this->currentState->load();
this->sharedInfo = 0;
srand(time(NULL));
}
StateManager::~StateManager()
{
SDL::exit();
if (this->currentState)
{
this->currentState->unload();
delete this->currentState;
}
}
void StateManager::run()
{
bool letsQuit = false;
while (!letsQuit)
{
// The delta time from the last frame
uint32_t delta = SDL::getDelta();
// Normally i'd refresh input right here, but
// each state must do it individually
int whatToDoNow = this->currentState->update(delta);
switch (whatToDoNow)
{
case GameState::CONTINUE:
break;
case GameState::QUIT:
letsQuit = true;
break;
case GameState::GAME_START:
this->sharedInfo = this->currentState->unload();
delete (this->currentState);
this->currentState = new GameStateGame();
this->currentState->load(this->sharedInfo);
break;
case GameState::GAME_OVER:
this->sharedInfo = this->currentState->unload();
delete (this->currentState);
this->currentState = new GameStateGameOver();
this->currentState->load(this->sharedInfo);
break;
default:
break;
}
Window::clear();
this->currentState->render();
Window::refresh();
// Let's wait a bit if the framerate is too low.
SDL::framerateWait();
}
}<|fim▁end|> | #include "GameStateGameOver.hpp"
#include "Window.hpp"
#include "Graphics.hpp" |
<|file_name|>secrets.ts<|end_file_name|><|fim▁begin|>import * as _ from 'lodash';
import * as AWS from 'aws-sdk';
import * as config from './config';
export interface Secrets {
REDDIT_CLIENT_ID: string;
REDDIT_CLIENT_TOKEN: string;
REDDIT_USERNAME: string;
REDDIT_PASSWORD: string;
STEAM_API_KEY: string;
}
export async function resolve(): Promise<Secrets> {
if (config.isLocalDev() && !process.env.USE_SECRETS) {
console.info('Getting secrets from ENV instead of encrypted bundle.');
return _.pick(process.env, [
'REDDIT_CLIENT_ID',<|fim▁hole|> ]) as any;
}
const kms = new AWS.KMS({
region: 'us-west-2',
});
console.log('Resolving secrets.');
const secrets = await kms.decrypt({
CiphertextBlob: Buffer.from(process.env.SECRETS, 'base64'),
}).promise();
return JSON.parse(secrets.Plaintext.toString());
}<|fim▁end|> | 'REDDIT_CLIENT_TOKEN',
'REDDIT_PASSWORD',
'STEAM_API_KEY', |
<|file_name|>ibm_wire.py<|end_file_name|><|fim▁begin|><|fim▁hole|># Developed exclusively at US Government expense under US Air Force contract
# FA8721-05-C-002. The rights of the United States Government to use, modify,
# reproduce, release, perform, display or disclose this computer software and
# computer software documentation in whole or in part, in any manner and for
# any purpose whatsoever, and to have or authorize others to do so, are
# Unrestricted and Unlimited.
#
# Licensed for use under the BSD License as described in the BSD-LICENSE.txt
# file in the root directory of this release.
#
# Project: SPAR
# Authors: SY
# Description: IBM TA2 wire class
#
# Modifications:
# Date Name Modification
# ---- ---- ------------
# 22 Oct 2012 SY Original Version
# *****************************************************************
import ibm_circuit_object as ico
class IBMInputWire(ico.IBMCircuitObject):
"""
This class represents a single IBM input wire.
"""
def __init__(self, displayname, circuit):
"""Initializes the wire with the display name and circuit specified."""
ico.IBMCircuitObject.__init__(self, displayname, 0.0, 0, circuit)<|fim▁end|> | # *****************************************************************
# Copyright (c) 2013 Massachusetts Institute of Technology
# |
<|file_name|>officemru.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the Microsoft Office MRUs Windows Registry plugin."""
import unittest
from plaso.formatters import officemru # pylint: disable=unused-import
from plaso.formatters import winreg # pylint: disable=unused-import
from plaso.lib import eventdata
from plaso.lib import timelib
from plaso.parsers.winreg_plugins import officemru
from tests import test_lib as shared_test_lib
from tests.parsers.winreg_plugins import test_lib
__author__ = 'David Nides ([email protected])'
class OfficeMRUPluginTest(test_lib.RegistryPluginTestCase):
"""Tests for the Microsoft Office MRUs Windows Registry plugin."""
@shared_test_lib.skipUnlessHasTestFile([u'NTUSER-WIN7.DAT'])
def testProcess(self):
"""Tests the Process function."""
test_file_entry = self._GetTestFileEntry([u'NTUSER-WIN7.DAT'])
key_path = (
u'HKEY_CURRENT_USER\\Software\\Microsoft\\Office\\14.0\\Word\\'
u'File MRU')
win_registry = self._GetWinRegistryFromFileEntry(test_file_entry)
registry_key = win_registry.GetKeyByPath(key_path)
plugin_object = officemru.OfficeMRUPlugin()
storage_writer = self._ParseKeyWithPlugin(
registry_key, plugin_object, file_entry=test_file_entry)
self.assertEqual(len(storage_writer.events), 6)
event_object = storage_writer.events[5]
self.assertEqual(event_object.pathspec, test_file_entry.path_spec)
# This should just be the plugin name, as we're invoking it directly,
# and not through the parser.
self.assertEqual(event_object.parser, plugin_object.plugin_name)
expected_timestamp = timelib.Timestamp.CopyFromString(
u'2012-03-13 18:27:15.089802')
self.assertEqual(event_object.timestamp, expected_timestamp)
self.assertEqual(
event_object.timestamp_desc, eventdata.EventTimestamp.WRITTEN_TIME)
regvalue_identifier = u'Item 1'
expected_value_string = (
u'[F00000000][T01CD0146EA1EADB0][O00000000]*'
u'C:\\Users\\nfury\\Documents\\StarFury\\StarFury\\'<|fim▁hole|> u'[{0:s}] '
u'{1:s}: {2:s} '
u'Item 2: [F00000000][T01CD00921FC127F0][O00000000]*'
u'C:\\Users\\nfury\\Documents\\StarFury\\StarFury\\Earthforce SA-26 '
u'Thunderbolt Star Fury.docx '
u'Item 3: [F00000000][T01CD009208780140][O00000000]*'
u'C:\\Users\\nfury\\Documents\\StarFury\\StarFury\\StarFury.docx '
u'Item 4: [F00000000][T01CCFE0B22DA9EF0][O00000000]*'
u'C:\\Users\\nfury\\Documents\\VIBRANIUM.docx '
u'Item 5: [F00000000][T01CCFCBA595DFC30][O00000000]*'
u'C:\\Users\\nfury\\Documents\\ADAMANTIUM-Background.docx').format(
key_path, regvalue_identifier, expected_value_string)
expected_short_message = u'{0:s}...'.format(expected_message[0:77])
self._TestGetMessageStrings(
event_object, expected_message, expected_short_message)
# Test OfficeMRUWindowsRegistryEvent.
event_object = storage_writer.events[0]
expected_timestamp = timelib.Timestamp.CopyFromString(
u'2012-03-13 18:27:15.083')
self.assertEqual(event_object.timestamp, expected_timestamp)
self.assertEqual(
event_object.timestamp_desc, eventdata.EventTimestamp.WRITTEN_TIME)
self.assertEqual(event_object.value_string, expected_value_string)
expected_message = u'[{0:s}] Value: {1:s}'.format(
key_path, expected_value_string)
expected_short_message = u'{0:s}...'.format(expected_value_string[0:77])
self._TestGetMessageStrings(
event_object, expected_message, expected_short_message)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | u'SA-23E Mitchell-Hyundyne Starfury.docx')
self._TestRegvalue(event_object, regvalue_identifier, expected_value_string)
expected_message = ( |
<|file_name|>generator.py<|end_file_name|><|fim▁begin|>from PIL import Image
from PIL import ImageFont
from PIL import ImageDraw
from name_generator import get_random_name
FONT_SIZE = 14
FONT = ImageFont.truetype("appletext.ttf", FONT_SIZE)
def generate_new_tombstone(name, inscription):
img = Image.open("created_tombstones/base_tombstone.png")
draw = ImageDraw.Draw(img)
add_name(draw, name)
add_inscription(draw, inscription)
out_file_name = get_random_name()
img.save("created_tombstones/{}.png".format(out_file_name))
return out_file_name
def add_name(draw, name):<|fim▁hole|> n = 26
split_list = split_inscription(inscription)
for i, insc in enumerate(split_list):
distance = 210 + ((i + 1) * (FONT_SIZE + 10))
draw.text((center(len(insc)), distance), insc, (0,0,0), font=FONT)
def center(text_len):
mid = text_len / 2
if text_len % 2 == 0:
start_loc = 330 + ((4-mid) * FONT_SIZE) + (FONT_SIZE / 2)
else:
start_loc = 330 + ((4-mid) * FONT_SIZE)
return start_loc
def split_inscription(inscription):
n = 26
words = inscription.split()
result = []
current = ""
for word in words:
current = "{} {}".format(current, word)
if len(current) > n:
result.append(current)
current = ""
if current:
result.append(current)
return result
if __name__ == '__main__':
generate_new_tombstone("Aaron Montana", "Ate corndog with no mustard. Tried putting it in gravy. Died.")<|fim▁end|> | draw.text((330, 160), "Here Lies", (0,0,0), font=FONT)
draw.text((center(len(name)), 160 + FONT_SIZE + 10), name, (0,0,0), font=FONT)
def add_inscription(draw, inscription): |
<|file_name|>set.rs<|end_file_name|><|fim▁begin|>use liner::KeyBindings;
use shell::Shell;
use shell::flags::*;
use std::io::{self, Write};
use std::iter;
const HELP: &'static str = r#"NAME
set - Set or unset values of shell options and positional parameters.
SYNOPSIS
set [ --help ] [-e | +e] [-x | +x] [-o [vi | emacs]] [- | --] [STRING]...
DESCRIPTION
Shell options may be set using the '-' character, and unset using the '+' character.
OPTIONS
-e Exit immediately if a command exits with a non-zero status.
-o Specifies that an argument will follow that sets the key map.
The keymap argument may be either `vi` or `emacs`.
-x Specifies that commands will be printed as they are executed.
-- Following arguments will be set as positional arguments in the shell.
If no argument are supplied, arguments will be unset.
- Following arguments will be set as positional arguments in the shell.
If no arguments are suppled, arguments will not be unset.
"#;
enum PositionalArgs {
UnsetIfNone,
RetainIfNone,
}
use self::PositionalArgs::*;
pub(crate) fn set(args: &[&str], shell: &mut Shell) -> i32 {
let stdout = io::stdout();
let stderr = io::stderr();
let mut args_iter = args.iter();
let mut positionals = None;
while let Some(arg) = args_iter.next() {
if arg.starts_with("--") {
if arg.len() == 2 {
positionals = Some(UnsetIfNone);
break;
}
if &arg[2..] == "help" {
let mut stdout = stdout.lock();
let _ = stdout.write(HELP.as_bytes());
} else {
return 0;
}
} else if arg.starts_with('-') {
if arg.len() == 1 {
positionals = Some(RetainIfNone);
break;
}
for flag in arg.bytes().skip(1) {
match flag {
b'e' => shell.flags |= ERR_EXIT,
b'o' => match args_iter.next() {
Some(&mode) if mode == "vi" => {
if let Some(context) = shell.context.as_mut() {
context.key_bindings = KeyBindings::Vi;
}
}
Some(&mode) if mode == "emacs" => {
if let Some(context) = shell.context.as_mut() {
context.key_bindings = KeyBindings::Emacs;
}
}
Some(_) => {
let _ = stderr.lock().write_all(b"set: invalid keymap\n");
return 0;
}
None => {
let _ = stderr.lock().write_all(b"set: no keymap given\n");
return 0;
}
},
b'x' => shell.flags |= PRINT_COMMS,
_ => return 0,
}
}
} else if arg.starts_with('+') {
for flag in arg.bytes().skip(1) {
match flag {
b'e' => shell.flags &= 255 ^ ERR_EXIT,
b'x' => shell.flags &= 255 ^ PRINT_COMMS,
_ => return 0,
}
}
}
}
match positionals {
None => (),
Some(kind) => {
let command: String = shell.variables.get_array("args").unwrap()[0].to_owned();
// This used to take a `&[String]` but cloned them all, so although
// this is non-ideal and could probably be better done with `Rc`, it
// hasn't got any slower.
let arguments = iter::once(command).chain(args_iter.map(|i| i.to_string())).collect();
match kind {
UnsetIfNone => shell.variables.set_array("args", arguments),
RetainIfNone => if arguments.len() != 1 {<|fim▁hole|> }
}
}
0
}<|fim▁end|> | shell.variables.set_array("args", arguments);
}, |