file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
meter.py | import enum
import time
from pymodbus.constants import Endian
from pymodbus.client.sync import ModbusTcpClient
from pymodbus.client.sync import ModbusSerialClient
from pymodbus.payload import BinaryPayloadBuilder
from pymodbus.payload import BinaryPayloadDecoder
from pymodbus.register_read_message import ReadInputRegistersResponse
from pymodbus.register_read_message import ReadHoldingRegistersResponse
class connectionType(enum.Enum):
RTU = 1
TCP = 2
class registerType(enum.Enum):
INPUT = 1
HOLDING = 2
class registerDataType(enum.Enum):
BITS = 1
UINT8 = 2
UINT16 = 3
UINT32 = 4
UINT64 = 5
INT8 = 6
INT16 = 7
INT32 = 8
INT64 = 9
FLOAT16 = 10
FLOAT32 = 11
STRING = 12
RETRIES = 3
TIMEOUT = 1
UNIT = 1
class Meter:
model = "Generic"
registers = {}
stopbits = 1
parity = "N"
baud = 38400
wordorder = Endian.Big
byteorder = Endian.Big
def __init__(self, **kwargs):
parent = kwargs.get("parent")
if parent:
self.client = parent.client
self.mode = parent.mode
self.timeout = parent.timeout
self.retries = parent.retries
unit = kwargs.get("unit")
if unit:
self.unit = unit
else:
self.unit = parent.unit
if self.mode is connectionType.RTU:
self.device = parent.device
self.stopbits = parent.stopbits
self.parity = parent.parity
self.baud = parent.baud
elif self.mode is connectionType.TCP:
self.host = parent.host
self.port = parent.port
else:
raise NotImplementedError(self.mode)
else:
self.timeout = kwargs.get("timeout", TIMEOUT)
self.retries = kwargs.get("retries", RETRIES)
self.unit = kwargs.get("unit", UNIT)
device = kwargs.get("device")
if device:
self.device = device
stopbits = kwargs.get("stopbits")
if stopbits:
self.stopbits = stopbits
parity = kwargs.get("parity")
if (parity
and parity.upper() in ["N", "E", "O"]):
self.parity = parity.upper()
else:
self.parity = False
baud = kwargs.get("baud")
if baud:
self.baud = baud
self.mode = connectionType.RTU
self.client = ModbusSerialClient(
method="rtu",
port=self.device,
stopbits=self.stopbits,
parity=self.parity,
baudrate=self.baud,
timeout=self.timeout
)
else:
self.host = kwargs.get("host")
self.port = kwargs.get("port", 502)
self.mode = connectionType.TCP
self.client = ModbusTcpClient(
host=self.host,
port=self.port,
timeout=self.timeout
)
def __repr__(self):
if self.mode == connectionType.RTU:
return f"{self.model}({self.device}, {self.mode}: stopbits={self.stopbits}, parity={self.parity}, baud={self.baud}, timeout={self.timeout}, retries={self.retries}, unit={hex(self.unit)})"
elif self.mode == connectionType.TCP:
return f"{self.model}({self.host}:{self.port}, {self.mode}: timeout={self.timeout}, retries={self.retries}, unit={hex(self.unit)})"
else:
return f"<{self.__class__.__module__}.{self.__class__.__name__} object at {hex(id(self))}>"
def _read_input_registers(self, address, length):
for i in range(self.retries):
if not self.connected():
self.connect()
time.sleep(0.1)
continue
result = self.client.read_input_registers(address=address, count=length, unit=self.unit)
if not isinstance(result, ReadInputRegistersResponse):
continue
if len(result.registers) != length:
continue
return BinaryPayloadDecoder.fromRegisters(result.registers, byteorder=self.byteorder, wordorder=self.wordorder)
return None
def _read_holding_registers(self, address, length):
for i in range(self.retries):
if not self.connected():
self.connect()
time.sleep(0.1)
continue
result = self.client.read_holding_registers(address=address, count=length, unit=self.unit)
if not isinstance(result, ReadHoldingRegistersResponse):
continue
if len(result.registers) != length:
continue
return BinaryPayloadDecoder.fromRegisters(result.registers, byteorder=self.byteorder, wordorder=self.wordorder)
return None
def _write_holding_register(self, address, value):
return self.client.write_registers(address=address, values=value, unit=self.unit)
def _encode_value(self, data, dtype):
builder = BinaryPayloadBuilder(byteorder=self.byteorder, wordorder=self.wordorder)
try:
if dtype == registerDataType.FLOAT32:
builder.add_32bit_float(data)
elif dtype == registerDataType.INT32:
builder.add_32bit_int(data)
elif dtype == registerDataType.UINT32:
builder.add_32bit_uint(data)
elif dtype == registerDataType.INT16:
builder.add_16bit_int(data)
else:
raise NotImplementedError(dtype)
except NotImplementedError:
raise
return builder.to_registers()
def _decode_value(self, data, length, dtype, vtype):
try:
if dtype == registerDataType.FLOAT32:
return vtype(data.decode_32bit_float())
elif dtype == registerDataType.INT32:
return vtype(data.decode_32bit_int())
elif dtype == registerDataType.UINT32:
return vtype(data.decode_32bit_uint())
elif dtype == registerDataType.INT16:
return vtype(data.decode_16bit_int())
else:
raise NotImplementedError(dtype)
except NotImplementedError:
raise
def _read(self, value):
address, length, rtype, dtype, vtype, label, fmt, batch, sf = value
try:
if rtype == registerType.INPUT:
return self._decode_value(self._read_input_registers(address, length), length, dtype, vtype)
elif rtype == registerType.HOLDING:
return self._decode_value(self._read_holding_registers(address, length), length, dtype, vtype)
else:
raise NotImplementedError(rtype)
except NotImplementedError:
raise
def _read_all(self, values, rtype):
addr_min = False
addr_max = False
for k, v in values.items():
v_addr = v[0]
v_length = v[1]
if addr_min is False:
addr_min = v_addr
if addr_max is False:
addr_max = v_addr + v_length
if v_addr < addr_min:
addr_min = v_addr
if (v_addr + v_length) > addr_max:
addr_max = v_addr + v_length
results = {}
offset = addr_min
length = addr_max - addr_min
try:
if rtype == registerType.INPUT:
data = self._read_input_registers(offset, length)
elif rtype == registerType.HOLDING:
data = self._read_holding_registers(offset, length)
else:
raise NotImplementedError(rtype)
if not data:
return results
for k, v in values.items():
address, length, rtype, dtype, vtype, label, fmt, batch, sf = v
if address > offset:
skip_bytes = address - offset
offset += skip_bytes
data.skip_bytes(skip_bytes * 2)
results[k] = self._decode_value(data, length, dtype, vtype)
offset += length
except NotImplementedError:
raise
return results
def _write(self, value, data):
|
def connect(self):
return self.client.connect()
def disconnect(self):
self.client.close()
def connected(self):
return self.client.is_socket_open()
def get_scaling(self, key):
address, length, rtype, dtype, vtype, label, fmt, batch, sf = self.registers[key]
return sf
def read(self, key, scaling=False):
if key not in self.registers:
raise KeyError(key)
if scaling:
return self._read(self.registers[key]) * self.get_scaling(key)
else:
return self._read(self.registers[key])
def write(self, key, data):
if key not in self.registers:
raise KeyError(key)
return self._write(self.registers[key], data / self.get_scaling(key))
def read_all(self, rtype=registerType.INPUT, scaling=False):
registers = {k: v for k, v in self.registers.items() if (v[2] == rtype)}
results = {}
for batch in range(1, max(len(registers), 2)):
register_batch = {k: v for k, v in registers.items() if (v[7] == batch)}
if not register_batch:
break
results.update(self._read_all(register_batch, rtype))
if scaling:
return {k: v * self.get_scaling(k) for k, v in results.items()}
else:
return {k: v for k, v in results.items()}
| address, length, rtype, dtype, vtype, label, fmt, batch, sf = value
try:
if rtype == registerType.HOLDING:
return self._write_holding_register(address, self._encode_value(data, dtype))
else:
raise NotImplementedError(rtype)
except NotImplementedError:
raise |
requestManager.js | "use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function | (result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
/**
* A streaming pending request.
*/
class PendingRequest {
}
/**
* Orchestrates and manages pending streaming requests.
*/
class RequestManager {
constructor() {
this._pendingRequests = {};
}
pendingRequestCount() {
return Object.keys(this._pendingRequests).length;
}
signalResponse(requestId, response) {
return __awaiter(this, void 0, void 0, function* () {
let pendingRequest = this._pendingRequests[requestId];
if (pendingRequest) {
pendingRequest.resolve(response);
delete this._pendingRequests[requestId];
return true;
}
return false;
});
}
getResponse(requestId) {
let pendingRequest = this._pendingRequests[requestId];
if (pendingRequest) {
return Promise.reject(`requestId '${requestId}' already exists in RequestManager`);
}
pendingRequest = new PendingRequest();
pendingRequest.requestId = requestId;
let promise = new Promise((resolve, reject) => {
pendingRequest.resolve = resolve;
pendingRequest.reject = reject;
});
this._pendingRequests[requestId] = pendingRequest;
return promise;
}
}
exports.RequestManager = RequestManager;
//# sourceMappingURL=requestManager.js.map | step |
to_string_test.go | package input_test
import (
"github.com/neo4j/neo4j-go-driver/v4/neo4j"
"github.com/tlarsen7572/graphyx/input"
"testing"
"time"
)
func TestNodeToString(t *testing.T) {
node := neo4j.Node{
Id: 2,
Labels: []string{`Something`},
Props: map[string]interface{}{
"Prop1": 2,
"Prop2": time.Date(2020, 1, 2, 3, 4, 5, 6, time.UTC),
},
}
actual := input.ToString(node)
expected := `(:Something {"Prop1":2,"Prop2":"2020-01-02T03:04:05.000000006Z"})`
if actual != expected {
t.Fatalf(`expected '%v' but got '%v'`, expected, actual)
}
}
func TestNodeToStringNoProperties(t *testing.T) {
node := neo4j.Node{
Id: 2,
Labels: []string{`Something`},
Props: map[string]interface{}{},
}
actual := input.ToString(node)
expected := `(:Something)`
if actual != expected {
t.Fatalf(`expected '%v' but got '%v'`, expected, actual)
}
}
func | (t *testing.T) {
node := neo4j.Node{
Id: 2,
Labels: []string{},
Props: map[string]interface{}{},
}
actual := input.ToString(node)
expected := `()`
if actual != expected {
t.Fatalf(`expected '%v' but got '%v'`, expected, actual)
}
}
func TestNodeToStringNoLabels(t *testing.T) {
node := neo4j.Node{
Id: 2,
Labels: []string{},
Props: map[string]interface{}{
"Prop1": 2,
"Prop2": "Hello world",
},
}
actual := input.ToString(node)
expected := `( {"Prop1":2,"Prop2":"Hello world"})`
if actual != expected {
t.Fatalf(`expected '%v' but got '%v'`, expected, actual)
}
}
func TestRelationshipToString(t *testing.T) {
rel := neo4j.Relationship{
StartId: 10,
EndId: 11,
Id: 2,
Type: `Something`,
Props: map[string]interface{}{
"Prop1": 2,
"Prop2": time.Date(2020, 1, 2, 3, 4, 5, 6, time.UTC),
},
}
actual := input.ToString(rel)
expected := `[:Something {"Prop1":2,"Prop2":"2020-01-02T03:04:05.000000006Z"}]`
if actual != expected {
t.Fatalf(`expected '%v' but got '%v'`, expected, actual)
}
}
func TestRelationshipToStringNoProperties(t *testing.T) {
rel := neo4j.Relationship{
StartId: 10,
EndId: 11,
Id: 2,
Type: `Something`,
Props: map[string]interface{}{},
}
actual := input.ToString(rel)
expected := `[:Something]`
if actual != expected {
t.Fatalf(`expected '%v' but got '%v'`, expected, actual)
}
}
func TestEmptyRelationshipToString(t *testing.T) {
rel := neo4j.Relationship{
StartId: 10,
EndId: 11,
Id: 2,
Type: ``,
Props: map[string]interface{}{},
}
actual := input.ToString(rel)
expected := `[]`
if actual != expected {
t.Fatalf(`expected '%v' but got '%v'`, expected, actual)
}
}
func TestRelationshipToStringNoType(t *testing.T) {
rel := neo4j.Relationship{
StartId: 10,
EndId: 11,
Id: 2,
Type: ``,
Props: map[string]interface{}{
"Prop1": 2,
"Prop2": time.Date(2020, 1, 2, 3, 4, 5, 6, time.UTC),
},
}
actual := input.ToString(rel)
expected := `[ {"Prop1":2,"Prop2":"2020-01-02T03:04:05.000000006Z"}]`
if actual != expected {
t.Fatalf(`expected '%v' but got '%v'`, expected, actual)
}
}
func TestLeftToRightPathToString(t *testing.T) {
path := neo4j.Path{
Nodes: []neo4j.Node{
{Id: 1, Labels: []string{`A`}, Props: map[string]interface{}{"Key": 1}},
{Id: 2, Labels: []string{`B`}, Props: map[string]interface{}{"Key": 2}},
{Id: 3, Labels: []string{`C`}, Props: map[string]interface{}{"Key": 3}},
},
Relationships: []neo4j.Relationship{
{Id: 4, StartId: 1, EndId: 2, Type: `A_to_B`},
{Id: 5, StartId: 2, EndId: 3, Type: `B_to_C`},
},
}
actual := input.ToString(path)
expected := `(:A {"Key":1})-[:A_to_B]->(:B {"Key":2})-[:B_to_C]->(:C {"Key":3})`
if actual != expected {
t.Fatalf(`expected '%v' but got '%v'`, expected, actual)
}
}
func TestEmptyPathToString(t *testing.T) {
path := neo4j.Path{
Nodes: []neo4j.Node{},
Relationships: []neo4j.Relationship{},
}
actual := input.ToString(path)
expected := ``
if actual != expected {
t.Fatalf(`expected '%v' but got '%v'`, expected, actual)
}
}
func TestPathToStringOneNode(t *testing.T) {
path := neo4j.Path{
Nodes: []neo4j.Node{
{Id: 1, Labels: []string{`A`}, Props: map[string]interface{}{"Key": 1}},
},
Relationships: []neo4j.Relationship{},
}
actual := input.ToString(path)
expected := `(:A {"Key":1})`
if actual != expected {
t.Fatalf(`expected '%v' but got '%v'`, expected, actual)
}
}
func TestPrimitivesToString(t *testing.T) {
expected := `hello world`
actual := input.ToString(expected)
if actual != expected {
t.Fatalf(`expected '%v' but got '%v'`, expected, actual)
}
expected = `1`
actual = input.ToString(1)
if actual != expected {
t.Fatalf(`expected '%v' but got '%v'`, expected, actual)
}
expected = `1.2`
actual = input.ToString(1.2)
if actual != expected {
t.Fatalf(`expected '%v' but got '%v'`, expected, actual)
}
}
func TestMultipleRelationshipsFromOneNode(t *testing.T) {
path := neo4j.Path{
Nodes: []neo4j.Node{
{Id: 5, Labels: []string{`Person`}, Props: map[string]interface{}{"Key": 5}},
{Id: 119, Labels: []string{`Movie`}, Props: map[string]interface{}{"Key": 119}},
},
Relationships: []neo4j.Relationship{
{Id: 4, StartId: 5, EndId: 119, Type: `DIRECTED`},
{Id: 5, StartId: 5, EndId: 119, Type: `WROTE`},
},
}
actual := input.ToString(path)
expected := `(:Movie {"Key":119})<-[:DIRECTED]-(:Person {"Key":5})-[:WROTE]->(:Movie {"Key":119})`
if actual != expected {
t.Fatalf(`expected '%v' but got '%v'`, expected, actual)
}
}
func TestMergingPath(t *testing.T) {
path := neo4j.Path{
Nodes: []neo4j.Node{
{Id: 5, Labels: []string{`Person`}, Props: map[string]interface{}{"Key": 5}},
{Id: 119, Labels: []string{`Movie`}, Props: map[string]interface{}{"Key": 119}},
{Id: 200, Labels: []string{`Person`}, Props: map[string]interface{}{"Key": 200}},
},
Relationships: []neo4j.Relationship{
{Id: 4, StartId: 5, EndId: 119, Type: `DIRECTED`},
{Id: 5, StartId: 200, EndId: 119, Type: `WROTE`},
},
}
actual := input.ToString(path)
expected := `(:Person {"Key":5})-[:DIRECTED]->(:Movie {"Key":119})<-[:WROTE]-(:Person {"Key":200})`
if actual != expected {
t.Fatalf(`expected '%v' but got '%v'`, expected, actual)
}
}
func TestAlternatingPath(t *testing.T) {
path := neo4j.Path{
Nodes: []neo4j.Node{
{Id: 5, Labels: []string{`Person`}, Props: map[string]interface{}{"Key": 5}},
{Id: 119, Labels: []string{`Movie`}, Props: map[string]interface{}{"Key": 119}},
{Id: 200, Labels: []string{`Person`}, Props: map[string]interface{}{"Key": 200}},
{Id: 321, Labels: []string{`Movie`}, Props: map[string]interface{}{"Key": 321}},
{Id: 432, Labels: []string{`Person`}, Props: map[string]interface{}{"Key": 432}},
},
Relationships: []neo4j.Relationship{
{Id: 4001, StartId: 5, EndId: 119, Type: `DIRECTED`},
{Id: 4002, StartId: 200, EndId: 119, Type: `WROTE`},
{Id: 4003, StartId: 200, EndId: 321, Type: `DIRECTED`},
{Id: 4004, StartId: 432, EndId: 321, Type: `ACTED_IN`},
},
}
actual := input.ToString(path)
expected := `(:Person {"Key":5})-[:DIRECTED]->(:Movie {"Key":119})<-[:WROTE]-(:Person {"Key":200})-[:DIRECTED]->(:Movie {"Key":321})<-[:ACTED_IN]-(:Person {"Key":432})`
if actual != expected {
t.Fatalf(`expected '%v' but got '%v'`, expected, actual)
}
}
func TestAlternatingPathStartingWithEndId(t *testing.T) {
path := neo4j.Path{
Nodes: []neo4j.Node{
{Id: 5, Labels: []string{`Person`}, Props: map[string]interface{}{"Key": 5}},
{Id: 119, Labels: []string{`Movie`}, Props: map[string]interface{}{"Key": 119}},
{Id: 200, Labels: []string{`Person`}, Props: map[string]interface{}{"Key": 200}},
{Id: 321, Labels: []string{`Movie`}, Props: map[string]interface{}{"Key": 321}},
{Id: 432, Labels: []string{`Person`}, Props: map[string]interface{}{"Key": 432}},
{Id: 543, Labels: []string{`Movie`}, Props: map[string]interface{}{"Key": 543}},
},
Relationships: []neo4j.Relationship{
{Id: 4001, StartId: 5, EndId: 543, Type: `DIRECTED`},
{Id: 4002, StartId: 5, EndId: 119, Type: `DIRECTED`},
{Id: 4003, StartId: 200, EndId: 119, Type: `WROTE`},
{Id: 4004, StartId: 200, EndId: 321, Type: `DIRECTED`},
{Id: 4005, StartId: 432, EndId: 321, Type: `ACTED_IN`},
},
}
actual := input.ToString(path)
expected := `(:Movie {"Key":543})<-[:DIRECTED]-(:Person {"Key":5})-[:DIRECTED]->(:Movie {"Key":119})<-[:WROTE]-(:Person {"Key":200})-[:DIRECTED]->(:Movie {"Key":321})<-[:ACTED_IN]-(:Person {"Key":432})`
if actual != expected {
t.Fatalf(`expected '%v' but got '%v'`, expected, actual)
}
}
| TestEmptyNodeToString |
models.py | from datetime import datetime, timedelta
import json
import re
import math
from dateutil import relativedelta
from django.core.cache import cache
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.postgres.fields import ArrayField
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.utils.timezone import now
from django.utils import timezone
from django.urls import reverse
from django.utils.functional import cached_property
from packaging.specifiers import SpecifierSet
from distutils.version import LooseVersion
import requests
from core.utils import STATUS_CHOICES, status_choices_switch
from core.models import BaseModel
from package.repos import get_repo_for_repo_url
from package.signals import signal_fetch_latest_metadata
from package.utils import get_version, get_pypi_version, normalize_license
from django.utils.translation import gettext_lazy as _
repo_url_help_text = settings.PACKAGINATOR_HELP_TEXT["REPO_URL"]
pypi_url_help_text = settings.PACKAGINATOR_HELP_TEXT["PYPI_URL"]
class NoPyPiVersionFound(Exception):
pass
class Category(BaseModel):
title = models.CharField(_("Title"), max_length=50)
slug = models.SlugField(_("slug"))
description = models.TextField(_("description"), blank=True)
title_plural = models.CharField(_("Title Plural"), max_length=50, blank=True)
show_pypi = models.BooleanField(_("Show pypi stats & version"), default=True)
class Meta:
ordering = ["title"]
verbose_name_plural = "Categories"
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse("category", args=[self.slug])
class Package(BaseModel):
title = models.CharField(_("Title"), max_length=100)
slug = models.SlugField(
_("Slug"),
help_text="Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens. Values will be converted to lowercase.",
unique=True,
)
category = models.ForeignKey(
Category, verbose_name="Installation", on_delete=models.PROTECT
)
repo_description = models.TextField(_("Repo Description"), blank=True)
repo_url = models.URLField(
_("repo URL"), help_text=repo_url_help_text, blank=True, unique=True
)
repo_watchers = models.IntegerField(_("Stars"), default=0)
repo_forks = models.IntegerField(_("repo forks"), default=0)
pypi_url = models.CharField(
_("PyPI slug"),
max_length=255,
help_text=pypi_url_help_text,
blank=True,
default="",
)
pypi_downloads = models.IntegerField(_("Pypi downloads"), default=0)
pypi_classifiers = ArrayField(
models.CharField(max_length=100), blank=True, null=True
)
pypi_license = models.CharField(
_("PyPI License"), max_length=100, blank=True, null=True
)
pypi_licenses = ArrayField(models.CharField(max_length=100), blank=True, null=True)
pypi_requires_python = models.CharField(
_("PyPI Requires Python"), max_length=100, blank=True, null=True
)
supports_python3 = models.BooleanField(
_("Supports Python 3"), blank=True, null=True
)
participants = models.TextField(
_("Participants"),
help_text="List of collaborats/participants on the project",
blank=True,
)
usage = models.ManyToManyField(User, blank=True)
created_by = models.ForeignKey(
User, blank=True, null=True, related_name="creator", on_delete=models.SET_NULL
)
last_modified_by = models.ForeignKey(
User, blank=True, null=True, related_name="modifier", on_delete=models.SET_NULL
)
last_fetched = models.DateTimeField(blank=True, null=True, default=timezone.now)
documentation_url = models.URLField(
_("Documentation URL"), blank=True, null=True, default=""
)
commit_list = models.TextField(_("Commit List"), blank=True)
score = models.IntegerField(_("Score"), default=0)
date_deprecated = models.DateTimeField(blank=True, null=True)
date_repo_archived = models.DateTimeField(
_("date when repo was archived"), blank=True, null=True
)
deprecated_by = models.ForeignKey(
User, blank=True, null=True, related_name="deprecator", on_delete=models.PROTECT
)
deprecates_package = models.ForeignKey(
"self",
blank=True,
null=True,
related_name="replacement",
on_delete=models.PROTECT,
)
@cached_property
def is_deprecated(self):
if self.date_deprecated is None:
return False
return True
def get_pypi_uri(self):
if self.pypi_name and len(self.pypi_name):
return f"https://pypi.org/project/{self.pypi_name}/"
return None
def get_pypi_json_uri(self):
if self.pypi_name and len(self.pypi_name):
return f"https://pypi.org/pypi/{self.pypi_name}/json"
return None
@property
def pypi_name(self):
"""return the pypi name of a package"""
if not self.pypi_url.strip():
return ""
name = self.pypi_url
if "http://pypi.python.org/pypi/" in name:
name = name.replace("http://pypi.python.org/pypi/", "")
if "https://pypi.python.org/pypi/" in name:
name = name.replace("https://pypi.python.org/pypi/", "")
if "https://pypi.org/project/" in name:
name = name.replace("https://pypi.org/project/", "")
name = name.strip("/")
return name
def last_updated(self):
cache_name = self.cache_namer(self.last_updated)
last_commit = cache.get(cache_name)
if last_commit is not None:
return last_commit
try:
last_commit = self.commit_set.latest("commit_date").commit_date
if last_commit:
cache.set(cache_name, last_commit)
return last_commit
except ObjectDoesNotExist:
last_commit = None
return last_commit
@property
def repo(self):
return get_repo_for_repo_url(self.repo_url)
@property
def active_examples(self):
return self.packageexample_set.filter(active=True)
@property
def license_latest(self):
try:
return self.version_set.latest().license
except Version.DoesNotExist:
return "UNKNOWN"
def grids(self):
return (x.grid for x in self.gridpackage_set.all())
def repo_name(self):
return re.sub(self.repo.url_regex, "", self.repo_url)
def repo_info(self):
return dict(
username=self.repo_name().split("/")[0],
repo_name=self.repo_name().split("/")[1],
)
def participant_list(self):
return self.participants.split(",")
def get_usage_count(self):
return self.usage.count()
def commits_over_52(self):
cache_name = self.cache_namer(self.commits_over_52)
value = cache.get(cache_name)
if value is not None:
return value
now = datetime.now()
commits = self.commit_set.filter(
commit_date__gt=now - timedelta(weeks=52),
).values_list("commit_date", flat=True)
weeks = [0] * 52
for cdate in commits:
age_weeks = (now - cdate).days // 7
if age_weeks < 52:
weeks[age_weeks] += 1
value = ",".join(map(str, reversed(weeks)))
cache.set(cache_name, value)
return value
def fetch_pypi_data(self, *args, **kwargs):
# Get the releases from pypi
if self.pypi_url and len(self.pypi_url.strip()):
total_downloads = 0
response = requests.get(self.get_pypi_json_uri())
if settings.DEBUG:
if response.status_code not in (200, 404):
print("BOOM!")
print((self, response.status_code))
print(response.content)
return False
if response.status_code == 404:
if settings.DEBUG:
print("BOOM! this package probably does not exist on pypi")
print((self, response.status_code))
# If we get a 404, we can stop checking this url...
# self.pypi_url = ""
# self.save()
return False
release = json.loads(response.content)
info = release["info"]
version, created = Version.objects.get_or_create(
package=self, number=info["version"]
)
if "classifiers" in info and len(info["classifiers"]):
self.pypi_classifiers = info["classifiers"]
if "requires_python" in info and info["requires_python"]:
self.pypi_requires_python = info["requires_python"]
if self.pypi_requires_python and "3" in SpecifierSet(
self.pypi_requires_python
):
self.supports_python3 = True
# do we have a license set?
if "license" in info and len(info["license"]):
licenses = [info["license"]]
for classifier in info["classifiers"]:
if classifier.startswith("License"):
licenses.append(classifier.split("::")[-1].strip())
break
version.licenses = licenses
version.license = licenses[0]
if self.pypi_license != version.license:
self.pypi_license = version.license
if self.pypi_licenses != version.licenses:
self.pypi_licenses = version.licenses
# do we have a license set in our classifier?
elif "classifiers" in info and len(info["classifiers"]):
licenses = []
for classifier in info["classifiers"]:
if classifier.startswith("License"):
licenses.append(classifier.split("::")[-1].strip())
break
version.licenses = licenses
version.license = licenses[0]
if self.pypi_license != version.license:
self.pypi_license = version.license
if self.pypi_licenses != version.licenses:
self.pypi_licenses = version.licenses
# version stuff
try:
url_data = release["urls"][0]
version.downloads = url_data["downloads"]
version.upload_time = url_data["upload_time"]
except IndexError:
# Not a real release so we just guess the upload_time.
version.upload_time = version.created
for classifier in info["classifiers"]:
if classifier.startswith("Development Status"):
version.development_status = status_choices_switch(classifier)
break
for classifier in info["classifiers"]:
if classifier.startswith("Programming Language :: Python :: 3"):
version.supports_python3 = True
if not self.supports_python3:
self.supports_python3 = True
break
version.save()
self.pypi_downloads = total_downloads
# Calculate total downloads
return True
return False
def fetch_metadata(self, fetch_pypi=True, fetch_repo=True):
if fetch_pypi:
self.fetch_pypi_data()
if fetch_repo:
self.repo.fetch_metadata(self)
signal_fetch_latest_metadata.send(sender=self)
self.save()
def grid_clear_detail_template_cache(self):
for grid in self.grids():
grid.clear_detail_template_cache()
def calculate_score(self):
"""
Scores a penalty of 10% of the stars for each 3 months the package is not updated;
+ a penalty of -30% of the stars if it does not support python 3.
So an abandoned packaged for 2 years would lose 80% of its stars.
"""
delta = relativedelta.relativedelta(now(), self.last_updated())
delta_months = (delta.years * 12) + delta.months
last_updated_penalty = math.modf(delta_months / 3)[1] * self.repo_watchers / 10
last_version = self.version_set.last()
is_python_3 = last_version and last_version.supports_python3
# TODO: Address this better
python_3_penalty = (
0 if is_python_3 else min([self.repo_watchers * 30 / 100, 1000])
)
# penalty for docs maybe
return self.repo_watchers - last_updated_penalty - python_3_penalty
def save(self, *args, **kwargs):
if not self.repo_description:
self.repo_description = ""
self.grid_clear_detail_template_cache()
self.score = self.calculate_score()
super().save(*args, **kwargs)
def fetch_commits(self):
self.repo.fetch_commits(self)
def pypi_version(self):
cache_name = self.cache_namer(self.pypi_version)
version = cache.get(cache_name)
if version is not None:
return version
version = get_pypi_version(self)
cache.set(cache_name, version)
return version
def last_released(self):
cache_name = self.cache_namer(self.last_released)
version = cache.get(cache_name)
if version is not None:
return version
version = get_version(self)
cache.set(cache_name, version)
return version
@property
def development_status(self):
"""Gets data needed in API v2 calls"""
release = self.last_released()
if release:
return self.last_released().pretty_status
return None
@property
def pypi_ancient(self):
release = self.last_released()
if release:
return release.upload_time < datetime.now() - timedelta(365)
return None
@property
def no_development(self):
commit_date = self.last_updated()
if commit_date is not None:
return commit_date < datetime.now() - timedelta(365)
return None
class Meta:
ordering = ["title"]
get_latest_by = "id"
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse("package", args=[self.slug])
@property
def last_commit(self):
return self.commit_set.latest()
def commits_over_52_listed(self):
return [int(x) for x in self.commits_over_52().split(",")]
class PackageExample(BaseModel):
package = models.ForeignKey(Package, on_delete=models.CASCADE)
title = models.CharField(_("Title"), max_length=100)
url = models.URLField(_("URL"))
active = models.BooleanField(
_("Active"),
default=True,
help_text="Moderators have to approve links before they are provided",
)
created_by = models.ForeignKey(
User, blank=True, null=True, on_delete=models.SET_NULL
)
class Meta:
ordering = ["title"]
def __str__(self):
return self.title
@property
def pretty_url(self):
if self.url.startswith("http"):
return self.url
return "http://" + self.url
class Commit(BaseModel):
package = models.ForeignKey(Package, on_delete=models.CASCADE)
commit_date = models.DateTimeField(_("Commit Date"))
commit_hash = models.CharField(
_("Commit Hash"),
help_text="Example: Git sha or SVN commit id",
max_length=150,
blank=True,
default="",
)
class Meta:
ordering = ["-commit_date"]
get_latest_by = "commit_date"
def __str__(self):
return f"Commit for '{self.package.title}' on {self.commit_date}"
def | (self, *args, **kwargs):
# reset the last_updated and commits_over_52 caches on the package
package = self.package
cache.delete(package.cache_namer(self.package.last_updated))
cache.delete(package.cache_namer(package.commits_over_52))
self.package.last_updated()
super().save(*args, **kwargs)
class VersionManager(models.Manager):
def by_version(self, visible=False, *args, **kwargs):
qs = self.get_queryset().filter(*args, **kwargs)
if visible:
qs = qs.filter(hidden=False)
def generate_valid_versions(qs):
for item in qs:
v = LooseVersion(item.number)
comparable = True
for elem in v.version:
if isinstance(elem, str):
comparable = False
if comparable:
yield item
return sorted(
# list(qs), # TODO: Add back...
list(
generate_valid_versions(qs)
), # this would remove ["2.1.0.beta3", "2.1.0.rc1",]
key=lambda v: LooseVersion(v.number),
)
def by_version_not_hidden(self, *args, **kwargs):
return list(reversed(self.by_version(visible=True, *args, **kwargs)))
class Version(BaseModel):
package = models.ForeignKey(
Package, blank=True, null=True, on_delete=models.CASCADE
)
number = models.CharField(_("Version"), max_length=100, default="", blank="")
downloads = models.IntegerField(_("downloads"), default=0)
license = models.CharField(_("license"), max_length=100, null=True, blank=True)
licenses = ArrayField(
models.CharField(max_length=100, verbose_name=_("licenses")),
null=True,
blank=True,
help_text="Comma separated list of licenses.",
)
hidden = models.BooleanField(_("hidden"), default=False)
upload_time = models.DateTimeField(
_("upload_time"),
help_text=_("When this was uploaded to PyPI"),
blank=True,
null=True,
)
development_status = models.IntegerField(
_("Development Status"), choices=STATUS_CHOICES, default=0
)
supports_python3 = models.BooleanField(_("Supports Python 3"), default=False)
objects = VersionManager()
class Meta:
get_latest_by = "upload_time"
ordering = ["-upload_time"]
@property
def pretty_license(self):
return self.license.replace("License", "").replace("license", "")
@property
def pretty_status(self):
return self.get_development_status_display().split(" ")[-1]
def save(self, *args, **kwargs):
self.license = normalize_license(self.license)
# reset the latest_version cache on the package
cache_name = self.package.cache_namer(self.package.last_released)
cache.delete(cache_name)
get_version(self.package)
# reset the pypi_version cache on the package
cache_name = self.package.cache_namer(self.package.pypi_version)
cache.delete(cache_name)
get_pypi_version(self.package)
super().save(*args, **kwargs)
def __str__(self):
return f"{self.package.title}: {self.number}"
| save |
feature_extraction_vit.py | # coding=utf-8
# Copyright 2021 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Feature extractor class for ViT."""
from typing import List, Optional, Union
import numpy as np
from PIL import Image
from ...feature_extraction_utils import BatchFeature, FeatureExtractionMixin
from ...file_utils import TensorType
from ...image_utils import IMAGENET_STANDARD_MEAN, IMAGENET_STANDARD_STD, ImageFeatureExtractionMixin, is_torch_tensor
from ...utils import logging
logger = logging.get_logger(__name__)
class ViTFeatureExtractor(FeatureExtractionMixin, ImageFeatureExtractionMixin):
| r"""
Constructs a ViT feature extractor.
This feature extractor inherits from :class:`~transformers.FeatureExtractionMixin` which contains most of the main
methods. Users should refer to this superclass for more information regarding those methods.
Args:
do_resize (:obj:`bool`, `optional`, defaults to :obj:`True`):
Whether to resize the input to a certain :obj:`size`.
size (:obj:`int` or :obj:`Tuple(int)`, `optional`, defaults to 224):
Resize the input to the given size. If a tuple is provided, it should be (width, height). If only an
integer is provided, then the input will be resized to (size, size). Only has an effect if :obj:`do_resize`
is set to :obj:`True`.
resample (:obj:`int`, `optional`, defaults to :obj:`PIL.Image.BILINEAR`):
An optional resampling filter. This can be one of :obj:`PIL.Image.NEAREST`, :obj:`PIL.Image.BOX`,
:obj:`PIL.Image.BILINEAR`, :obj:`PIL.Image.HAMMING`, :obj:`PIL.Image.BICUBIC` or :obj:`PIL.Image.LANCZOS`.
Only has an effect if :obj:`do_resize` is set to :obj:`True`.
do_normalize (:obj:`bool`, `optional`, defaults to :obj:`True`):
Whether or not to normalize the input with mean and standard deviation.
image_mean (:obj:`List[int]`, defaults to :obj:`[0.5, 0.5, 0.5]`):
The sequence of means for each channel, to be used when normalizing images.
image_std (:obj:`List[int]`, defaults to :obj:`[0.5, 0.5, 0.5]`):
The sequence of standard deviations for each channel, to be used when normalizing images.
"""
model_input_names = ["pixel_values"]
def __init__(
self,
do_resize=True,
size=224,
resample=Image.BILINEAR,
do_normalize=True,
image_mean=None,
image_std=None,
**kwargs
):
super().__init__(**kwargs)
self.do_resize = do_resize
self.size = size
self.resample = resample
self.do_normalize = do_normalize
self.image_mean = image_mean if image_mean is not None else IMAGENET_STANDARD_MEAN
self.image_std = image_std if image_std is not None else IMAGENET_STANDARD_STD
def __call__(
self,
images: Union[
Image.Image, np.ndarray, "torch.Tensor", List[Image.Image], List[np.ndarray], List["torch.Tensor"] # noqa
],
return_tensors: Optional[Union[str, TensorType]] = None,
**kwargs
) -> BatchFeature:
"""
Main method to prepare for the model one or several image(s).
.. warning::
NumPy arrays and PyTorch tensors are converted to PIL images when resizing, so the most efficient is to pass
PIL images.
Args:
images (:obj:`PIL.Image.Image`, :obj:`np.ndarray`, :obj:`torch.Tensor`, :obj:`List[PIL.Image.Image]`, :obj:`List[np.ndarray]`, :obj:`List[torch.Tensor]`):
The image or batch of images to be prepared. Each image can be a PIL image, NumPy array or PyTorch
tensor. In case of a NumPy array/PyTorch tensor, each image should be of shape (C, H, W), where C is a
number of channels, H and W are image height and width.
return_tensors (:obj:`str` or :class:`~transformers.file_utils.TensorType`, `optional`, defaults to :obj:`'np'`):
If set, will return tensors of a particular framework. Acceptable values are:
* :obj:`'tf'`: Return TensorFlow :obj:`tf.constant` objects.
* :obj:`'pt'`: Return PyTorch :obj:`torch.Tensor` objects.
* :obj:`'np'`: Return NumPy :obj:`np.ndarray` objects.
* :obj:`'jax'`: Return JAX :obj:`jnp.ndarray` objects.
Returns:
:class:`~transformers.BatchFeature`: A :class:`~transformers.BatchFeature` with the following fields:
- **pixel_values** -- Pixel values to be fed to a model, of shape (batch_size, num_channels, height,
width).
"""
# Input type checking for clearer error
valid_images = False
# Check that images has a valid type
if isinstance(images, (Image.Image, np.ndarray)) or is_torch_tensor(images):
valid_images = True
elif isinstance(images, (list, tuple)):
if len(images) == 0 or isinstance(images[0], (Image.Image, np.ndarray)) or is_torch_tensor(images[0]):
valid_images = True
if not valid_images:
raise ValueError(
"Images must of type `PIL.Image.Image`, `np.ndarray` or `torch.Tensor` (single example),"
"`List[PIL.Image.Image]`, `List[np.ndarray]` or `List[torch.Tensor]` (batch of examples)."
)
is_batched = bool(
isinstance(images, (list, tuple))
and (isinstance(images[0], (Image.Image, np.ndarray)) or is_torch_tensor(images[0]))
)
if not is_batched:
images = [images]
# transformations (resizing + normalization)
if self.do_resize and self.size is not None:
images = [self.resize(image=image, size=self.size, resample=self.resample) for image in images]
if self.do_normalize:
images = [self.normalize(image=image, mean=self.image_mean, std=self.image_std) for image in images]
# return as BatchFeature
data = {"pixel_values": images}
encoded_inputs = BatchFeature(data=data, tensor_type=return_tensors)
return encoded_inputs |
|
lib.rs | //! Provides utilities for rolling dice.
//!
//! # Examples
//!
//! ## Simple Usage
//!
//! ```
//! use one_d_six::quickroll;
//!
//! if quickroll::<u16>("1d2") == 1 {
//! println!("Heads!");
//! } else {
//! println!("Tails!");
//! }
//! ```
//!
//! ## Adding Sets of Dice Together
//!
//! ```
//! use one_d_six::Dice;
//!
//! // 3d6
//! let set_1 = Dice::new(3, 6);
//! // 2d4
//! let set_2: Dice = "2d4".parse().unwrap();
//!
//! // 3d6 + 2d4
//! let dice = set_1 + set_2;
//!
//! // Each set of dice starts pre-rolled
//! let roll = dice.total();
//!
//! println!("Result of 3d6 + 2d4 roll: {}", roll);
//! ```
//!
//! ## Getting Dice as String
//! ### Simple String
//!
//! ```
//! use one_d_six::Dice;
//!
//!
//! let dice: Dice = Dice::new(3, 6);
//! println!("3d6: {}", dice);
//! ```
//!
//! ### Complex String
//!
//! ```
//! use one_d_six::Dice;
//!
//!
//! // Will look like "1 2 3"
//! let dice = Dice::new(3, 6);
//! println!("3d6: {:?}", dice);
//! ```
use std::str::FromStr;
pub use dice::*;
pub use dice_total::*;
pub use die::*;
pub use rollable::*;
mod dice;
mod dice_total;
mod die;
mod rollable;
/// Attempts to roll dice based on a *1d6* style string.
///
/// # Example
///
/// ```
/// use one_d_six::try_quickroll;
///
/// if let Ok(roll) = try_quickroll::<u32>("1d6") {
/// assert!(roll >= 1);
/// assert!(roll <= 6);
/// } else {
/// unreachable!();
/// }
/// ```
pub fn try_quickroll<T: Rollable>(dice_format: &str) -> Result<T, String>
where
T: DiceTotal<T>,
T: FromStr,
{
let dice: Dice<T> = dice_format.parse()?;
Ok(dice.total())
}
/// Rolls dice based on a *1d6* style string.
///
/// # Example
///
/// ```
/// use one_d_six::quickroll;
///
/// let coin_flip: u8 = quickroll("1d2");
///
/// assert!(coin_flip == 1 || coin_flip == 2);
/// ```
///
/// # Panics
///
/// Panics if `dice_format` is in an improper format.
pub fn quickroll<T: Rollable>(dice_format: &str) -> T
where
T: DiceTotal<T>,
T: FromStr,
| {
let dice: Dice<T> = dice_format.parse().unwrap();
dice.total()
} |
|
0001_initial.py | # Generated by Django 3.2.5 on 2021-07-22 21:15
import api.models.user
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
| initial = True
dependencies = [
('auth', '0012_alter_user_first_name_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
('first_name', models.CharField(max_length=255)),
('last_name', models.CharField(max_length=255)),
('email', models.EmailField(max_length=254, unique=True)),
('account_balance', models.IntegerField(default=0)),
('otp', models.CharField(default=api.models.user.create_otp, max_length=6)),
('is_active', models.BooleanField(default=True)),
('is_staff', models.BooleanField(default=False)),
('is_superuser', models.BooleanField(default=False)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'verbose_name': 'User',
'verbose_name_plural': 'Users',
},
managers=[
('objects', api.models.user.UserManager()),
],
),
] |
|
Aula19ex94UneDicLista.py | #CADASTRO DE PESSOAS em dicionário - AULA 19 EXERCÍCIO 94
#dados das pessos: nome, sexo e idade
#todos os dicionários numa lista
#Informar quantos cadastrados, média de idade, lista de mulheres e nomes de pessoas de idade acima da média
#
pessoa = dict()
grupo = list()
somaidades = media = 0
while True:
pessoa.clear() #limnpeza do dicionário senão dá erro nos laços
pessoa["nome"] = str(input('Nome: ')).strip()
pessoa["sexo"] = str(input('Sexo: [M/F] ')).strip().upper()
pessoa["idade"] = int(input('Idade: '))
grupo.append(pessoa.copy()) #cópia do dicionário para lista
cont = str(input('Continuar? [S/N] ')).strip().lower()
somaidades += pessoa["idade"]
if cont == 'n':
break
media = somaidades/len(grupo)
print('-'*50)
print(f'A) Pessoas cadastradas: {len(grupo)}')
print(f'B) Média de idade: {media:.2f} anos')
print(f'C) Mulheres cadastradas: ', end='')
for i in range(len(grupo)): | print()
print(f'D) Acima da média: ', end='')
for i in range(len(grupo)):
if grupo[i]["idade"] > media:
print(f'{grupo[i]["nome"]} {grupo[i]["idade"]} anos ', end='')
print()
print('-'*50) | if grupo[i]["sexo"] == 'F':
print(f'{grupo[i]["nome"]} ', end='') |
languageService.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import {NotificationType, RequestType} from 'vscode-languageclient';
// ------------------------------- < IntelliSense Ready Event > ------------------------------------
/**
* Event sent when the language service is finished updating after a connection
*/
export namespace IntelliSenseReadyNotification {
export const type = new NotificationType<IntelliSenseReadyParams, void>('textDocument/intelliSenseReady');
}
/**
* Update event parameters
*/
export class | {
/**
* URI identifying the text document
*/
public ownerUri: string;
}
/**
* Notification sent when the an IntelliSense cache invalidation is requested
*/
export namespace RebuildIntelliSenseNotification {
export const type = new NotificationType<RebuildIntelliSenseParams, void>('textDocument/rebuildIntelliSense');
}
/**
* Rebuild IntelliSense notification parameters
*/
export class RebuildIntelliSenseParams {
/**
* URI identifying the text document
*/
public ownerUri: string;
}
// ------------------------------- </ IntelliSense Ready Event > ----------------------------------
// ------------------------------- < Status Event > ------------------------------------
/**
* Event sent when the language service send a status change event
*/
export namespace StatusChangedNotification {
export const type = new NotificationType<StatusChangeParams, void>('textDocument/statusChanged');
}
/**
* Update event parameters
*/
export class StatusChangeParams {
/**
* URI identifying the text document
*/
public ownerUri: string;
/**
* The new status of the document
*/
public status: string;
}
// ------------------------------- </ Status Sent Event > ----------------------------------
// ------------------------------- < Language Flavor Changed Event > ------------------------------------
/**
* Language flavor change event parameters
*/
export class DidChangeLanguageFlavorParams {
/**
* URI identifying the text document
*/
public uri: string;
/**
* text document's language
*/
public language: string;
/**
* Sub-flavor for the langauge, e.g. 'MSSQL' for a SQL Server connection or 'Other' for any other SQL flavor
*/
public flavor: string;
}
/**
* Notification sent when the language flavor is changed
*/
export namespace LanguageFlavorChangedNotification {
export const type = new NotificationType<DidChangeLanguageFlavorParams, void>('connection/languageflavorchanged');
}
// ------------------------------- < Load Completion Extension Request > ------------------------------------
/**
* Completion extension load parameters
*/
export class CompletionExtensionParams {
/// <summary>
/// Absolute path for the assembly containing the completion extension
/// </summary>
public assemblyPath: string;
/// <summary>
/// The type name for the completion extension
/// </summary>
public typeName: string;
/// <summary>
/// Property bag for initializing the completion extension
/// </summary>
public properties: {};
}
export namespace CompletionExtLoadRequest {
export const type = new RequestType<CompletionExtensionParams, boolean, void, void>('completion/extLoad');
}
| IntelliSenseReadyParams |
uint32.ts | const UINT32_SIZE = 32;
const UINT32_MAX = Math.pow(2, UINT32_SIZE);
/**
* Converts a number to a uint32.
* @param value The number to convert
* @returns The uint32 value
*/
export const uint32 = (value: number): number => value >>> 0;
/**
* Converts a uint32 to a number between [0,1).
* @param value The uint32 value to convert
* @returns The float53 value
*/
export const float53 = (value: number): number => value / UINT32_MAX;
/**
* Rotates a value left by the number of bits.
* @param value The value to rotate
* @param bits The number of bits to rotate
* @returns The rotated value
*/ | export const rotateBits = (value: number, bits: number): number =>
(value << bits) | (value >>> (UINT32_SIZE - bits)); | |
main.go | package main
import (
"fmt"
"{{ cookiecutter.go_module_path.strip('/') }}/src"
)
// title contains the name of the project
const title = "{{ cookiecutter.project_name.strip() }}"
/*
ProjectName returns the value of `title` string
*/
func ProjectName() string {
return title
} |
func main() {
fmt.Printf("Running project: %s\n", src.ProjectName())
} |
|
views.py | from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.generics import ListAPIView
from rest_framework.permissions import AllowAny, IsAdminUser
from django_filters.rest_framework import DjangoFilterBackend
from .models import ServiceAgentBus
from .serializers import ServiceAgentSerializer
class ServiceAgentViewSetBase(ListAPIView):
serializer_class = ServiceAgentSerializer
filter_backends = [
DjangoFilterBackend,
OrderingFilter, |
search_fields = ['label']
ordering_fields = [
'published_at',
'created_at',
]
class ServiceAgentViewSet(ServiceAgentViewSetBase):
'''
List all the service agents.
This endpoint is accessible only by admin users.
'''
permission_classes = [IsAdminUser]
queryset = ServiceAgentBus.objects.all() | SearchFilter,
] |
sensors.py | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
from pecan import abort
from mongoengine import ValidationError
from st2common import log as logging
from st2common.models.api.base import jsexpose
from st2common.persistence.sensor import SensorType
from st2common.models.api.sensor import SensorTypeAPI
from st2common.exceptions.apivalidation import ValueValidationException
from st2common.validators.api.misc import validate_not_part_of_system_pack
from st2api.controllers import resource
from st2common.rbac.types import PermissionType
from st2common.rbac.decorators import request_user_has_permission
from st2common.rbac.decorators import request_user_has_resource_db_permission
http_client = six.moves.http_client
LOG = logging.getLogger(__name__)
class SensorTypeController(resource.ContentPackResourceController):
model = SensorTypeAPI
access = SensorType
supported_filters = {
'name': 'name',
'pack': 'pack'
}
options = {
'sort': ['pack', 'name']
}
include_reference = True
@request_user_has_permission(permission_type=PermissionType.SENSOR_LIST)
@jsexpose()
def get_all(self, **kwargs):
return super(SensorTypeController, self)._get_all(**kwargs)
@request_user_has_resource_db_permission(permission_type=PermissionType.SENSOR_VIEW)
@jsexpose(arg_types=[str])
def get_one(self, ref_or_id):
return super(SensorTypeController, self)._get_one(ref_or_id)
@request_user_has_resource_db_permission(permission_type=PermissionType.SENSOR_MODIFY)
@jsexpose(arg_types=[str], body_cls=SensorTypeAPI)
def put(self, ref_or_id, sensor_type):
# Note: Right now this function only supports updating of "enabled"
# attribute on the SensorType model.
# The reason for that is that SensorTypeAPI.to_model right now only
# knows how to work with sensor type definitions from YAML files.
| try:
sensor_type_db = self._get_by_ref_or_id(ref_or_id=ref_or_id)
except Exception as e:
LOG.exception(e.message)
abort(http_client.NOT_FOUND, e.message)
return
sensor_type_id = sensor_type_db.id
try:
validate_not_part_of_system_pack(sensor_type_db)
except ValueValidationException as e:
abort(http_client.BAD_REQUEST, str(e))
return
if not getattr(sensor_type, 'pack', None):
sensor_type.pack = sensor_type_db.pack
try:
old_sensor_type_db = sensor_type_db
sensor_type_db.id = sensor_type_id
sensor_type_db.enabled = getattr(sensor_type, 'enabled', False)
sensor_type_db = SensorType.add_or_update(sensor_type_db)
except (ValidationError, ValueError) as e:
LOG.exception('Unable to update sensor_type data=%s', sensor_type)
abort(http_client.BAD_REQUEST, str(e))
return
extra = {
'old_sensor_type_db': old_sensor_type_db,
'new_sensor_type_db': sensor_type_db
}
LOG.audit('Sensor updated. Sensor.id=%s.' % (sensor_type_db.id), extra=extra)
sensor_type_api = SensorTypeAPI.from_model(sensor_type_db)
return sensor_type_api |
|
mod.rs | //! # OpenTelemetry Metrics API
use std::result;
use std::sync::PoisonError;
use thiserror::Error;
mod async_instrument;
mod config;
mod counter;
mod descriptor;
mod kind;
mod meter;
pub mod noop;
mod number;
mod observer;
pub mod registry;
pub mod sdk_api;
mod sync_instrument;
mod up_down_counter;
mod value_recorder;
use crate::sdk::export::ExportError;
pub use async_instrument::{AsyncRunner, BatchObserverCallback, Observation, ObserverResult};
pub use config::InstrumentConfig;
pub use counter::{BoundCounter, Counter, CounterBuilder};
pub use descriptor::Descriptor;
pub use kind::InstrumentKind;
pub use meter::{Meter, MeterProvider};
pub use number::{AtomicNumber, Number, NumberKind};
pub use observer::{
BatchObserver, SumObserver, SumObserverBuilder, UpDownSumObserver, UpDownSumObserverBuilder,
ValueObserver, ValueObserverBuilder,
};
pub use sync_instrument::Measurement;
pub use up_down_counter::{BoundUpDownCounter, UpDownCounter, UpDownCounterBuilder};
pub use value_recorder::{BoundValueRecorder, ValueRecorder, ValueRecorderBuilder};
/// A specialized `Result` type for metric operations.
pub type Result<T> = result::Result<T, MetricsError>;
/// Errors returned by the metrics API.
#[derive(Error, Debug)]
#[non_exhaustive]
pub enum MetricsError {
/// Other errors not covered by specific cases.
#[error("Metrics error: {0}")]
Other(String),
/// Errors when requesting quantiles out of the 0-1 range.
#[error("The requested quantile is out of range")]
InvalidQuantile,
/// Errors when recording nan values.
#[error("NaN value is an invalid input")]
NaNInput,
/// Errors when recording negative values in monotonic sums.
#[error("Negative value is out of range for this instrument")]
NegativeInput,
/// Errors when merging aggregators of incompatible types.
#[error("Inconsistent aggregator types: {0}")]
InconsistentAggregator(String),
/// Errors when requesting data when no data has been collected
#[error("No data collected by this aggregator")]
NoDataCollected,
/// Errors when registering to instruments with the same name and kind
#[error("A metric was already registered by this name with another kind or number type: {0}")]
MetricKindMismatch(String),
/// Errors when processor logic is incorrect
#[error("Inconsistent processor state")]
InconsistentState,
/// Errors when aggregator cannot subtract
#[error("Aggregator does not subtract")]
NoSubtraction,
/// Fail to export metrics
#[error("Metrics exporter {} failed with {0}", .0.exporter_name())]
ExportErr(Box<dyn ExportError>),
}
impl<T: ExportError> From<T> for MetricsError {
fn | (err: T) -> Self {
MetricsError::ExportErr(Box::new(err))
}
}
impl<T> From<PoisonError<T>> for MetricsError {
fn from(err: PoisonError<T>) -> Self {
MetricsError::Other(err.to_string())
}
}
| from |
statement_create_database.rs | // Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::BTreeMap;
use std::sync::Arc;
use common_exception::Result;
use common_meta_app::schema::DatabaseMeta;
use common_planners::CreateDatabasePlan;
use common_planners::PlanNode;
use common_tracing::tracing;
use sqlparser::ast::ObjectName;
use crate::sessions::QueryContext;
use crate::sql::statements::resolve_database;
use crate::sql::statements::AnalyzableStatement;
use crate::sql::statements::AnalyzedResult;
#[derive(Debug, Clone, PartialEq)]
pub struct DfCreateDatabase {
pub if_not_exists: bool,
pub name: ObjectName,
pub engine: String,
pub engine_options: BTreeMap<String, String>,
pub options: BTreeMap<String, String>,
}
#[async_trait::async_trait]
impl AnalyzableStatement for DfCreateDatabase {
#[tracing::instrument(level = "debug", skip(self, ctx), fields(ctx.id = ctx.get_id().as_str()))]
async fn analyze(&self, ctx: Arc<QueryContext>) -> Result<AnalyzedResult> {
let tenant = ctx.get_tenant();
let (catalog, db) = resolve_database(&ctx, &self.name, "CREATE DATABASE")?;
let if_not_exists = self.if_not_exists;
let meta = self.database_meta()?;
Ok(AnalyzedResult::SimpleQuery(Box::new(
PlanNode::CreateDatabase(CreateDatabasePlan {
tenant,
if_not_exists,
catalog,
db,
meta,
}),
))) | }
impl DfCreateDatabase {
fn database_meta(&self) -> Result<DatabaseMeta> {
Ok(DatabaseMeta {
engine: self.engine.clone(),
engine_options: self.engine_options.clone(),
options: self.options.clone(),
..Default::default()
})
}
} | } |
main.rs | //! # Enc_File
//!
//! Encrypt / decrypt files or calculate hash from the command line.
//! Warning: This crate hasn't been audited or reviewed in any sense. I created it to easily encrypt und decrypt non-important files which won't cause harm if known by third parties. Don't use for anything important, use VeraCrypt or similar instead.
//!
//! Breaking change in Version 0.3: Changed input of some functions. To encrypt/decrypt and hash use e.g. "encrypt_chacha(readfile(example.file).unwrap(), key).unwrap()". Using a keymap to work with several keys conveniently. You can import your old keys, using "Add key" -> "manually".
//!
//! Breaking change in Version 0.2: Using XChaCha20Poly1305 as default encryption/decryption. AES is still available using encrypt_aes or decrypt_aes to maintain backwards compability.
//!
//! Uses XChaCha20Poly1305 (https://docs.rs/chacha20poly1305) or AES-GCM-SIV (https://docs.rs/aes-gcm-siv) for encryption, bincode (https://docs.rs/bincode) for encoding and BLAKE3 (https://docs.rs/blake3) or SHA256 / SHA512 (https://docs.rs/sha2) for hashing.
//!
//! Encrypted files are (and have to be) stored as .crpt.
//!
//! Panics at errors making execution impossible.
//!
//! Can be used as library and a binary target. Install via cargo install enc_file
//! # Examples
//!
//! ```
//! use enc_file::{encrypt_chacha, decrypt_chacha, read_file};
//!
//! //Plaintext to encrypt
//! let text = b"This a test";
//! //Provide key. Key will normally be chosen from keymap and provided to the encrypt_chacha() function
//! let key: &str = "an example very very secret key.";
//! //Convert text to Vec<u8>
//! let text_vec = text.to_vec();
//!
//! //Encrypt text
//! //Ciphertext stores the len() of encrypted content, the nonce and the actual ciphertext using bincode
//! let ciphertext = encrypt_chacha(text_vec, key).unwrap(); //encrypt vec<u8>, returns result(Vec<u8>)
//! //let ciphertext = encrypt_chacha(read_file(example.file).unwrap(), key).unwrap(); //read a file as Vec<u8> and then encrypt
//! //Check that plaintext != ciphertext
//! assert_ne!(&ciphertext, &text);
//!
//! //Decrypt ciphertext to plaintext
//! let plaintext = decrypt_chacha(ciphertext, key).unwrap();
//! //Check that text == plaintext
//! assert_eq!(format!("{:?}", text), format!("{:?}", plaintext));
//! ```
//!
//! ```
//!use enc_file::{get_blake3_hash};
//!
//!let test = b"Calculating the BLAKE3 Hash of this text";
//!let test_vec = test.to_vec(); //Convert text to Vec<u8>
//!let hash1 = get_blake3_hash(test_vec.clone()).unwrap();
//!let hash2 = get_blake3_hash(test_vec).unwrap();
//!assert_eq!(hash1, hash2); //Make sure hash1 == hash2
//!let test2 = b"Calculating the BLAKE3 Hash of this text."; //"." added at the end
//!let test2_vec = test2.to_vec();
//!let hash3 = get_blake3_hash(test2_vec).unwrap();
//!assert_ne!(hash1, hash3); //check that the added "." changes the hash
//! ```
//!
//! See https://github.com/LazyEmpiricist/enc_file
//!
// Warning: Don't use for anything important! This crate hasn't been audited or reviewed in any sense. I created it to easily encrypt und decrypt non-important files which won't cause harm if known by third parties.
//
// Breaking change in Version 0.3: Using a keymap to work with several keys conveniently. You can import your old keys, using "Add key" and choose "manually".
//
// Breaking change in Version 0.2: Using XChaCha20Poly1305 as default encryption/decryption. AES is still available using encrypt_aes or decrypt_aes to maintain backwards compability.
//
// Uses XChaCha20Poly1305 (https://docs.rs/chacha20poly1305) or AES-GCM-SIV (https://docs.rs/aes-gcm-siv) for cryptography, bincode (https://docs.rs/bincode) for encoding and BLAKE3 (https://docs.rs/blake3) or SHA256 / SHA512 (https://docs.rs/sha2) for hashing.
//
// Generate a new key.file on first run (you can also manually add keys).
//
// Encrypting "example.file" will create a new (encrypted) file "example.file.crpt" in the same directory.
//
// Decrypting "example.file.crpt" will create a new (decrypted) file "example.file" in the same directory.
//
// Warning: Both encrypt and decrypt override existing files!
//
//
// # Examples
//
// Encrypt/decrypt using XChaCha20Poly1305 and random nonce
// ```
// use enc_file::{encrypt_chacha, decrypt_chacha, read_file};
//
// //Plaintext to encrypt
// let text = b"This a test";
// //Provide key. Key will normally be chosen from keymap and provided to the encrypt_chacha() function
// let key: &str = "an example very very secret key.";
// //Convert text to Vec<u8>
// let text_vec = text.to_vec();
//
// //Encrypt text
// let ciphertext = encrypt_chacha(text_vec, key).unwrap(); //encrypt vec<u8>, returns result(Vec<u8>)
// //let ciphertext = encrypt_chacha(read_file(example.file).unwrap(), key).unwrap(); //read a file as Vec<u8> and then encrypt
// //Check that plaintext != ciphertext
// assert_ne!(&ciphertext, &text);
//
// //Decrypt ciphertext to plaintext
// let plaintext = decrypt_chacha(ciphertext, key).unwrap();
// //Check that text == plaintext
// assert_eq!(format!("{:?}", text), format!("{:?}", plaintext));
// ```
//
// Calculate Blake3 Hash
// ```
// use enc_file::{get_blake3_hash};
//
// let test = b"Calculating the BLAKE3 Hash of this text";
// let test_vec = test.to_vec(); //Convert text to Vec<u8>
// let hash1 = get_blake3_hash(test_vec.clone()).unwrap();
// let hash2 = get_blake3_hash(test_vec).unwrap();
// assert_eq!(hash1, hash2); //Make sure hash1 == hash2
// ```
use enc_file::{
add_key, choose_hashing_function, create_new_keyfile, decrypt_file, encrypt_file,
get_blake3_hash, get_input_string, get_sha256_hash, get_sha512_hash, read_file, read_keyfile,
remove_key,
};
use std::env;
use std::path::{Path, PathBuf}; | let args: Vec<String> = env::args().collect();
//enable use of hashing functions via command line
if args.len() >= 2 {
let path = PathBuf::from(&args[2]);
if path.is_file() {
if &args[1] == "hash" {
println!("Calculating Blake3 Hash for {:?}", &path);
let hash = get_blake3_hash(read_file(&path)?)?;
println!("Hash: {:?}", hash);
} else if &args[1] == "hash_sha256" {
println!("Calculating SHA256 Hash for {:?}", &path);
let hash = get_sha256_hash(read_file(&path)?)?;
println!("Hash: {:?}", hash);
} else if &args[1] == "hash_sha512" {
println!("Calculating SHA512 Hash for {:?}", &path);
let hash = get_sha512_hash(read_file(&path)?)?;
println!("Hash: {:?}", hash);
} else {
println!("Please enter valid hashing function (see docs)")
}
} else {
println!("Please enter a valid filename")
}
} else {
println!("Please enter the corresponding number to continue:\n1 Add new key\n2 Remove key\n3 Encrypt file using XChaCha20Poly1305\n4 Decrypt file using XChaCha20Poly1305\n5 Encrypt file using AES-256-GCM-SIV\n6 Decrypt file using AES-256-GCM-SIV\n7 Calculate Hash");
//Getting user input
let answer = get_input_string()?;
// Creating a Vec with choices needing a password to compare to user input
let requiring_pw = vec![
"1".to_string(),
"2".to_string(),
"3".to_string(),
"4".to_string(),
"5".to_string(),
"6".to_string(),
];
//check if the operation needs access to the keymap, requiring a password. Hashing can be done without a password.
if requiring_pw.contains(&answer) {
//All functions in this if-block require a password
//Check if there is a key.file in the directory
let (password, keymap_plaintext, new) = if !Path::new("./key.file").exists() {
//No key.file found. Ask if a new one should be created.
create_new_keyfile()?
} else {
//key.file found. Reading and decrypting content
read_keyfile()?
};
if answer == "1" {
//if user just created a new key, no need to ask again for a second key
if !new {
//Adding a new key to keymap
add_key(keymap_plaintext, password)?;
} else {
}
} else if answer == "2" {
//removing a key from keymap
remove_key(keymap_plaintext, password)?;
} else if answer == "3" {
//Encrypt file using ChaCha20Poly1305 with choosen key
encrypt_file(keymap_plaintext, "chacha")?;
} else if answer == "4" {
//Decrypt file ChaCha20Poly1305 with choosen key
decrypt_file(keymap_plaintext, "chacha")?;
} else if answer == "5" {
//Encrypt file using AES256-GCM-SIV with choosen key
encrypt_file(keymap_plaintext, "aes")?;
} else if answer == "6" {
//Decrypt file using AES256-GCM-SIV with choosen key
decrypt_file(keymap_plaintext, "aes")?;
}
//the following function don't need a password (as they don't access keymap)
} else if answer == "7" {
//Get Blake3, SHA256 or SHA512 HASH of file
choose_hashing_function()?;
} else {
//User did not a valid number (between 1 and 7)
println!("Please enter a valid choice")
}
}
Ok(())
} |
fn main() -> Result<(), Box<dyn std::error::Error>> { |
time.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import time
from datetime import datetime
def get_timestamp():
return int(time.time())
def | (timestamp, throttle):
if not timestamp:
return True
elif isinstance(timestamp, datetime):
return (datetime.utcnow() - timestamp).total_seconds() > throttle
else:
return (get_timestamp() - timestamp) > throttle
| throttle_period_expired |
searchindex.js | Search.setIndex({docnames:["indexidioms"],envversion:52,filenames:["indexidioms.rst"],objects:{},objnames:{},objtypes:{},terms:{index:0,modul:0,page:0,search:0},titles:["Welcome to idiomsdoc\u2019s documentation!"],titleterms:{document:0,idiomsdoc:0,indic:0,tabl:0,welcom:0}}) | ||
slack.ts | import * as http from 'http-status-codes';
import { Logging, ServerIpcNode, SlackBot } from '@lbt-mycrt/common';
import * as session from '../auth/session';
import { environmentDao, environmentInviteDao as inviteDao } from '../dao/mycrt-dao';
import { HttpError } from '../http-error';
import * as check from '../middleware/request-validation';
import * as schema from '../request-schema/slack-schema';
import SelfAwareRouter from './self-aware-router';
export default class SlackRouter extends SelfAwareRouter {
public name: string = 'slack';
public urlPrefix: string = '';
constructor(ipcNode: ServerIpcNode) {
super(ipcNode, [
session.loggedInOrForbidden,
]);
}
protected mountRoutes(): void {
const logger = Logging.defaultLogger(__dirname);
this.router.get('/:id(\\d+)/slack', this.handleHttpErrors(async (request, response) => {
const environment = await environmentDao.getEnvironment(request.params.id);
if (!environment) {
throw new HttpError(http.NOT_FOUND, `Environment ${request.params.id} does not exist`);
}
const isUserMember = await inviteDao.getUserMembership(request.user!, environment!);
if (!isUserMember.isMember) {
throw new HttpError(http.UNAUTHORIZED);
}
const slack = await environmentDao.getSlackConfigByEnv(request.params.id);
if (!slack) {
throw new HttpError(http.NOT_FOUND);
}
response.json(slack);
}));
this.router.post('/:id(\\d+)/slack', check.validBody(schema.slackBody),
this.handleHttpErrors(async (request, response) => {
const environment = await environmentDao.getEnvironment(request.params.id);
if (!environment) {
throw new HttpError(http.NOT_FOUND, `Environment ${request.params.id} does not exist`);
}
const isUserMember = await inviteDao.getUserMembership(request.user!, environment!);
if (!isUserMember.isAdmin) {
throw new HttpError(http.UNAUTHORIZED);
}
const slackWithEnv = await environmentDao.getSlackConfigByEnv(request.params.id);
if (slackWithEnv !== null) {
throw new HttpError(http.BAD_REQUEST, "Environment already is configured with slack");
}
const slackConfig = {
channel: request.body.channel,
token: request.body.token,
environmentId: request.params.id,
};
const slack = await environmentDao.makeSlackConfig(slackConfig);
SlackBot.postMessage("Heyo it's lil Bobby bot, nice to meet you! Imma let you know what " +
"goes down with your captures and replays in *" + environment.name + "*.", environment.id!);
response.json(slack);
}));
this.router.put('/:id(\\d+)/slack', check.validBody(schema.slackPutBody),
this.handleHttpErrors(async (request, response) => {
const environment = await environmentDao.getEnvironment(request.params.id); | const isUserMember = await inviteDao.getUserMembership(request.user!, environment!);
if (!isUserMember.isAdmin) {
throw new HttpError(http.UNAUTHORIZED);
}
const slackPut = await environmentDao.editSlackConfig(request.params.id, request.body);
const newSlack = await environmentDao.getSlackConfigByEnv(request.params.id);
response.json(newSlack);
}));
this.router.delete('/:id(\\d+)/slack', this.handleHttpErrors(async (request, response) => {
const environment = await environmentDao.getEnvironment(request.params.id);
if (!environment) {
throw new HttpError(http.NOT_FOUND);
}
const isUserMember = await inviteDao.getUserMembership(request.user!, environment!);
if (!isUserMember.isAdmin) {
throw new HttpError(http.UNAUTHORIZED);
}
const slack = await environmentDao.getSlackConfigByEnv(request.params.id);
if (!slack) {
throw new HttpError(http.NOT_FOUND);
}
const slackDel = await environmentDao.deleteSlackConfig(slack.id!);
response.status(http.OK).end();
}));
}
} | if (!environment) {
throw new HttpError(http.NOT_FOUND, `Environment ${request.params.id} does not exist`);
}
|
multi_subtask.py | from abc import ABC, abstractmethod
import numpy as np
import random
from typing import Callable, Dict, Optional, Tuple, Sequence
from .reward_spaces import Subtask
from ..lux.game import Game
class SubtaskSampler(ABC):
def __init__(self, subtask_constructors: Sequence[Callable[..., Subtask]]):
self.subtask_constructors = subtask_constructors
@abstractmethod
def sample(self, final_rewards: Optional[Tuple[float, float]]) -> Subtask:
pass
# noinspection PyMethodMayBeStatic
def get_info(self) -> Dict[str, np.ndarray]:
return {}
class RandomSampler(SubtaskSampler):
def sample(self, final_rewards: Optional[Tuple[float, float]]) -> Subtask:
return self.subtask_constructors[random.randrange(len(self.subtask_constructors))]()
class DifficultySampler(SubtaskSampler):
def __init__(self, subtask_constructors: Sequence[Callable[..., Subtask]]): | self.active_subtask_idx = -1
self.summed_rewards = np.zeros(len(self.subtask_constructors))
self.n_trials = np.zeros(len(self.subtask_constructors))
def sample(self, final_rewards: Optional[Tuple[float, float]]) -> Subtask:
if final_rewards is not None:
self.n_trials[self.active_subtask_idx] += 1
self.summed_rewards[self.active_subtask_idx] += np.mean(final_rewards)
self.active_subtask_idx = np.random.choice(len(self.subtask_constructors), p=self.weights)
return self.subtask_constructors[self.active_subtask_idx]()
@property
def weights(self) -> np.ndarray:
weights = Subtask.get_reward_spec().reward_max - self.summed_rewards / np.maximum(self.n_trials, 1)
return weights / weights.sum()
def get_info(self) -> Dict[str, np.ndarray]:
return {
f"LOGGING_{subtask.__name__}_subtask_difficulty": self.weights[i]
for i, subtask in enumerate(self.subtask_constructors)
}
class MultiSubtask(Subtask):
def __init__(
self,
subtask_constructors: Sequence[Callable[..., Subtask]] = (),
subtask_sampler_constructor: Callable[..., SubtaskSampler] = RandomSampler,
**kwargs
):
super(MultiSubtask, self).__init__(**kwargs)
self.subtask_constructors = subtask_constructors
self.subtask_sampler = subtask_sampler_constructor(self.subtask_constructors)
self.active_subtask = self.subtask_sampler.sample(None)
self.info = {
f"LOGGING_{subtask.__name__}_subtask_reward": np.array([float("nan"), float("nan")])
for subtask in self.subtask_constructors
}
def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]:
reward, done = self.active_subtask.compute_rewards_and_done(game_state, done)
for subtask in self.subtask_constructors:
reward_key = f"LOGGING_{subtask.__name__}_subtask_reward"
if isinstance(self.active_subtask, subtask):
self.info[reward_key] = np.array(reward)
else:
self.info[reward_key] = np.array([float("nan"), float("nan")])
if done:
self.active_subtask = self.subtask_sampler.sample(reward)
return reward, done
def completed_task(self, game_state: Game) -> np.ndarray:
raise NotImplementedError
def get_info(self) -> Dict[str, np.ndarray]:
return dict(**self.info, **self.subtask_sampler.get_info())
def get_subtask_encoding(self, subtask_encoding_dict: dict) -> int:
return self.active_subtask.get_subtask_encoding(subtask_encoding_dict) | super(DifficultySampler, self).__init__(subtask_constructors) |
SpeakerDistributionBarChart.js | let myBarChartSpeakerDistribution = undefined;
let speakerPicture = undefined;
/**
* This is to map the picture array just ones.
*/
$.ajax({
method: "GET",
dataType: "json",
url: "http://localhost:4567/speaker",
success: function (data) {
var speakers = data.speakers;
speakerPicture = speakers.map((speaker) => {
let rObj = {};
rObj["name"] = speaker.name + " " + speaker.surname;
rObj["picture"] = speaker.picture;
return rObj;
});
},
error: function (error) {
console.log(error);
},
});
/**
* API requests for speakerDistribution for all speaker.
* @param id
* @returns {Promise<unknown>}
*/
function getSpeakerDistributionAll() {
return new Promise((resolve) => {
$.ajax({
method: "GET",
dataType: "json",
url: "http://localhost:4567/speaker",
success: function (data) {
const speakers = data.speakers;
speakers.sort(function (sp1, sp2) {
if (sp1.allSpeeches > sp2.allSpeeches) {
return -1;
} else {
return 1;
}
});
const result = speakers.filter((speaker) => speaker.allSpeeches > 50);
console.log(result);
resolve(result);
},
error: function (error) {
console.log(error);
resolve();
},
});
});
}
/**
* API requests for speakerDistribution per speaker.
* @param id
* @returns {Promise<unknown>}
*/
function | (id, startDate, endDate) {
return new Promise((resolve) => {
if (id == "all") {
id = "11001478";
}
$.ajax({
method: "GET",
dataType: "json",
url: "http://localhost:4567/speaker?id=" + id + "&beginDate=" + startDate + "&endDate=" + endDate,
success: function (data) {
// let speaker = data;
// speaker.allSpeeches = speaker.allSpeeches.length;
resolve([data]);
},
error: function (error) {
console.log(error);
resolve();
},
});
});
}
/**
* API requests for speakerDistribution per party.
* @param id
* @returns {Promise<unknown>}
*/
function getSpeakerDistributionParty(id) {
return new Promise((resolve) => {
$.ajax({
method: "GET",
dataType: "json",
url: "http://localhost:4567/speaker",
success: function (data) {
const speakers = data.speakers.filter((speaker) => speaker.party == id);
speakers.sort(function (sp1, sp2) {
if (sp1.allSpeeches > sp2.allSpeeches) {
return -1;
} else {
return 1;
}
});
const result = speakers.filter((speaker) => speaker.allSpeeches > 10);
resolve(result);
},
error: function (error) {
console.log(error);
resolve();
},
});
});
}
/**
* This methode is for switch through the right data.
* @param type
* @param id
* @returns {Promise<*>}
*/
async function getSpeakerDistributionData(type, id, startDate, endDate) {
switch (type) {
case 0:
return await getSpeakerDistributionAll();
break;
case 1:
return await getSpeakerDistributionSpeaker(id, startDate, endDate);
break;
case 2:
return await getSpeakerDistributionParty(id);
break;
default:
return undefined;
}
}
/**
* The Methode is plotting for a given canvasID or speakerID a Chart.
* @param id
* @param canvasID
* @param type
* @returns {Promise<void>}
*/
async function plotSpeakerDistribution(
id = "all",
canvasID = "myBarChartSpeakerDistribution",
type = 0,
startDate = "2017-10-20",
endDate = "2022-02-11"
) {
const data = await getSpeakerDistributionData(type, id, startDate, endDate);
const ctx = document.getElementById(canvasID);
myBarChartSpeakerDistribution = new Chart(ctx, {
type: "bar",
plot: {
tooltip: {},
},
data: {
labels: data.map((speaker) => speaker.name + " " + speaker.surname),
datasets: [
{
axis: "x",
barThickness: 2,
data: data.map((speaker) => speaker.allSpeeches),
backgroundColor: "#dc0101",
},
],
},
options: {
legend: {
display: false,
},
scales: {
xAxes: {
display: false,
},
},
plugins: {
legend: false,
tooltip: {
enabled: false,
position: "nearest",
external: externalTooltipHandler,
},
},
},
});
}
/**
* The methode is to create the tooltip for the Chart.
* @param chart
* @returns {HTMLDivElement}
*/
const getOrCreateTooltip = (chart) => {
let tooltipEl = chart.canvas.parentNode.querySelector("div");
if (!tooltipEl) {
tooltipEl = document.createElement("div");
tooltipEl.style.background = "rgba(0, 0, 0, 0.7)";
tooltipEl.style.borderRadius = "3px";
tooltipEl.style.color = "white";
tooltipEl.style.opacity = 1;
tooltipEl.style.pointerEvents = "none";
tooltipEl.style.position = "absolute";
tooltipEl.style.transform = "translate(0%, 0)";
tooltipEl.style.transition = "all .1s ease";
tooltipEl.style.zIndex = 99;
const table = document.createElement("table");
table.style.margin = "0px";
tooltipEl.appendChild(table);
chart.canvas.parentNode.appendChild(tooltipEl);
}
return tooltipEl;
};
/**
* This param is creating the html stuff for the tooltip.
* @param context
*/
const externalTooltipHandler = (context) => {
// Tooltip Element
const { chart, tooltip } = context;
const tooltipEl = getOrCreateTooltip(chart);
// Hide if no tooltip
if (tooltip.opacity === 0) {
tooltipEl.style.opacity = 0;
return;
}
// Set Text
if (tooltip.body) {
const titleLines = tooltip.title || [];
const bodyLines = tooltip.body.map((b) => b.lines);
const tableHead = document.createElement("thead");
titleLines.forEach((title) => {
const tr = document.createElement("tr");
tr.style.borderWidth = 0;
const th = document.createElement("th");
th.style.borderWidth = 0;
const text = document.createTextNode(title);
th.appendChild(text);
tr.appendChild(th);
tableHead.appendChild(tr);
});
const tableBody = document.createElement("tbody");
bodyLines.forEach((body, i) => {
const tr2 = document.createElement("tr");
tr2.style.backgroundColor = "inherit";
tr2.style.borderWidth = 0;
const td2 = document.createElement("td");
td2.style.borderWidth = 0;
const tr1 = document.createElement("tr");
tr1.style.backgroundColor = "inherit";
tr1.style.borderWidth = 0;
const td1 = document.createElement("td");
td1.style.borderWidth = 0;
const text = document.createTextNode("Total Speeches: " + body);
const img = document.createElement("img");
img.src = speakerPicture.filter(
(speaker) => speaker["name"] == tooltip.title
)[0]["picture"];
td1.appendChild(text);
tr1.appendChild(td1);
td2.appendChild(img);
tr2.appendChild(td2);
tableBody.appendChild(tr1);
tableBody.appendChild(tr2);
});
const tableRoot = tooltipEl.querySelector("table");
// Remove old children
while (tableRoot.firstChild) {
tableRoot.firstChild.remove();
}
// Add new children
tableRoot.appendChild(tableHead);
tableRoot.appendChild(tableBody);
}
const { offsetLeft: positionX, offsetTop: positionY } = chart.canvas;
// Display, position, and set styles for font
tooltipEl.style.opacity = 1;
tooltipEl.style.left = positionX + tooltip.caretX + "px";
tooltipEl.style.top = positionY + tooltip.caretY + "px";
tooltipEl.style.font = tooltip.options.bodyFont.string;
tooltipEl.style.padding =
tooltip.options.padding + "px " + tooltip.options.padding + "px";
};
| getSpeakerDistributionSpeaker |
supervisor.rs | use alloc::collections::btree_map::BTreeMap as HashMap;
use alloc::sync::Arc;
use core::convert::Infallible;
use core::ops::Deref;
use core::time::Duration;
use std::sync::RwLock;
use crossbeam_channel::{unbounded, Receiver, Sender};
use itertools::Itertools;
use tracing::{debug, error, error_span, info, trace, warn};
use ibc::{
core::ics24_host::identifier::{ChainId, ChannelId, PortId},
events::IbcEvent,
Height,
};
use crate::{
chain::{handle::ChainHandle, HealthCheck},
config::{ChainConfig, Config, SharedConfig},
event::{
self,
monitor::{Error as EventError, ErrorDetail as EventErrorDetail, EventBatch},
},
object::Object,
registry::{Registry, SharedRegistry},
rest,
supervisor::scan::ScanMode,
util::{
lock::LockExt,
task::{spawn_background_task, Next, TaskError, TaskHandle},
try_recv_multiple,
},
worker::WorkerMap,
};
pub mod client_state_filter;
use client_state_filter::{FilterPolicy, Permission};
pub mod error;
pub use error::{Error, ErrorDetail};
pub mod dump_state;
use dump_state::SupervisorState;
pub mod scan;
pub mod spawn;
pub mod cmd;
use cmd::{CmdEffect, ConfigUpdate, SupervisorCmd};
use self::{scan::ChainScanner, spawn::SpawnContext};
type ArcBatch = Arc<event::monitor::Result<EventBatch>>;
type Subscription = Receiver<ArcBatch>;
/**
A wrapper around the SupervisorCmd sender so that we can
send stop signal to the supervisor before stopping the
chain drivers to prevent the supervisor from raising
errors caused by closed connections.
*/
pub struct SupervisorHandle {
pub sender: Sender<SupervisorCmd>,
tasks: Vec<TaskHandle>,
}
/// Options for the supervisor
#[derive(Debug)] |
/// Force a full scan of the chains for clients, connections, and channels,
/// even when an allow list is configured for a chain and the full scan could
/// be omitted.
pub force_full_scan: bool,
}
/**
Spawn a supervisor for testing purpose using the provided
[`SharedConfig`] and [`SharedRegistry`]. Returns a
[`SupervisorHandle`] that stops the supervisor when the
value is dropped.
*/
pub fn spawn_supervisor(
config: SharedConfig,
registry: SharedRegistry<impl ChainHandle>,
rest_rx: Option<rest::Receiver>,
options: SupervisorOptions,
) -> Result<SupervisorHandle, Error> {
let (sender, receiver) = unbounded();
let tasks = spawn_supervisor_tasks(config, registry, rest_rx, receiver, options)?;
Ok(SupervisorHandle { sender, tasks })
}
impl SupervisorHandle {
/**
Explicitly stop the running supervisor. This is useful in tests where
the supervisor has to be stopped and restarted explicitly.
Note that after stopping the supervisor, the only way to restart it
is by respawning a new supervisor using [`spawn_supervisor`].
*/
pub fn shutdown(self) {
for task in self.tasks {
// Send the shutdown signals in parallel
task.shutdown();
}
// Dropping the tasks will cause this to block until all tasks
// are terminated.
}
pub fn wait(self) {
for task in self.tasks {
task.join();
}
}
}
pub fn spawn_supervisor_tasks<Chain: ChainHandle>(
config: Arc<RwLock<Config>>,
registry: SharedRegistry<Chain>,
rest_rx: Option<rest::Receiver>,
cmd_rx: Receiver<SupervisorCmd>,
options: SupervisorOptions,
) -> Result<Vec<TaskHandle>, Error> {
if options.health_check {
health_check(&config.acquire_read(), &mut registry.write());
}
let workers = Arc::new(RwLock::new(WorkerMap::new()));
let client_state_filter = Arc::new(RwLock::new(FilterPolicy::default()));
let scan = chain_scanner(
&config.acquire_read(),
&mut registry.write(),
&mut client_state_filter.acquire_write(),
if options.force_full_scan {
ScanMode::Full
} else {
ScanMode::Auto
},
)
.scan_chains();
info!("Scanned chains:");
info!("{}", scan);
spawn_context(
&config.acquire_read(),
&mut registry.write(),
&mut workers.acquire_write(),
)
.spawn_workers(scan);
let subscriptions = Arc::new(RwLock::new(init_subscriptions(
&config.acquire_read(),
&mut registry.write(),
)?));
let batch_task = spawn_batch_worker(
config.clone(),
registry.clone(),
client_state_filter.clone(),
workers.clone(),
subscriptions.clone(),
);
let cmd_task = spawn_cmd_worker(
config.clone(),
registry.clone(),
client_state_filter,
workers.clone(),
subscriptions,
cmd_rx,
);
let mut tasks = vec![batch_task, cmd_task];
if let Some(rest_rx) = rest_rx {
let rest_task = spawn_rest_worker(config, registry, workers, rest_rx);
tasks.push(rest_task);
}
Ok(tasks)
}
fn spawn_batch_worker<Chain: ChainHandle>(
config: Arc<RwLock<Config>>,
registry: SharedRegistry<Chain>,
client_state_filter: Arc<RwLock<FilterPolicy>>,
workers: Arc<RwLock<WorkerMap>>,
subscriptions: Arc<RwLock<Vec<(Chain, Subscription)>>>,
) -> TaskHandle {
spawn_background_task(
tracing::Span::none(),
Some(Duration::from_millis(500)),
move || -> Result<Next, TaskError<Infallible>> {
if let Some((chain, batch)) = try_recv_multiple(&subscriptions.acquire_read()) {
handle_batch(
&config.acquire_read(),
&mut registry.write(),
&mut client_state_filter.acquire_write(),
&mut workers.acquire_write(),
chain.clone(),
batch,
);
}
Ok(Next::Continue)
},
)
}
pub fn spawn_cmd_worker<Chain: ChainHandle>(
config: Arc<RwLock<Config>>,
registry: SharedRegistry<Chain>,
client_state_filter: Arc<RwLock<FilterPolicy>>,
workers: Arc<RwLock<WorkerMap>>,
subscriptions: Arc<RwLock<Vec<(Chain, Subscription)>>>,
cmd_rx: Receiver<SupervisorCmd>,
) -> TaskHandle {
spawn_background_task(
error_span!("cmd"),
Some(Duration::from_millis(500)),
move || {
if let Ok(cmd) = cmd_rx.try_recv() {
match cmd {
SupervisorCmd::UpdateConfig(update) => {
let effect = update_config(
&mut config.acquire_write(),
&mut registry.write(),
&mut workers.acquire_write(),
&mut client_state_filter.acquire_write(),
*update,
);
if let CmdEffect::ConfigChanged = effect {
let new_subscriptions =
init_subscriptions(&config.acquire_read(), &mut registry.write());
match new_subscriptions {
Ok(subs) => {
*subscriptions.acquire_write() = subs;
}
Err(Error(ErrorDetail::NoChainsAvailable(_), _)) => (),
Err(e) => return Err(TaskError::Fatal(e)),
}
}
}
SupervisorCmd::DumpState(reply_to) => {
dump_state(®istry.read(), &workers.acquire_read(), reply_to);
}
}
}
Ok(Next::Continue)
},
)
}
pub fn spawn_rest_worker<Chain: ChainHandle>(
config: Arc<RwLock<Config>>,
registry: SharedRegistry<Chain>,
workers: Arc<RwLock<WorkerMap>>,
rest_rx: rest::Receiver,
) -> TaskHandle {
spawn_background_task(
error_span!("rest"),
Some(Duration::from_millis(500)),
move || -> Result<Next, TaskError<Infallible>> {
handle_rest_requests(
&config.acquire_read(),
®istry.read(),
&workers.acquire_read(),
&rest_rx,
);
Ok(Next::Continue)
},
)
}
/// Returns `true` if the relayer should filter based on
/// client state attributes, e.g., trust threshold.
/// Returns `false` otherwise.
fn client_filter_enabled(_config: &Config) -> bool {
// we currently always enable the client filter
true
}
/// Returns `true` if the relayer should filter based on
/// channel identifiers.
/// Returns `false` otherwise.
fn channel_filter_enabled(_config: &Config) -> bool {
// we currently always enable the channel filter
true
}
fn relay_packets_on_channel(
config: &Config,
chain_id: &ChainId,
port_id: &PortId,
channel_id: &ChannelId,
) -> bool {
// If filtering is disabled, then relay all channels
if !channel_filter_enabled(config) {
return true;
}
config.packets_on_channel_allowed(chain_id, port_id, channel_id)
}
fn relay_on_object<Chain: ChainHandle>(
config: &Config,
registry: &mut Registry<Chain>,
client_state_filter: &mut FilterPolicy,
chain_id: &ChainId,
object: &Object,
) -> bool {
// No filter is enabled, bail fast.
if !channel_filter_enabled(config) && !client_filter_enabled(config) {
return true;
}
// First, apply the channel filter
if let Object::Packet(u) = object {
if !relay_packets_on_channel(config, chain_id, u.src_port_id(), u.src_channel_id()) {
return false;
}
}
// Second, apply the client filter
let client_filter_outcome = match object {
Object::Client(client) => client_state_filter.control_client_object(registry, client),
Object::Connection(conn) => client_state_filter.control_conn_object(registry, conn),
Object::Channel(chan) => client_state_filter.control_chan_object(registry, chan),
Object::Packet(u) => client_state_filter.control_packet_object(registry, u),
};
match client_filter_outcome {
Ok(Permission::Allow) => true,
Ok(Permission::Deny) => {
warn!(
"client filter denies relaying on object {}",
object.short_name()
);
false
}
Err(e) => {
warn!(
"denying relaying on object {}, caused by: {}",
object.short_name(),
e
);
false
}
}
}
/// If `enabled`, build an `Object` using the provided `object_ctor`
/// and add the given `event` to the `collected` events for this `object`.
fn collect_event<F>(
collected: &mut CollectedEvents,
event: &IbcEvent,
enabled: bool,
object_ctor: F,
) where
F: FnOnce() -> Option<Object>,
{
if enabled {
if let Some(object) = object_ctor() {
collected
.per_object
.entry(object)
.or_default()
.push(event.clone());
}
}
}
pub fn collect_events(
config: &Config,
workers: &WorkerMap,
src_chain: &impl ChainHandle,
batch: &EventBatch,
) -> CollectedEvents {
let mut collected = CollectedEvents::new(batch.height, batch.chain_id.clone());
let mode = config.mode;
for event in &batch.events {
match event {
IbcEvent::NewBlock(_) => {
collected.new_block = Some(event.clone());
}
IbcEvent::UpdateClient(ref update) => {
collect_event(&mut collected, event, mode.clients.enabled, || {
// Collect update client events only if the worker exists
if let Ok(object) = Object::for_update_client(update, src_chain) {
workers.contains(&object).then(|| object)
} else {
None
}
});
}
IbcEvent::OpenInitConnection(..)
| IbcEvent::OpenTryConnection(..)
| IbcEvent::OpenAckConnection(..) => {
collect_event(&mut collected, event, mode.connections.enabled, || {
event
.connection_attributes()
.map(|attr| Object::connection_from_conn_open_events(attr, src_chain).ok())
.flatten()
});
}
IbcEvent::OpenInitChannel(..) | IbcEvent::OpenTryChannel(..) => {
collect_event(&mut collected, event, mode.channels.enabled, || {
event
.channel_attributes()
.map(|attr| Object::channel_from_chan_open_events(attr, src_chain).ok())
.flatten()
});
}
IbcEvent::OpenAckChannel(ref open_ack) => {
// Create client and packet workers here as channel end must be opened
collect_event(&mut collected, event, mode.clients.enabled, || {
Object::client_from_chan_open_events(open_ack.attributes(), src_chain).ok()
});
collect_event(&mut collected, event, mode.packets.enabled, || {
Object::packet_from_chan_open_events(open_ack.attributes(), src_chain).ok()
});
// If handshake message relaying is enabled create worker to send the MsgChannelOpenConfirm message
collect_event(&mut collected, event, mode.channels.enabled, || {
Object::channel_from_chan_open_events(open_ack.attributes(), src_chain).ok()
});
}
IbcEvent::OpenConfirmChannel(ref open_confirm) => {
// Create client worker here as channel end must be opened
collect_event(&mut collected, event, mode.clients.enabled, || {
Object::client_from_chan_open_events(open_confirm.attributes(), src_chain).ok()
});
collect_event(&mut collected, event, mode.packets.enabled, || {
Object::packet_from_chan_open_events(open_confirm.attributes(), src_chain).ok()
});
}
IbcEvent::SendPacket(ref packet) => {
collect_event(&mut collected, event, mode.packets.enabled, || {
Object::for_send_packet(packet, src_chain).ok()
});
}
IbcEvent::TimeoutPacket(ref packet) => {
collect_event(&mut collected, event, mode.packets.enabled, || {
Object::for_timeout_packet(packet, src_chain).ok()
});
}
IbcEvent::WriteAcknowledgement(ref packet) => {
collect_event(&mut collected, event, mode.packets.enabled, || {
Object::for_write_ack(packet, src_chain).ok()
});
}
IbcEvent::CloseInitChannel(ref packet) => {
collect_event(&mut collected, event, mode.packets.enabled, || {
Object::for_close_init_channel(packet, src_chain).ok()
});
}
_ => (),
}
}
collected
}
/// Create a new `SpawnContext` for spawning workers.
fn spawn_context<'a, Chain: ChainHandle>(
config: &'a Config,
registry: &'a mut Registry<Chain>,
workers: &'a mut WorkerMap,
) -> SpawnContext<'a, Chain> {
SpawnContext::new(config, registry, workers)
}
fn chain_scanner<'a, Chain: ChainHandle>(
config: &'a Config,
registry: &'a mut Registry<Chain>,
client_state_filter: &'a mut FilterPolicy,
full_scan: ScanMode,
) -> ChainScanner<'a, Chain> {
ChainScanner::new(config, registry, client_state_filter, full_scan)
}
/// Perform a health check on all connected chains
fn health_check<Chain: ChainHandle>(config: &Config, registry: &mut Registry<Chain>) {
use HealthCheck::*;
let chains = &config.chains;
for config in chains {
let id = &config.id;
let chain = registry.get_or_spawn(id);
match chain {
Ok(chain) => match chain.health_check() {
Ok(Healthy) => info!("[{}] chain is healthy", id),
Ok(Unhealthy(e)) => warn!("[{}] chain is unhealthy: {}", id, e),
Err(e) => error!("[{}] failed to perform health check: {}", id, e),
},
Err(e) => {
error!(
"skipping health check for chain {}, reason: failed to spawn chain runtime with error: {}",
config.id, e
);
}
}
}
}
/// Subscribe to the events emitted by the chains the supervisor is connected to.
fn init_subscriptions<Chain: ChainHandle>(
config: &Config,
registry: &mut Registry<Chain>,
) -> Result<Vec<(Chain, Subscription)>, Error> {
let chains = &config.chains;
let mut subscriptions = Vec::with_capacity(chains.len());
for chain_config in chains {
let chain = match registry.get_or_spawn(&chain_config.id) {
Ok(chain) => chain,
Err(e) => {
error!(
"failed to spawn chain runtime for {}: {}",
chain_config.id, e
);
continue;
}
};
match chain.subscribe() {
Ok(subscription) => subscriptions.push((chain, subscription)),
Err(e) => error!(
"failed to subscribe to events of {}: {}",
chain_config.id, e
),
}
}
// At least one chain runtime should be available, otherwise the supervisor
// cannot do anything and will hang indefinitely.
if registry.size() == 0 {
return Err(Error::no_chains_available());
}
Ok(subscriptions)
}
/// Dump the state of the supervisor into a [`SupervisorState`] value,
/// and send it back through the given channel.
fn dump_state<Chain: ChainHandle>(
registry: &Registry<Chain>,
workers: &WorkerMap,
reply_to: Sender<SupervisorState>,
) {
let state = state(registry, workers);
let _ = reply_to.try_send(state);
}
/// Returns a representation of the supervisor's internal state
/// as a [`SupervisorState`].
fn state<Chain: ChainHandle>(registry: &Registry<Chain>, workers: &WorkerMap) -> SupervisorState {
let chains = registry.chains().map(|c| c.id()).collect_vec();
SupervisorState::new(chains, workers.objects())
}
fn handle_rest_requests<Chain: ChainHandle>(
config: &Config,
registry: &Registry<Chain>,
workers: &WorkerMap,
rest_rx: &rest::Receiver,
) {
if let Some(cmd) = rest::process_incoming_requests(config, rest_rx) {
handle_rest_cmd(registry, workers, cmd);
}
}
fn handle_rest_cmd<Chain: ChainHandle>(
registry: &Registry<Chain>,
workers: &WorkerMap,
m: rest::Command,
) {
match m {
rest::Command::DumpState(reply) => {
let state = state(registry, workers);
reply
.send(Ok(state))
.unwrap_or_else(|e| error!("error replying to a REST request {}", e));
}
}
}
fn clear_pending_packets(workers: &mut WorkerMap, chain_id: &ChainId) -> Result<(), Error> {
for worker in workers.workers_for_chain(chain_id) {
worker.clear_pending_packets();
}
Ok(())
}
/// Process a batch of events received from a chain.
fn process_batch<Chain: ChainHandle>(
config: &Config,
registry: &mut Registry<Chain>,
client_state_filter: &mut FilterPolicy,
workers: &mut WorkerMap,
src_chain: Chain,
batch: &EventBatch,
) -> Result<(), Error> {
assert_eq!(src_chain.id(), batch.chain_id);
let height = batch.height;
let chain_id = batch.chain_id.clone();
let collected = collect_events(config, workers, &src_chain, batch);
// If there is a NewBlock event, forward this event first to any workers affected by it.
if let Some(IbcEvent::NewBlock(new_block)) = collected.new_block {
workers.notify_new_block(&src_chain.id(), height, new_block);
}
// Forward the IBC events.
for (object, events) in collected.per_object.into_iter() {
if !relay_on_object(
config,
registry,
client_state_filter,
&src_chain.id(),
&object,
) {
trace!(
"skipping events for '{}'. \
reason: filtering is enabled and channel does not match any allowed channels",
object.short_name()
);
continue;
}
if events.is_empty() {
continue;
}
let src = registry
.get_or_spawn(object.src_chain_id())
.map_err(Error::spawn)?;
let dst = registry
.get_or_spawn(object.dst_chain_id())
.map_err(Error::spawn)?;
let worker = workers.get_or_spawn(object, src, dst, config);
worker.send_events(height, events, chain_id.clone());
}
Ok(())
}
/// Process the given batch if it does not contain any errors,
/// output the errors on the console otherwise.
fn handle_batch<Chain: ChainHandle>(
config: &Config,
registry: &mut Registry<Chain>,
client_state_filter: &mut FilterPolicy,
workers: &mut WorkerMap,
chain: Chain,
batch: ArcBatch,
) {
let chain_id = chain.id();
match batch.deref() {
Ok(batch) => {
let _ = process_batch(config, registry, client_state_filter, workers, chain, batch)
.map_err(|e| error!("[{}] error during batch processing: {}", chain_id, e));
}
Err(EventError(EventErrorDetail::SubscriptionCancelled(_), _)) => {
warn!(chain.id = %chain_id, "event subscription was cancelled, clearing pending packets");
let _ = clear_pending_packets(workers, &chain_id).map_err(|e| {
error!(
"[{}] error during clearing pending packets: {}",
chain_id, e
)
});
}
Err(e) => {
error!("[{}] error in receiving event batch: {}", chain_id, e)
}
}
}
/// Remove the given chain to the configuration and spawn the associated workers.
/// Will not have any effect if the chain was not already present in the config.
///
/// If the removal had any effect, returns [`CmdEffect::ConfigChanged`] as
/// subscriptions need to be reset to take into account the newly added chain.
fn remove_chain<Chain: ChainHandle>(
config: &mut Config,
registry: &mut Registry<Chain>,
workers: &mut WorkerMap,
id: &ChainId,
) -> CmdEffect {
if !config.has_chain(id) {
info!(chain = %id, "skipping removal of non-existing chain");
return CmdEffect::Nothing;
}
info!(chain = %id, "removing existing chain");
config.chains.retain(|c| &c.id != id);
debug!(chain = %id, "shutting down workers");
let mut ctx = spawn_context(config, registry, workers);
ctx.shutdown_workers_for_chain(id);
debug!(chain = %id, "shutting down chain runtime");
registry.shutdown(id);
CmdEffect::ConfigChanged
}
/// Add the given chain to the configuration and spawn the associated workers.
/// Will not have any effect if the chain is already present in the config.
///
/// If the addition had any effect, returns [`CmdEffect::ConfigChanged`] as
/// subscriptions need to be reset to take into account the newly added chain.
fn add_chain<Chain: ChainHandle>(
config: &mut Config,
registry: &mut Registry<Chain>,
workers: &mut WorkerMap,
client_state_filter: &mut FilterPolicy,
chain_config: ChainConfig,
) -> CmdEffect {
let id = chain_config.id.clone();
if config.has_chain(&id) {
info!(chain = %id, "skipping addition of already existing chain");
return CmdEffect::Nothing;
}
info!(chain = %id, "adding new chain");
config.chains.push(chain_config.clone());
debug!(chain = %id, "spawning chain runtime");
if let Err(e) = registry.spawn(&id) {
error!(
"failed to add chain {} because of failure to spawn the chain runtime: {}",
id, e
);
// Remove the newly added config
config.chains.retain(|c| c.id != id);
return CmdEffect::Nothing;
}
debug!(chain = %id, "scanning chain");
let scan_result = chain_scanner(config, registry, client_state_filter, ScanMode::Auto)
.scan_chain(&chain_config);
let scan = match scan_result {
Ok(scan) => scan,
Err(e) => {
error!("failed to scan chain {}: {}", id, e);
// Remove the newly added config
config.chains.retain(|c| c.id != id);
return CmdEffect::Nothing;
}
};
debug!(chain = %id, "spawning workers");
let mut ctx = spawn_context(config, registry, workers);
ctx.spawn_workers_for_chain(scan);
CmdEffect::ConfigChanged
}
/// Update the given chain configuration, by removing it with
/// [`Supervisor::remove_chain`] and adding the updated
/// chain config with [`Supervisor::remove_chain`].
///
/// If the update had any effect, returns [`CmdEffect::ConfigChanged`] as
/// subscriptions need to be reset to take into account the newly added chain.
fn update_chain<Chain: ChainHandle>(
config: &mut Config,
registry: &mut Registry<Chain>,
workers: &mut WorkerMap,
client_state_filter: &mut FilterPolicy,
chain_config: ChainConfig,
) -> CmdEffect {
info!(chain = %chain_config.id, "updating existing chain");
let removed = remove_chain(config, registry, workers, &chain_config.id);
let added = add_chain(config, registry, workers, client_state_filter, chain_config);
removed.or(added)
}
/// Apply the given configuration update.
///
/// Returns an [`CmdEffect`] which instructs the caller as to
/// whether or not the event subscriptions needs to be reset or not.
fn update_config<Chain: ChainHandle>(
config: &mut Config,
registry: &mut Registry<Chain>,
workers: &mut WorkerMap,
client_state_filter: &mut FilterPolicy,
update: ConfigUpdate,
) -> CmdEffect {
match update {
ConfigUpdate::Add(chain_config) => {
add_chain(config, registry, workers, client_state_filter, chain_config)
}
ConfigUpdate::Remove(id) => remove_chain(config, registry, workers, &id),
ConfigUpdate::Update(chain_config) => {
update_chain(config, registry, workers, client_state_filter, chain_config)
}
}
}
/// Describes the result of [`collect_events`].
#[derive(Clone, Debug)]
pub struct CollectedEvents {
/// The height at which these events were emitted from the chain.
pub height: Height,
/// The chain from which the events were emitted.
pub chain_id: ChainId,
/// [`NewBlock`](ibc::events::IbcEventType::NewBlock) event
/// collected from the [`EventBatch`].
pub new_block: Option<IbcEvent>,
/// Mapping between [`Object`]s and their associated [`IbcEvent`]s.
pub per_object: HashMap<Object, Vec<IbcEvent>>,
}
impl CollectedEvents {
pub fn new(height: Height, chain_id: ChainId) -> Self {
Self {
height,
chain_id,
new_block: Default::default(),
per_object: Default::default(),
}
}
/// Whether the collected events include a
/// [`NewBlock`](ibc::events::IbcEventType::NewBlock) event.
pub fn has_new_block(&self) -> bool {
self.new_block.is_some()
}
} | pub struct SupervisorOptions {
/// Perform a health check of all chains we connect to
pub health_check: bool, |
row-drag.directive.ts | import { Directive, Input, OnDestroy, NgModule, TemplateRef } from '@angular/core';
import { IgxDragDirective } from '../directives/drag-drop/drag-drop.directive';
import { KEYS } from '../core/utils';
import { fromEvent, Subscription } from 'rxjs';
import { IgxRowDirective, IgxGridBaseDirective } from './grid';
import { IRowDragStartEventArgs, IRowDragEndEventArgs } from './common/events';
import { GridType } from './common/grid.interface';
import { IgxHierarchicalRowComponent } from './hierarchical-grid/hierarchical-row.component';
const ghostBackgroundClass = 'igx-grid__tr--ghost';
const gridCellClass = 'igx-grid__td';
const rowSelectedClass = 'igx-grid__tr--selected';
const cellSelectedClass = 'igx-grid__td--selected';
const cellActiveClass = 'igx-grid__td--active';
/**
* @hidden
*/
@Directive({
selector: '[igxRowDrag]'
})
export class IgxRowDragDirective extends IgxDragDirective implements OnDestroy {
private row: IgxRowDirective<IgxGridBaseDirective & GridType>;
private subscription$: Subscription;
private _rowDragStarted = false;
@Input('igxRowDrag')
set data(val) {
this.row = val;
}
get data() {
return this.row;
}
public onPointerDown(event) {
event.preventDefault();
this._rowDragStarted = false;
this._removeOnDestroy = false;
super.onPointerDown(event);
}
public onPointerMove(event) {
super.onPointerMove(event);
if (this._dragStarted && !this._rowDragStarted) {
this._rowDragStarted = true;
const args: IRowDragStartEventArgs = {
dragDirective: this,
dragData: this.row,
cancel: false,
owner: this.row.grid
};
this.row.grid.onRowDragStart.emit(args);
if (args.cancel) {
this.ghostElement.parentNode.removeChild(this.ghostElement);
this.ghostElement = null;
this._dragStarted = false;
this._clicked = false;
return;
}
this.row.dragging = true;
this.row.grid.rowDragging = true;
this.row.grid.markForCheck();
this.subscription$ = fromEvent(this.row.grid.document.defaultView, 'keydown').subscribe((ev: KeyboardEvent) => {
if (ev.key === KEYS.ESCAPE || ev.key === KEYS.ESCAPE_IE) {
this._lastDropArea = false;
this.onPointerUp(event);
}
});
}
}
public onPointerUp(event) {
if (!this._clicked) {
return;
}
const args: IRowDragEndEventArgs = {
dragDirective: this,
dragData: this.row,
animation: false,
owner: this.row.grid
};
this.zone.run(() => {
this.row.grid.onRowDragEnd.emit(args);
});
const dropArea = this._lastDropArea;
super.onPointerUp(event);
if (!dropArea && this.ghostElement) {
this.ghostElement.addEventListener('transitionend', this.transitionEndEvent, false);
} else {
this.endDragging();
}
}
protected createGhost(pageX, pageY) {
this.row.grid.endEdit(true);
this.row.grid.markForCheck();
this.ghostContext = {
$implicit: this.row.rowData,
data: this.row.rowData,
grid: this.row.grid
};
super.createGhost(pageX, pageY, this.row.nativeElement);
// check if there is an expander icon and create the ghost at the corresponding position
if (this.isHierarchicalGrid) {
const row = this.row as IgxHierarchicalRowComponent;
if (row.expander) {
const expanderWidth = row.expander.nativeElement.getBoundingClientRect().width;
this._ghostHostX += expanderWidth; |
const ghost = this.ghostElement;
const gridRect = this.row.grid.nativeElement.getBoundingClientRect();
const rowRect = this.row.nativeElement.getBoundingClientRect();
ghost.style.overflow = 'hidden';
ghost.style.width = gridRect.width + 'px';
ghost.style.height = rowRect.height + 'px';
this.renderer.addClass(ghost, ghostBackgroundClass);
this.renderer.removeClass(ghost, rowSelectedClass);
const ghostCells = ghost.getElementsByClassName(gridCellClass);
for (let index = 0; index < ghostCells.length; index++) {
this.renderer.removeClass(ghostCells[index], cellSelectedClass);
this.renderer.removeClass(ghostCells[index], cellActiveClass);
}
}
private _unsubscribe() {
if (this.subscription$ && !this.subscription$.closed) {
this.subscription$.unsubscribe();
}
}
private endDragging() {
this.onTransitionEnd(null);
this.row.dragging = false;
this.row.grid.rowDragging = false;
this.row.grid.markForCheck();
this._unsubscribe();
}
private transitionEndEvent = (evt?) => {
if (this.ghostElement) {
this.ghostElement.removeEventListener('transitionend', this.transitionEndEvent, false);
}
this.endDragging();
}
private get isHierarchicalGrid() {
return this.row.grid.nativeElement.tagName.toLowerCase() === 'igx-hierarchical-grid';
}
}
/**
* @hidden
*/
@Directive({
selector: '[igxDragIndicatorIcon]'
})
export class IgxDragIndicatorIconDirective {
}
/**
* @hidden
*/
@Directive({
selector: '[igxRowDragGhost]'
})
export class IgxRowDragGhostDirective {
constructor(public templateRef: TemplateRef<any>) { }
}
@NgModule({
declarations: [IgxRowDragDirective, IgxDragIndicatorIconDirective, IgxRowDragGhostDirective],
entryComponents: [],
exports: [IgxRowDragDirective, IgxDragIndicatorIconDirective, IgxRowDragGhostDirective],
imports: []
})
export class IgxRowDragModule {
} | }
} |
rds_mysql_database.go | // Copyright 2019 Yunion
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package qcloud
import (
"fmt"
"yunion.io/x/pkg/errors"
api "yunion.io/x/onecloud/pkg/apis/compute"
"yunion.io/x/onecloud/pkg/cloudprovider"
"yunion.io/x/onecloud/pkg/multicloud"
)
type SMySQLInstanceDatabase struct {
rds *SMySQLInstance
multicloud.SResourceBase
CharacterSet string
DatabaseName string
}
func (self *SMySQLInstanceDatabase) GetStatus() string {
return api.DBINSTANCE_DATABASE_RUNNING
}
func (self *SMySQLInstanceDatabase) GetId() string { | return self.DatabaseName
}
func (self *SMySQLInstanceDatabase) GetName() string {
return self.DatabaseName
}
func (self *SMySQLInstanceDatabase) GetGlobalId() string {
return self.DatabaseName
}
func (self *SMySQLInstanceDatabase) GetCharacterSet() string {
return self.CharacterSet
}
func (self *SMySQLInstanceDatabase) Delete() error {
return cloudprovider.ErrNotSupported
}
func (self *SRegion) DescribeMySQLDatabases(instanceId string, offset, limit int) ([]SMySQLInstanceDatabase, int, error) {
if limit < 1 || limit > 100 {
limit = 100
}
params := map[string]string{
"Offset": fmt.Sprintf("%d", offset),
"Limit": fmt.Sprintf("%d", limit),
"InstanceId": instanceId,
}
resp, err := self.cdbRequest("DescribeDatabases", params)
if err != nil {
return nil, 0, errors.Wrapf(err, "DescribeDatabases")
}
databases := []SMySQLInstanceDatabase{}
err = resp.Unmarshal(&databases, "DatabaseList")
if err != nil {
return nil, 0, errors.Wrapf(err, "resp.Unmarshal")
}
totalCount, _ := resp.Float("TotalCount")
return databases, int(totalCount), nil
}
func (rds *SMySQLInstance) GetIDBInstanceDatabases() ([]cloudprovider.ICloudDBInstanceDatabase, error) {
ret := []cloudprovider.ICloudDBInstanceDatabase{}
for {
part, total, err := rds.region.DescribeMySQLDatabases(rds.InstanceId, len(ret), 100)
if err != nil {
return nil, errors.Wrapf(err, "DescribeMySQLDatabases")
}
for i := range part {
part[i].rds = rds
ret = append(ret, &part[i])
}
if len(ret) >= total {
break
}
}
return ret, nil
} | |
gateway-s3.go | /*
* Minio Cloud Storage, (C) 2017, 2018 Minio, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package s3
import (
"context"
"encoding/json"
"io"
"math/rand"
"net/http"
"strings"
"time"
"github.com/minio/cli"
miniogo "github.com/minio/minio-go"
"github.com/minio/minio-go/pkg/credentials"
minio "github.com/minio/minio/cmd"
"github.com/minio/minio-go/pkg/encrypt"
"github.com/minio/minio-go/pkg/s3utils"
"github.com/minio/minio/cmd/logger"
"github.com/minio/minio/pkg/auth"
"github.com/minio/minio/pkg/policy"
)
const (
s3Backend = "s3"
)
func init() {
const s3GatewayTemplate = `NAME:
{{.HelpName}} - {{.Usage}}
USAGE:
{{.HelpName}} {{if .VisibleFlags}}[FLAGS]{{end}} [ENDPOINT]
{{if .VisibleFlags}}
FLAGS:
{{range .VisibleFlags}}{{.}}
{{end}}{{end}}
ENDPOINT:
S3 server endpoint. Default ENDPOINT is https://s3.amazonaws.com
ENVIRONMENT VARIABLES:
ACCESS:
MINIO_ACCESS_KEY: Username or access key of S3 storage.
MINIO_SECRET_KEY: Password or secret key of S3 storage.
BROWSER:
MINIO_BROWSER: To disable web browser access, set this value to "off".
DOMAIN:
MINIO_DOMAIN: To enable virtual-host-style requests, set this value to Minio host domain name.
CACHE:
MINIO_CACHE_DRIVES: List of mounted drives or directories delimited by ";".
MINIO_CACHE_EXCLUDE: List of cache exclusion patterns delimited by ";".
MINIO_CACHE_EXPIRY: Cache expiry duration in days.
MINIO_CACHE_MAXUSE: Maximum permitted usage of the cache in percentage (0-100).
LOGGER:
MINIO_LOGGER_HTTP_ENDPOINT: HTTP endpoint URL to log all incoming requests.
EXAMPLES:
1. Start minio gateway server for AWS S3 backend.
$ export MINIO_ACCESS_KEY=accesskey
$ export MINIO_SECRET_KEY=secretkey
$ {{.HelpName}}
2. Start minio gateway server for S3 backend on custom endpoint.
$ export MINIO_ACCESS_KEY=Q3AM3UQ867SPQQA43P2F
$ export MINIO_SECRET_KEY=zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG
$ {{.HelpName}} https://play.minio.io:9000
3. Start minio gateway server for AWS S3 backend logging all requests to http endpoint.
$ export MINIO_ACCESS_KEY=Q3AM3UQ867SPQQA43P2F
$ export MINIO_SECRET_KEY=zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG
$ export MINIO_LOGGER_HTTP_ENDPOINT="http://localhost:8000/"
$ {{.HelpName}} https://play.minio.io:9000
4. Start minio gateway server for AWS S3 backend with edge caching enabled.
$ export MINIO_ACCESS_KEY=accesskey
$ export MINIO_SECRET_KEY=secretkey
$ export MINIO_CACHE_DRIVES="/mnt/drive1;/mnt/drive2;/mnt/drive3;/mnt/drive4"
$ export MINIO_CACHE_EXCLUDE="bucket1/*;*.png"
$ export MINIO_CACHE_EXPIRY=40
$ export MINIO_CACHE_MAXUSE=80
$ {{.HelpName}}
4. Start minio gateway server for AWS S3 backend using AWS environment variables.
NOTE: The access and secret key in this case will authenticate with Minio instead
of AWS and AWS envs will be used to authenticate to AWS S3.
$ export AWS_ACCESS_KEY_ID=aws_access_key
$ export AWS_SECRET_ACCESS_KEY=aws_secret_key
$ export MINIO_ACCESS_KEY=accesskey
$ export MINIO_SECRET_KEY=secretkey
$ {{.HelpName}}
`
minio.RegisterGatewayCommand(cli.Command{
Name: s3Backend,
Usage: "Amazon Simple Storage Service (S3)",
Action: s3GatewayMain,
CustomHelpTemplate: s3GatewayTemplate,
HideHelpCommand: true,
})
}
// Handler for 'minio gateway s3' command line.
func | (ctx *cli.Context) {
args := ctx.Args()
if !ctx.Args().Present() {
args = cli.Args{"https://s3.amazonaws.com"}
}
// Validate gateway arguments.
logger.FatalIf(minio.ValidateGatewayArguments(ctx.GlobalString("address"), args.First()), "Invalid argument")
// Start the gateway..
minio.StartGateway(ctx, &S3{args.First()})
}
// S3 implements Gateway.
type S3 struct {
host string
}
// Name implements Gateway interface.
func (g *S3) Name() string {
return s3Backend
}
const letterBytes = "abcdefghijklmnopqrstuvwxyz01234569"
const (
letterIdxBits = 6 // 6 bits to represent a letter index
letterIdxMask = 1<<letterIdxBits - 1 // All 1-bits, as many as letterIdxBits
letterIdxMax = 63 / letterIdxBits // # of letter indices fitting in 63 bits
)
// randString generates random names and prepends them with a known prefix.
func randString(n int, src rand.Source, prefix string) string {
b := make([]byte, n)
// A rand.Int63() generates 63 random bits, enough for letterIdxMax letters!
for i, cache, remain := n-1, src.Int63(), letterIdxMax; i >= 0; {
if remain == 0 {
cache, remain = src.Int63(), letterIdxMax
}
if idx := int(cache & letterIdxMask); idx < len(letterBytes) {
b[i] = letterBytes[idx]
i--
}
cache >>= letterIdxBits
remain--
}
return prefix + string(b[0:30-len(prefix)])
}
// newS3 - Initializes a new client by auto probing S3 server signature.
func newS3(url string) (*miniogo.Core, error) {
if url == "" {
url = "https://s3.amazonaws.com"
}
// Override default params if the host is provided
endpoint, secure, err := minio.ParseGatewayEndpoint(url)
if err != nil {
return nil, err
}
// Chains all credential types, in the following order:
// - AWS env vars (i.e. AWS_ACCESS_KEY_ID)
// - AWS creds file (i.e. AWS_SHARED_CREDENTIALS_FILE or ~/.aws/credentials)
// - IAM profile based credentials. (performs an HTTP
// call to a pre-defined endpoint, only valid inside
// configured ec2 instances)
// - Static credentials provided by user (i.e. MINIO_ACCESS_KEY)
creds := credentials.NewChainCredentials([]credentials.Provider{
&credentials.EnvAWS{},
&credentials.FileAWSCredentials{},
&credentials.IAM{
Client: &http.Client{
Transport: minio.NewCustomHTTPTransport(),
},
},
&credentials.EnvMinio{},
})
clnt, err := miniogo.NewWithCredentials(endpoint, creds, secure, "")
if err != nil {
return nil, err
}
// Set custom transport
clnt.SetCustomTransport(minio.NewCustomHTTPTransport())
probeBucketName := randString(60, rand.NewSource(time.Now().UnixNano()), "probe-bucket-sign-")
// Check if the provided keys are valid.
if _, err = clnt.BucketExists(probeBucketName); err != nil {
return nil, err
}
return &miniogo.Core{Client: clnt}, nil
}
// NewGatewayLayer returns s3 ObjectLayer.
func (g *S3) NewGatewayLayer(creds auth.Credentials) (minio.ObjectLayer, error) {
// creds are ignored here, since S3 gateway implements chaining
// all credentials.
clnt, err := newS3(g.host)
if err != nil {
return nil, err
}
s := s3Objects{
Client: clnt,
}
// Enables single encyption of KMS is configured.
if minio.GlobalKMS != nil {
encS := s3EncObjects{s}
// Start stale enc multipart uploads cleanup routine.
go encS.cleanupStaleEncMultipartUploads(context.Background(),
minio.GlobalMultipartCleanupInterval, minio.GlobalMultipartExpiry, minio.GlobalServiceDoneCh)
return &encS, nil
}
return &s, nil
}
// Production - s3 gateway is production ready.
func (g *S3) Production() bool {
return true
}
// s3Objects implements gateway for Minio and S3 compatible object storage servers.
type s3Objects struct {
minio.GatewayUnsupported
Client *miniogo.Core
}
// Shutdown saves any gateway metadata to disk
// if necessary and reload upon next restart.
func (l *s3Objects) Shutdown(ctx context.Context) error {
return nil
}
// StorageInfo is not relevant to S3 backend.
func (l *s3Objects) StorageInfo(ctx context.Context) (si minio.StorageInfo) {
return si
}
// MakeBucket creates a new container on S3 backend.
func (l *s3Objects) MakeBucketWithLocation(ctx context.Context, bucket, location string) error {
// Verify if bucket name is valid.
// We are using a separate helper function here to validate bucket
// names instead of IsValidBucketName() because there is a possibility
// that certains users might have buckets which are non-DNS compliant
// in us-east-1 and we might severely restrict them by not allowing
// access to these buckets.
// Ref - http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html
if s3utils.CheckValidBucketName(bucket) != nil {
return minio.BucketNameInvalid{Bucket: bucket}
}
err := l.Client.MakeBucket(bucket, location)
if err != nil {
return minio.ErrorRespToObjectError(err, bucket)
}
return err
}
// GetBucketInfo gets bucket metadata..
func (l *s3Objects) GetBucketInfo(ctx context.Context, bucket string) (bi minio.BucketInfo, e error) {
buckets, err := l.Client.ListBuckets()
if err != nil {
return bi, minio.ErrorRespToObjectError(err, bucket)
}
for _, bi := range buckets {
if bi.Name != bucket {
continue
}
return minio.BucketInfo{
Name: bi.Name,
Created: bi.CreationDate,
}, nil
}
return bi, minio.BucketNotFound{Bucket: bucket}
}
// ListBuckets lists all S3 buckets
func (l *s3Objects) ListBuckets(ctx context.Context) ([]minio.BucketInfo, error) {
buckets, err := l.Client.ListBuckets()
if err != nil {
return nil, minio.ErrorRespToObjectError(err)
}
b := make([]minio.BucketInfo, len(buckets))
for i, bi := range buckets {
b[i] = minio.BucketInfo{
Name: bi.Name,
Created: bi.CreationDate,
}
}
return b, err
}
// DeleteBucket deletes a bucket on S3
func (l *s3Objects) DeleteBucket(ctx context.Context, bucket string) error {
err := l.Client.RemoveBucket(bucket)
if err != nil {
return minio.ErrorRespToObjectError(err, bucket)
}
return nil
}
// ListObjects lists all blobs in S3 bucket filtered by prefix
func (l *s3Objects) ListObjects(ctx context.Context, bucket string, prefix string, marker string, delimiter string, maxKeys int) (loi minio.ListObjectsInfo, e error) {
result, err := l.Client.ListObjects(bucket, prefix, marker, delimiter, maxKeys)
if err != nil {
return loi, minio.ErrorRespToObjectError(err, bucket)
}
return minio.FromMinioClientListBucketResult(bucket, result), nil
}
// ListObjectsV2 lists all blobs in S3 bucket filtered by prefix
func (l *s3Objects) ListObjectsV2(ctx context.Context, bucket, prefix, continuationToken, delimiter string, maxKeys int, fetchOwner bool, startAfter string) (loi minio.ListObjectsV2Info, e error) {
result, err := l.Client.ListObjectsV2(bucket, prefix, continuationToken, fetchOwner, delimiter, maxKeys, startAfter)
if err != nil {
return loi, minio.ErrorRespToObjectError(err, bucket)
}
return minio.FromMinioClientListBucketV2Result(bucket, result), nil
}
// GetObjectNInfo - returns object info and locked object ReadCloser
func (l *s3Objects) GetObjectNInfo(ctx context.Context, bucket, object string, rs *minio.HTTPRangeSpec, h http.Header, lockType minio.LockType, opts minio.ObjectOptions) (gr *minio.GetObjectReader, err error) {
var objInfo minio.ObjectInfo
objInfo, err = l.GetObjectInfo(ctx, bucket, object, opts)
if err != nil {
return nil, minio.ErrorRespToObjectError(err, bucket, object)
}
var startOffset, length int64
startOffset, length, err = rs.GetOffsetLength(objInfo.Size)
if err != nil {
return nil, minio.ErrorRespToObjectError(err, bucket, object)
}
pr, pw := io.Pipe()
go func() {
err := l.GetObject(ctx, bucket, object, startOffset, length, pw, objInfo.ETag, opts)
pw.CloseWithError(err)
}()
// Setup cleanup function to cause the above go-routine to
// exit in case of partial read
pipeCloser := func() { pr.Close() }
return minio.NewGetObjectReaderFromReader(pr, objInfo, pipeCloser), nil
}
// GetObject reads an object from S3. Supports additional
// parameters like offset and length which are synonymous with
// HTTP Range requests.
//
// startOffset indicates the starting read location of the object.
// length indicates the total length of the object.
func (l *s3Objects) GetObject(ctx context.Context, bucket string, key string, startOffset int64, length int64, writer io.Writer, etag string, o minio.ObjectOptions) error {
if length < 0 && length != -1 {
return minio.ErrorRespToObjectError(minio.InvalidRange{}, bucket, key)
}
opts := miniogo.GetObjectOptions{}
opts.ServerSideEncryption = o.ServerSideEncryption
if startOffset >= 0 && length >= 0 {
if err := opts.SetRange(startOffset, startOffset+length-1); err != nil {
return minio.ErrorRespToObjectError(err, bucket, key)
}
}
object, _, err := l.Client.GetObject(bucket, key, opts)
if err != nil {
return minio.ErrorRespToObjectError(err, bucket, key)
}
defer object.Close()
if _, err := io.Copy(writer, object); err != nil {
return minio.ErrorRespToObjectError(err, bucket, key)
}
return nil
}
// GetObjectInfo reads object info and replies back ObjectInfo
func (l *s3Objects) GetObjectInfo(ctx context.Context, bucket string, object string, opts minio.ObjectOptions) (objInfo minio.ObjectInfo, err error) {
oi, err := l.Client.StatObject(bucket, object, miniogo.StatObjectOptions{
GetObjectOptions: miniogo.GetObjectOptions{
ServerSideEncryption: opts.ServerSideEncryption,
},
})
if err != nil {
return minio.ObjectInfo{}, minio.ErrorRespToObjectError(err, bucket, object)
}
return minio.FromMinioClientObjectInfo(bucket, oi), nil
}
// PutObject creates a new object with the incoming data,
func (l *s3Objects) PutObject(ctx context.Context, bucket string, object string, r *minio.PutObjReader, metadata map[string]string, opts minio.ObjectOptions) (objInfo minio.ObjectInfo, err error) {
data := r.Reader
oi, err := l.Client.PutObject(bucket, object, data, data.Size(), data.MD5Base64String(), data.SHA256HexString(), minio.ToMinioClientMetadata(metadata), opts.ServerSideEncryption)
if err != nil {
return objInfo, minio.ErrorRespToObjectError(err, bucket, object)
}
// On success, populate the key & metadata so they are present in the notification
oi.Key = object
oi.Metadata = minio.ToMinioClientObjectInfoMetadata(metadata)
return minio.FromMinioClientObjectInfo(bucket, oi), nil
}
// CopyObject copies an object from source bucket to a destination bucket.
func (l *s3Objects) CopyObject(ctx context.Context, srcBucket string, srcObject string, dstBucket string, dstObject string, srcInfo minio.ObjectInfo, srcOpts, dstOpts minio.ObjectOptions) (objInfo minio.ObjectInfo, err error) {
// Set this header such that following CopyObject() always sets the right metadata on the destination.
// metadata input is already a trickled down value from interpreting x-amz-metadata-directive at
// handler layer. So what we have right now is supposed to be applied on the destination object anyways.
// So preserve it by adding "REPLACE" directive to save all the metadata set by CopyObject API.
srcInfo.UserDefined["x-amz-metadata-directive"] = "REPLACE"
srcInfo.UserDefined["x-amz-copy-source-if-match"] = srcInfo.ETag
header := make(http.Header)
if srcOpts.ServerSideEncryption != nil {
encrypt.SSECopy(srcOpts.ServerSideEncryption).Marshal(header)
}
if dstOpts.ServerSideEncryption != nil {
dstOpts.ServerSideEncryption.Marshal(header)
}
for k, v := range header {
srcInfo.UserDefined[k] = v[0]
}
if _, err = l.Client.CopyObject(srcBucket, srcObject, dstBucket, dstObject, srcInfo.UserDefined); err != nil {
return objInfo, minio.ErrorRespToObjectError(err, srcBucket, srcObject)
}
return l.GetObjectInfo(ctx, dstBucket, dstObject, dstOpts)
}
// DeleteObject deletes a blob in bucket
func (l *s3Objects) DeleteObject(ctx context.Context, bucket string, object string) error {
err := l.Client.RemoveObject(bucket, object)
if err != nil {
return minio.ErrorRespToObjectError(err, bucket, object)
}
return nil
}
// ListMultipartUploads lists all multipart uploads.
func (l *s3Objects) ListMultipartUploads(ctx context.Context, bucket string, prefix string, keyMarker string, uploadIDMarker string, delimiter string, maxUploads int) (lmi minio.ListMultipartsInfo, e error) {
result, err := l.Client.ListMultipartUploads(bucket, prefix, keyMarker, uploadIDMarker, delimiter, maxUploads)
if err != nil {
return lmi, err
}
return minio.FromMinioClientListMultipartsInfo(result), nil
}
// NewMultipartUpload upload object in multiple parts
func (l *s3Objects) NewMultipartUpload(ctx context.Context, bucket string, object string, metadata map[string]string, o minio.ObjectOptions) (uploadID string, err error) {
// Create PutObject options
opts := miniogo.PutObjectOptions{UserMetadata: metadata, ServerSideEncryption: o.ServerSideEncryption}
uploadID, err = l.Client.NewMultipartUpload(bucket, object, opts)
if err != nil {
return uploadID, minio.ErrorRespToObjectError(err, bucket, object)
}
return uploadID, nil
}
// PutObjectPart puts a part of object in bucket
func (l *s3Objects) PutObjectPart(ctx context.Context, bucket string, object string, uploadID string, partID int, r *minio.PutObjReader, opts minio.ObjectOptions) (pi minio.PartInfo, e error) {
data := r.Reader
info, err := l.Client.PutObjectPart(bucket, object, uploadID, partID, data, data.Size(), data.MD5Base64String(), data.SHA256HexString(), opts.ServerSideEncryption)
if err != nil {
return pi, minio.ErrorRespToObjectError(err, bucket, object)
}
return minio.FromMinioClientObjectPart(info), nil
}
// CopyObjectPart creates a part in a multipart upload by copying
// existing object or a part of it.
func (l *s3Objects) CopyObjectPart(ctx context.Context, srcBucket, srcObject, destBucket, destObject, uploadID string,
partID int, startOffset, length int64, srcInfo minio.ObjectInfo, srcOpts, dstOpts minio.ObjectOptions) (p minio.PartInfo, err error) {
srcInfo.UserDefined = map[string]string{
"x-amz-copy-source-if-match": srcInfo.ETag,
}
header := make(http.Header)
if srcOpts.ServerSideEncryption != nil {
encrypt.SSECopy(srcOpts.ServerSideEncryption).Marshal(header)
}
if dstOpts.ServerSideEncryption != nil {
dstOpts.ServerSideEncryption.Marshal(header)
}
for k, v := range header {
srcInfo.UserDefined[k] = v[0]
}
completePart, err := l.Client.CopyObjectPart(srcBucket, srcObject, destBucket, destObject,
uploadID, partID, startOffset, length, srcInfo.UserDefined)
if err != nil {
return p, minio.ErrorRespToObjectError(err, srcBucket, srcObject)
}
p.PartNumber = completePart.PartNumber
p.ETag = completePart.ETag
return p, nil
}
// ListObjectParts returns all object parts for specified object in specified bucket
func (l *s3Objects) ListObjectParts(ctx context.Context, bucket string, object string, uploadID string, partNumberMarker int, maxParts int, opts minio.ObjectOptions) (lpi minio.ListPartsInfo, e error) {
result, err := l.Client.ListObjectParts(bucket, object, uploadID, partNumberMarker, maxParts)
if err != nil {
return lpi, minio.ErrorRespToObjectError(err, bucket, object)
}
return minio.FromMinioClientListPartsInfo(result), nil
}
// AbortMultipartUpload aborts a ongoing multipart upload
func (l *s3Objects) AbortMultipartUpload(ctx context.Context, bucket string, object string, uploadID string) error {
err := l.Client.AbortMultipartUpload(bucket, object, uploadID)
return minio.ErrorRespToObjectError(err, bucket, object)
}
// CompleteMultipartUpload completes ongoing multipart upload and finalizes object
func (l *s3Objects) CompleteMultipartUpload(ctx context.Context, bucket string, object string, uploadID string, uploadedParts []minio.CompletePart, opts minio.ObjectOptions) (oi minio.ObjectInfo, e error) {
etag, err := l.Client.CompleteMultipartUpload(bucket, object, uploadID, minio.ToMinioClientCompleteParts(uploadedParts))
if err != nil {
return oi, minio.ErrorRespToObjectError(err, bucket, object)
}
return minio.ObjectInfo{Bucket: bucket, Name: object, ETag: etag}, nil
}
// SetBucketPolicy sets policy on bucket
func (l *s3Objects) SetBucketPolicy(ctx context.Context, bucket string, bucketPolicy *policy.Policy) error {
data, err := json.Marshal(bucketPolicy)
if err != nil {
// This should not happen.
logger.LogIf(ctx, err)
return minio.ErrorRespToObjectError(err, bucket)
}
if err := l.Client.SetBucketPolicy(bucket, string(data)); err != nil {
return minio.ErrorRespToObjectError(err, bucket)
}
return nil
}
// GetBucketPolicy will get policy on bucket
func (l *s3Objects) GetBucketPolicy(ctx context.Context, bucket string) (*policy.Policy, error) {
data, err := l.Client.GetBucketPolicy(bucket)
if err != nil {
return nil, minio.ErrorRespToObjectError(err, bucket)
}
bucketPolicy, err := policy.ParseConfig(strings.NewReader(data), bucket)
return bucketPolicy, minio.ErrorRespToObjectError(err, bucket)
}
// DeleteBucketPolicy deletes all policies on bucket
func (l *s3Objects) DeleteBucketPolicy(ctx context.Context, bucket string) error {
if err := l.Client.SetBucketPolicy(bucket, ""); err != nil {
return minio.ErrorRespToObjectError(err, bucket, "")
}
return nil
}
// IsCompressionSupported returns whether compression is applicable for this layer.
func (l *s3Objects) IsCompressionSupported() bool {
return false
}
// IsEncryptionSupported returns whether server side encryption is implemented for this layer.
func (l *s3Objects) IsEncryptionSupported() bool {
return minio.GlobalKMS != nil || len(minio.GlobalGatewaySSE) > 0
}
| s3GatewayMain |
astencode.rs | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_camel_case_types)]
// FIXME: remove this after snapshot, and Results are handled
#![allow(unused_must_use)]
use metadata::common as c;
use metadata::cstore as cstore;
use session::Session;
use metadata::decoder;
use middle::def;
use metadata::encoder as e;
use middle::region;
use metadata::tydecode;
use metadata::tydecode::{DefIdSource, NominalType, TypeWithId, TypeParameter};
use metadata::tydecode::{RegionParameter, ClosureSource};
use metadata::tyencode;
use middle::check_const::ConstQualif;
use middle::mem_categorization::Typer;
use middle::privacy::{AllPublic, LastMod};
use middle::subst;
use middle::subst::VecPerParamSpace;
use middle::ty::{self, Ty, MethodCall, MethodCallee, MethodOrigin};
use util::ppaux::ty_to_string;
use syntax::{ast, ast_map, ast_util, codemap, fold};
use syntax::codemap::Span;
use syntax::fold::Folder;
use syntax::parse::token;
use syntax::ptr::P;
use syntax;
use std::cell::Cell;
use std::io::SeekFrom;
use std::io::prelude::*;
use std::num::FromPrimitive;
use std::rc::Rc;
use rbml::reader;
use rbml::writer::Encoder;
use rbml;
use serialize;
use serialize::{Decodable, Decoder, DecoderHelpers, Encodable};
use serialize::{EncoderHelpers};
#[cfg(test)] use std::io::Cursor;
#[cfg(test)] use syntax::parse;
#[cfg(test)] use syntax::print::pprust;
struct DecodeContext<'a, 'b, 'tcx: 'a> {
tcx: &'a ty::ctxt<'tcx>,
cdata: &'b cstore::crate_metadata,
from_id_range: ast_util::IdRange,
to_id_range: ast_util::IdRange,
// Cache the last used filemap for translating spans as an optimization.
last_filemap_index: Cell<usize>,
}
trait tr {
fn tr(&self, dcx: &DecodeContext) -> Self;
}
trait tr_intern {
fn tr_intern(&self, dcx: &DecodeContext) -> ast::DefId;
}
// ______________________________________________________________________
// Top-level methods.
pub fn encode_inlined_item(ecx: &e::EncodeContext,
rbml_w: &mut Encoder,
ii: e::InlinedItemRef) {
let id = match ii {
e::IIItemRef(i) => i.id,
e::IIForeignRef(i) => i.id,
e::IITraitItemRef(_, ti) => ti.id,
e::IIImplItemRef(_, ii) => ii.id,
};
debug!("> Encoding inlined item: {} ({:?})",
ecx.tcx.map.path_to_string(id),
rbml_w.writer.seek(SeekFrom::Current(0)));
// Folding could be avoided with a smarter encoder.
let ii = simplify_ast(ii);
let id_range = ast_util::compute_id_range_for_inlined_item(&ii);
rbml_w.start_tag(c::tag_ast as uint);
id_range.encode(rbml_w);
encode_ast(rbml_w, &ii);
encode_side_tables_for_ii(ecx, rbml_w, &ii);
rbml_w.end_tag();
debug!("< Encoded inlined fn: {} ({:?})",
ecx.tcx.map.path_to_string(id),
rbml_w.writer.seek(SeekFrom::Current(0)));
}
impl<'a, 'b, 'c, 'tcx> ast_map::FoldOps for &'a DecodeContext<'b, 'c, 'tcx> {
fn new_id(&self, id: ast::NodeId) -> ast::NodeId {
if id == ast::DUMMY_NODE_ID {
// Used by ast_map to map the NodeInlinedParent.
self.tcx.sess.next_node_id()
} else {
self.tr_id(id)
}
}
fn new_def_id(&self, def_id: ast::DefId) -> ast::DefId {
self.tr_def_id(def_id)
}
fn new_span(&self, span: Span) -> Span {
self.tr_span(span)
}
}
/// Decodes an item from its AST in the cdata's metadata and adds it to the
/// ast-map.
pub fn decode_inlined_item<'tcx>(cdata: &cstore::crate_metadata,
tcx: &ty::ctxt<'tcx>,
path: Vec<ast_map::PathElem>,
par_doc: rbml::Doc)
-> Result<&'tcx ast::InlinedItem, Vec<ast_map::PathElem>> {
match par_doc.opt_child(c::tag_ast) {
None => Err(path),
Some(ast_doc) => {
let mut path_as_str = None;
debug!("> Decoding inlined fn: {:?}::?",
{
// Do an Option dance to use the path after it is moved below.
let s = ast_map::path_to_string(path.iter().cloned());
path_as_str = Some(s);
path_as_str.as_ref().map(|x| &x[..])
});
let mut ast_dsr = reader::Decoder::new(ast_doc);
let from_id_range = Decodable::decode(&mut ast_dsr).unwrap();
let to_id_range = reserve_id_range(&tcx.sess, from_id_range);
let dcx = &DecodeContext {
cdata: cdata,
tcx: tcx,
from_id_range: from_id_range,
to_id_range: to_id_range,
last_filemap_index: Cell::new(0)
};
let raw_ii = decode_ast(ast_doc);
let ii = ast_map::map_decoded_item(&dcx.tcx.map, path, raw_ii, dcx);
let ident = match *ii {
ast::IIItem(ref i) => i.ident,
ast::IIForeign(ref i) => i.ident,
ast::IITraitItem(_, ref ti) => ti.ident,
ast::IIImplItem(_, ref ii) => ii.ident
};
debug!("Fn named: {}", token::get_ident(ident));
debug!("< Decoded inlined fn: {}::{}",
path_as_str.unwrap(),
token::get_ident(ident));
region::resolve_inlined_item(&tcx.sess, &tcx.region_maps, ii);
decode_side_tables(dcx, ast_doc);
match *ii {
ast::IIItem(ref i) => {
debug!(">>> DECODED ITEM >>>\n{}\n<<< DECODED ITEM <<<",
syntax::print::pprust::item_to_string(&**i));
}
_ => |
}
Ok(ii)
}
}
}
// ______________________________________________________________________
// Enumerating the IDs which appear in an AST
fn reserve_id_range(sess: &Session,
from_id_range: ast_util::IdRange) -> ast_util::IdRange {
// Handle the case of an empty range:
if from_id_range.empty() { return from_id_range; }
let cnt = from_id_range.max - from_id_range.min;
let to_id_min = sess.reserve_node_ids(cnt);
let to_id_max = to_id_min + cnt;
ast_util::IdRange { min: to_id_min, max: to_id_max }
}
impl<'a, 'b, 'tcx> DecodeContext<'a, 'b, 'tcx> {
/// Translates an internal id, meaning a node id that is known to refer to some part of the
/// item currently being inlined, such as a local variable or argument. All naked node-ids
/// that appear in types have this property, since if something might refer to an external item
/// we would use a def-id to allow for the possibility that the item resides in another crate.
pub fn tr_id(&self, id: ast::NodeId) -> ast::NodeId {
// from_id_range should be non-empty
assert!(!self.from_id_range.empty());
// Use wrapping arithmetic because otherwise it introduces control flow.
// Maybe we should just have the control flow? -- aatch
(id.wrapping_sub(self.from_id_range.min).wrapping_add(self.to_id_range.min))
}
/// Translates an EXTERNAL def-id, converting the crate number from the one used in the encoded
/// data to the current crate numbers.. By external, I mean that it be translated to a
/// reference to the item in its original crate, as opposed to being translated to a reference
/// to the inlined version of the item. This is typically, but not always, what you want,
/// because most def-ids refer to external things like types or other fns that may or may not
/// be inlined. Note that even when the inlined function is referencing itself recursively, we
/// would want `tr_def_id` for that reference--- conceptually the function calls the original,
/// non-inlined version, and trans deals with linking that recursive call to the inlined copy.
///
/// However, there are a *few* cases where def-ids are used but we know that the thing being
/// referenced is in fact *internal* to the item being inlined. In those cases, you should use
/// `tr_intern_def_id()` below.
pub fn tr_def_id(&self, did: ast::DefId) -> ast::DefId {
decoder::translate_def_id(self.cdata, did)
}
/// Translates an INTERNAL def-id, meaning a def-id that is
/// known to refer to some part of the item currently being
/// inlined. In that case, we want to convert the def-id to
/// refer to the current crate and to the new, inlined node-id.
pub fn tr_intern_def_id(&self, did: ast::DefId) -> ast::DefId {
assert_eq!(did.krate, ast::LOCAL_CRATE);
ast::DefId { krate: ast::LOCAL_CRATE, node: self.tr_id(did.node) }
}
/// Translates a `Span` from an extern crate to the corresponding `Span`
/// within the local crate's codemap. `creader::import_codemap()` will
/// already have allocated any additionally needed FileMaps in the local
/// codemap as a side-effect of creating the crate_metadata's
/// `codemap_import_info`.
pub fn tr_span(&self, span: Span) -> Span {
let imported_filemaps = &self.cdata.codemap_import_info[..];
let filemap_index = {
// Optimize for the case that most spans within a translated item
// originate from the same filemap.
let last_filemap_index = self.last_filemap_index.get();
if span.lo >= imported_filemaps[last_filemap_index].original_start_pos &&
span.hi <= imported_filemaps[last_filemap_index].original_end_pos {
last_filemap_index
} else {
let mut a = 0;
let mut b = imported_filemaps.len();
while b - a > 1 {
let m = (a + b) / 2;
if imported_filemaps[m].original_start_pos > span.lo {
b = m;
} else {
a = m;
}
}
self.last_filemap_index.set(a);
a
}
};
let lo = (span.lo - imported_filemaps[filemap_index].original_start_pos) +
imported_filemaps[filemap_index].translated_filemap.start_pos;
let hi = (span.hi - imported_filemaps[filemap_index].original_start_pos) +
imported_filemaps[filemap_index].translated_filemap.start_pos;
codemap::mk_sp(lo, hi)
}
}
impl tr_intern for ast::DefId {
fn tr_intern(&self, dcx: &DecodeContext) -> ast::DefId {
dcx.tr_intern_def_id(*self)
}
}
impl tr for ast::DefId {
fn tr(&self, dcx: &DecodeContext) -> ast::DefId {
dcx.tr_def_id(*self)
}
}
impl tr for Option<ast::DefId> {
fn tr(&self, dcx: &DecodeContext) -> Option<ast::DefId> {
self.map(|d| dcx.tr_def_id(d))
}
}
impl tr for Span {
fn tr(&self, dcx: &DecodeContext) -> Span {
dcx.tr_span(*self)
}
}
trait def_id_encoder_helpers {
fn emit_def_id(&mut self, did: ast::DefId);
}
impl<S:serialize::Encoder> def_id_encoder_helpers for S {
fn emit_def_id(&mut self, did: ast::DefId) {
did.encode(self).ok().unwrap()
}
}
trait def_id_decoder_helpers {
fn read_def_id(&mut self, dcx: &DecodeContext) -> ast::DefId;
fn read_def_id_nodcx(&mut self,
cdata: &cstore::crate_metadata) -> ast::DefId;
}
impl<D:serialize::Decoder> def_id_decoder_helpers for D {
fn read_def_id(&mut self, dcx: &DecodeContext) -> ast::DefId {
let did: ast::DefId = Decodable::decode(self).ok().unwrap();
did.tr(dcx)
}
fn read_def_id_nodcx(&mut self,
cdata: &cstore::crate_metadata) -> ast::DefId {
let did: ast::DefId = Decodable::decode(self).ok().unwrap();
decoder::translate_def_id(cdata, did)
}
}
// ______________________________________________________________________
// Encoding and decoding the AST itself
//
// The hard work is done by an autogenerated module astencode_gen. To
// regenerate astencode_gen, run src/etc/gen-astencode. It will
// replace astencode_gen with a dummy file and regenerate its
// contents. If you get compile errors, the dummy file
// remains---resolve the errors and then rerun astencode_gen.
// Annoying, I know, but hopefully only temporary.
//
// When decoding, we have to renumber the AST so that the node ids that
// appear within are disjoint from the node ids in our existing ASTs.
// We also have to adjust the spans: for now we just insert a dummy span,
// but eventually we should add entries to the local codemap as required.
fn encode_ast(rbml_w: &mut Encoder, item: &ast::InlinedItem) {
rbml_w.start_tag(c::tag_tree as uint);
item.encode(rbml_w);
rbml_w.end_tag();
}
struct NestedItemsDropper;
impl Folder for NestedItemsDropper {
fn fold_block(&mut self, blk: P<ast::Block>) -> P<ast::Block> {
blk.and_then(|ast::Block {id, stmts, expr, rules, span, ..}| {
let stmts_sans_items = stmts.into_iter().filter_map(|stmt| {
let use_stmt = match stmt.node {
ast::StmtExpr(_, _) | ast::StmtSemi(_, _) => true,
ast::StmtDecl(ref decl, _) => {
match decl.node {
ast::DeclLocal(_) => true,
ast::DeclItem(_) => false,
}
}
ast::StmtMac(..) => panic!("unexpanded macro in astencode")
};
if use_stmt {
Some(stmt)
} else {
None
}
}).collect();
let blk_sans_items = P(ast::Block {
stmts: stmts_sans_items,
expr: expr,
id: id,
rules: rules,
span: span,
});
fold::noop_fold_block(blk_sans_items, self)
})
}
}
// Produces a simplified copy of the AST which does not include things
// that we do not need to or do not want to export. For example, we
// do not include any nested items: if these nested items are to be
// inlined, their AST will be exported separately (this only makes
// sense because, in Rust, nested items are independent except for
// their visibility).
//
// As it happens, trans relies on the fact that we do not export
// nested items, as otherwise it would get confused when translating
// inlined items.
fn simplify_ast(ii: e::InlinedItemRef) -> ast::InlinedItem {
let mut fld = NestedItemsDropper;
match ii {
// HACK we're not dropping items.
e::IIItemRef(i) => {
ast::IIItem(fold::noop_fold_item(P(i.clone()), &mut fld)
.expect_one("expected one item"))
}
e::IITraitItemRef(d, ti) => {
ast::IITraitItem(d,
fold::noop_fold_trait_item(P(ti.clone()), &mut fld)
.expect_one("noop_fold_trait_item must produce \
exactly one trait item"))
}
e::IIImplItemRef(d, ii) => {
ast::IIImplItem(d,
fold::noop_fold_impl_item(P(ii.clone()), &mut fld)
.expect_one("noop_fold_impl_item must produce \
exactly one impl item"))
}
e::IIForeignRef(i) => {
ast::IIForeign(fold::noop_fold_foreign_item(P(i.clone()), &mut fld))
}
}
}
fn decode_ast(par_doc: rbml::Doc) -> ast::InlinedItem {
let chi_doc = par_doc.get(c::tag_tree as uint);
let mut d = reader::Decoder::new(chi_doc);
Decodable::decode(&mut d).unwrap()
}
// ______________________________________________________________________
// Encoding and decoding of ast::def
fn decode_def(dcx: &DecodeContext, dsr: &mut reader::Decoder) -> def::Def {
let def: def::Def = Decodable::decode(dsr).unwrap();
def.tr(dcx)
}
impl tr for def::Def {
fn tr(&self, dcx: &DecodeContext) -> def::Def {
match *self {
def::DefFn(did, is_ctor) => def::DefFn(did.tr(dcx), is_ctor),
def::DefMethod(did, p) => {
def::DefMethod(did.tr(dcx), p.map(|did2| did2.tr(dcx)))
}
def::DefSelfTy(nid) => { def::DefSelfTy(dcx.tr_id(nid)) }
def::DefMod(did) => { def::DefMod(did.tr(dcx)) }
def::DefForeignMod(did) => { def::DefForeignMod(did.tr(dcx)) }
def::DefStatic(did, m) => { def::DefStatic(did.tr(dcx), m) }
def::DefConst(did) => { def::DefConst(did.tr(dcx)) }
def::DefLocal(nid) => { def::DefLocal(dcx.tr_id(nid)) }
def::DefVariant(e_did, v_did, is_s) => {
def::DefVariant(e_did.tr(dcx), v_did.tr(dcx), is_s)
},
def::DefTrait(did) => def::DefTrait(did.tr(dcx)),
def::DefTy(did, is_enum) => def::DefTy(did.tr(dcx), is_enum),
def::DefAssociatedTy(trait_did, did) =>
def::DefAssociatedTy(trait_did.tr(dcx), did.tr(dcx)),
def::DefPrimTy(p) => def::DefPrimTy(p),
def::DefTyParam(s, index, def_id, n) => def::DefTyParam(s, index, def_id.tr(dcx), n),
def::DefUse(did) => def::DefUse(did.tr(dcx)),
def::DefUpvar(nid1, nid2) => {
def::DefUpvar(dcx.tr_id(nid1), dcx.tr_id(nid2))
}
def::DefStruct(did) => def::DefStruct(did.tr(dcx)),
def::DefRegion(nid) => def::DefRegion(dcx.tr_id(nid)),
def::DefLabel(nid) => def::DefLabel(dcx.tr_id(nid))
}
}
}
// ______________________________________________________________________
// Encoding and decoding of ancillary information
impl tr for ty::Region {
fn tr(&self, dcx: &DecodeContext) -> ty::Region {
match *self {
ty::ReLateBound(debruijn, br) => {
ty::ReLateBound(debruijn, br.tr(dcx))
}
ty::ReEarlyBound(id, space, index, ident) => {
ty::ReEarlyBound(dcx.tr_id(id), space, index, ident)
}
ty::ReScope(scope) => {
ty::ReScope(scope.tr(dcx))
}
ty::ReEmpty | ty::ReStatic | ty::ReInfer(..) => {
*self
}
ty::ReFree(ref fr) => {
ty::ReFree(fr.tr(dcx))
}
}
}
}
impl tr for ty::FreeRegion {
fn tr(&self, dcx: &DecodeContext) -> ty::FreeRegion {
ty::FreeRegion { scope: self.scope.tr(dcx),
bound_region: self.bound_region.tr(dcx) }
}
}
impl tr for region::CodeExtent {
fn tr(&self, dcx: &DecodeContext) -> region::CodeExtent {
self.map_id(|id| dcx.tr_id(id))
}
}
impl tr for region::DestructionScopeData {
fn tr(&self, dcx: &DecodeContext) -> region::DestructionScopeData {
region::DestructionScopeData { node_id: dcx.tr_id(self.node_id) }
}
}
impl tr for ty::BoundRegion {
fn tr(&self, dcx: &DecodeContext) -> ty::BoundRegion {
match *self {
ty::BrAnon(_) |
ty::BrFresh(_) |
ty::BrEnv => *self,
ty::BrNamed(id, ident) => ty::BrNamed(dcx.tr_def_id(id),
ident),
}
}
}
// ______________________________________________________________________
// Encoding and decoding of freevar information
fn encode_freevar_entry(rbml_w: &mut Encoder, fv: &ty::Freevar) {
(*fv).encode(rbml_w).unwrap();
}
trait rbml_decoder_helper {
fn read_freevar_entry(&mut self, dcx: &DecodeContext)
-> ty::Freevar;
fn read_capture_mode(&mut self) -> ast::CaptureClause;
}
impl<'a> rbml_decoder_helper for reader::Decoder<'a> {
fn read_freevar_entry(&mut self, dcx: &DecodeContext)
-> ty::Freevar {
let fv: ty::Freevar = Decodable::decode(self).unwrap();
fv.tr(dcx)
}
fn read_capture_mode(&mut self) -> ast::CaptureClause {
let cm: ast::CaptureClause = Decodable::decode(self).unwrap();
cm
}
}
impl tr for ty::Freevar {
fn tr(&self, dcx: &DecodeContext) -> ty::Freevar {
ty::Freevar {
def: self.def.tr(dcx),
span: self.span.tr(dcx),
}
}
}
impl tr for ty::UpvarBorrow {
fn tr(&self, dcx: &DecodeContext) -> ty::UpvarBorrow {
ty::UpvarBorrow {
kind: self.kind,
region: self.region.tr(dcx)
}
}
}
impl tr for ty::UpvarCapture {
fn tr(&self, dcx: &DecodeContext) -> ty::UpvarCapture {
match *self {
ty::UpvarCapture::ByValue => ty::UpvarCapture::ByValue,
ty::UpvarCapture::ByRef(ref data) => ty::UpvarCapture::ByRef(data.tr(dcx)),
}
}
}
// ______________________________________________________________________
// Encoding and decoding of MethodCallee
trait read_method_callee_helper<'tcx> {
fn read_method_callee<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> (ty::ExprAdjustment, MethodCallee<'tcx>);
}
fn encode_method_callee<'a, 'tcx>(ecx: &e::EncodeContext<'a, 'tcx>,
rbml_w: &mut Encoder,
adjustment: ty::ExprAdjustment,
method: &MethodCallee<'tcx>) {
use serialize::Encoder;
rbml_w.emit_struct("MethodCallee", 4, |rbml_w| {
rbml_w.emit_struct_field("adjustment", 0, |rbml_w| {
adjustment.encode(rbml_w)
});
rbml_w.emit_struct_field("origin", 1, |rbml_w| {
Ok(rbml_w.emit_method_origin(ecx, &method.origin))
});
rbml_w.emit_struct_field("ty", 2, |rbml_w| {
Ok(rbml_w.emit_ty(ecx, method.ty))
});
rbml_w.emit_struct_field("substs", 3, |rbml_w| {
Ok(rbml_w.emit_substs(ecx, &method.substs))
})
}).unwrap();
}
impl<'a, 'tcx> read_method_callee_helper<'tcx> for reader::Decoder<'a> {
fn read_method_callee<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> (ty::ExprAdjustment, MethodCallee<'tcx>) {
self.read_struct("MethodCallee", 4, |this| {
let adjustment = this.read_struct_field("adjustment", 0, |this| {
Decodable::decode(this)
}).unwrap();
Ok((adjustment, MethodCallee {
origin: this.read_struct_field("origin", 1, |this| {
Ok(this.read_method_origin(dcx))
}).unwrap(),
ty: this.read_struct_field("ty", 2, |this| {
Ok(this.read_ty(dcx))
}).unwrap(),
substs: this.read_struct_field("substs", 3, |this| {
Ok(this.read_substs(dcx))
}).unwrap()
}))
}).unwrap()
}
}
impl<'tcx> tr for MethodOrigin<'tcx> {
fn tr(&self, dcx: &DecodeContext) -> MethodOrigin<'tcx> {
match *self {
ty::MethodStatic(did) => ty::MethodStatic(did.tr(dcx)),
ty::MethodStaticClosure(did) => {
ty::MethodStaticClosure(did.tr(dcx))
}
ty::MethodTypeParam(ref mp) => {
ty::MethodTypeParam(
ty::MethodParam {
// def-id is already translated when we read it out
trait_ref: mp.trait_ref.clone(),
method_num: mp.method_num,
impl_def_id: mp.impl_def_id.tr(dcx),
}
)
}
ty::MethodTraitObject(ref mo) => {
ty::MethodTraitObject(
ty::MethodObject {
trait_ref: mo.trait_ref.clone(),
.. *mo
}
)
}
}
}
}
pub fn encode_closure_kind(ebml_w: &mut Encoder, kind: ty::ClosureKind) {
kind.encode(ebml_w).unwrap();
}
pub trait vtable_decoder_helpers<'tcx> {
fn read_vec_per_param_space<T, F>(&mut self, f: F) -> VecPerParamSpace<T> where
F: FnMut(&mut Self) -> T;
fn read_vtable_res_with_key(&mut self,
tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata)
-> (ty::ExprAdjustment, ty::vtable_res<'tcx>);
fn read_vtable_res(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata)
-> ty::vtable_res<'tcx>;
fn read_vtable_param_res(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata)
-> ty::vtable_param_res<'tcx>;
fn read_vtable_origin(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata)
-> ty::vtable_origin<'tcx>;
}
impl<'tcx, 'a> vtable_decoder_helpers<'tcx> for reader::Decoder<'a> {
fn read_vec_per_param_space<T, F>(&mut self, mut f: F) -> VecPerParamSpace<T> where
F: FnMut(&mut reader::Decoder<'a>) -> T,
{
let types = self.read_to_vec(|this| Ok(f(this))).unwrap();
let selfs = self.read_to_vec(|this| Ok(f(this))).unwrap();
let fns = self.read_to_vec(|this| Ok(f(this))).unwrap();
VecPerParamSpace::new(types, selfs, fns)
}
fn read_vtable_res_with_key(&mut self,
tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata)
-> (ty::ExprAdjustment, ty::vtable_res<'tcx>) {
self.read_struct("VtableWithKey", 2, |this| {
let adjustment = this.read_struct_field("adjustment", 0, |this| {
Decodable::decode(this)
}).unwrap();
Ok((adjustment, this.read_struct_field("vtable_res", 1, |this| {
Ok(this.read_vtable_res(tcx, cdata))
}).unwrap()))
}).unwrap()
}
fn read_vtable_res(&mut self,
tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata)
-> ty::vtable_res<'tcx>
{
self.read_vec_per_param_space(
|this| this.read_vtable_param_res(tcx, cdata))
}
fn read_vtable_param_res(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata)
-> ty::vtable_param_res<'tcx> {
self.read_to_vec(|this| Ok(this.read_vtable_origin(tcx, cdata)))
.unwrap().into_iter().collect()
}
fn read_vtable_origin(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata)
-> ty::vtable_origin<'tcx> {
self.read_enum("vtable_origin", |this| {
this.read_enum_variant(&["vtable_static",
"vtable_param",
"vtable_error",
"vtable_closure"],
|this, i| {
Ok(match i {
0 => {
ty::vtable_static(
this.read_enum_variant_arg(0, |this| {
Ok(this.read_def_id_nodcx(cdata))
}).unwrap(),
this.read_enum_variant_arg(1, |this| {
Ok(this.read_substs_nodcx(tcx, cdata))
}).unwrap(),
this.read_enum_variant_arg(2, |this| {
Ok(this.read_vtable_res(tcx, cdata))
}).unwrap()
)
}
1 => {
ty::vtable_param(
this.read_enum_variant_arg(0, |this| {
Decodable::decode(this)
}).unwrap(),
this.read_enum_variant_arg(1, |this| {
this.read_uint()
}).unwrap()
)
}
2 => {
ty::vtable_closure(
this.read_enum_variant_arg(0, |this| {
Ok(this.read_def_id_nodcx(cdata))
}).unwrap()
)
}
3 => {
ty::vtable_error
}
_ => panic!("bad enum variant")
})
})
}).unwrap()
}
}
// ___________________________________________________________________________
//
fn encode_vec_per_param_space<T, F>(rbml_w: &mut Encoder,
v: &subst::VecPerParamSpace<T>,
mut f: F) where
F: FnMut(&mut Encoder, &T),
{
for &space in &subst::ParamSpace::all() {
rbml_w.emit_from_vec(v.get_slice(space),
|rbml_w, n| Ok(f(rbml_w, n))).unwrap();
}
}
// ______________________________________________________________________
// Encoding and decoding the side tables
trait get_ty_str_ctxt<'tcx> {
fn ty_str_ctxt<'a>(&'a self) -> tyencode::ctxt<'a, 'tcx>;
}
impl<'a, 'tcx> get_ty_str_ctxt<'tcx> for e::EncodeContext<'a, 'tcx> {
fn ty_str_ctxt<'b>(&'b self) -> tyencode::ctxt<'b, 'tcx> {
tyencode::ctxt {
diag: self.tcx.sess.diagnostic(),
ds: e::def_to_string,
tcx: self.tcx,
abbrevs: &self.type_abbrevs
}
}
}
trait rbml_writer_helpers<'tcx> {
fn emit_closure_type<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
closure_type: &ty::ClosureTy<'tcx>);
fn emit_method_origin<'a>(&mut self,
ecx: &e::EncodeContext<'a, 'tcx>,
method_origin: &ty::MethodOrigin<'tcx>);
fn emit_ty<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>, ty: Ty<'tcx>);
fn emit_tys<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>, tys: &[Ty<'tcx>]);
fn emit_type_param_def<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
type_param_def: &ty::TypeParameterDef<'tcx>);
fn emit_predicate<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
predicate: &ty::Predicate<'tcx>);
fn emit_trait_ref<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
ty: &ty::TraitRef<'tcx>);
fn emit_type_scheme<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
type_scheme: ty::TypeScheme<'tcx>);
fn emit_substs<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
substs: &subst::Substs<'tcx>);
fn emit_existential_bounds<'b>(&mut self, ecx: &e::EncodeContext<'b,'tcx>,
bounds: &ty::ExistentialBounds<'tcx>);
fn emit_builtin_bounds(&mut self, ecx: &e::EncodeContext, bounds: &ty::BuiltinBounds);
fn emit_auto_adjustment<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
adj: &ty::AutoAdjustment<'tcx>);
fn emit_autoref<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
autoref: &ty::AutoRef<'tcx>);
fn emit_auto_deref_ref<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
auto_deref_ref: &ty::AutoDerefRef<'tcx>);
fn emit_unsize_kind<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
uk: &ty::UnsizeKind<'tcx>);
}
impl<'a, 'tcx> rbml_writer_helpers<'tcx> for Encoder<'a> {
fn emit_closure_type<'b>(&mut self,
ecx: &e::EncodeContext<'b, 'tcx>,
closure_type: &ty::ClosureTy<'tcx>) {
self.emit_opaque(|this| {
Ok(e::write_closure_type(ecx, this, closure_type))
});
}
fn emit_method_origin<'b>(&mut self,
ecx: &e::EncodeContext<'b, 'tcx>,
method_origin: &ty::MethodOrigin<'tcx>)
{
use serialize::Encoder;
self.emit_enum("MethodOrigin", |this| {
match *method_origin {
ty::MethodStatic(def_id) => {
this.emit_enum_variant("MethodStatic", 0, 1, |this| {
Ok(this.emit_def_id(def_id))
})
}
ty::MethodStaticClosure(def_id) => {
this.emit_enum_variant("MethodStaticClosure", 1, 1, |this| {
Ok(this.emit_def_id(def_id))
})
}
ty::MethodTypeParam(ref p) => {
this.emit_enum_variant("MethodTypeParam", 2, 1, |this| {
this.emit_struct("MethodParam", 2, |this| {
try!(this.emit_struct_field("trait_ref", 0, |this| {
Ok(this.emit_trait_ref(ecx, &*p.trait_ref))
}));
try!(this.emit_struct_field("method_num", 0, |this| {
this.emit_uint(p.method_num)
}));
try!(this.emit_struct_field("impl_def_id", 0, |this| {
this.emit_option(|this| {
match p.impl_def_id {
None => this.emit_option_none(),
Some(did) => this.emit_option_some(|this| {
Ok(this.emit_def_id(did))
})
}
})
}));
Ok(())
})
})
}
ty::MethodTraitObject(ref o) => {
this.emit_enum_variant("MethodTraitObject", 3, 1, |this| {
this.emit_struct("MethodObject", 2, |this| {
try!(this.emit_struct_field("trait_ref", 0, |this| {
Ok(this.emit_trait_ref(ecx, &*o.trait_ref))
}));
try!(this.emit_struct_field("object_trait_id", 0, |this| {
Ok(this.emit_def_id(o.object_trait_id))
}));
try!(this.emit_struct_field("method_num", 0, |this| {
this.emit_uint(o.method_num)
}));
try!(this.emit_struct_field("vtable_index", 0, |this| {
this.emit_uint(o.vtable_index)
}));
Ok(())
})
})
}
}
});
}
fn emit_ty<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>, ty: Ty<'tcx>) {
self.emit_opaque(|this| Ok(e::write_type(ecx, this, ty)));
}
fn emit_tys<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>, tys: &[Ty<'tcx>]) {
self.emit_from_vec(tys, |this, ty| Ok(this.emit_ty(ecx, *ty)));
}
fn emit_trait_ref<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
trait_ref: &ty::TraitRef<'tcx>) {
self.emit_opaque(|this| Ok(e::write_trait_ref(ecx, this, trait_ref)));
}
fn emit_type_param_def<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
type_param_def: &ty::TypeParameterDef<'tcx>) {
self.emit_opaque(|this| {
Ok(tyencode::enc_type_param_def(this,
&ecx.ty_str_ctxt(),
type_param_def))
});
}
fn emit_predicate<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
predicate: &ty::Predicate<'tcx>) {
self.emit_opaque(|this| {
Ok(tyencode::enc_predicate(this,
&ecx.ty_str_ctxt(),
predicate))
});
}
fn emit_type_scheme<'b>(&mut self,
ecx: &e::EncodeContext<'b, 'tcx>,
type_scheme: ty::TypeScheme<'tcx>) {
use serialize::Encoder;
self.emit_struct("TypeScheme", 2, |this| {
this.emit_struct_field("generics", 0, |this| {
this.emit_struct("Generics", 2, |this| {
this.emit_struct_field("types", 0, |this| {
Ok(encode_vec_per_param_space(
this, &type_scheme.generics.types,
|this, def| this.emit_type_param_def(ecx, def)))
});
this.emit_struct_field("regions", 1, |this| {
Ok(encode_vec_per_param_space(
this, &type_scheme.generics.regions,
|this, def| def.encode(this).unwrap()))
})
})
});
this.emit_struct_field("ty", 1, |this| {
Ok(this.emit_ty(ecx, type_scheme.ty))
})
});
}
fn emit_existential_bounds<'b>(&mut self, ecx: &e::EncodeContext<'b,'tcx>,
bounds: &ty::ExistentialBounds<'tcx>) {
self.emit_opaque(|this| Ok(tyencode::enc_existential_bounds(this,
&ecx.ty_str_ctxt(),
bounds)));
}
fn emit_builtin_bounds(&mut self, ecx: &e::EncodeContext, bounds: &ty::BuiltinBounds) {
self.emit_opaque(|this| Ok(tyencode::enc_builtin_bounds(this,
&ecx.ty_str_ctxt(),
bounds)));
}
fn emit_substs<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
substs: &subst::Substs<'tcx>) {
self.emit_opaque(|this| Ok(tyencode::enc_substs(this,
&ecx.ty_str_ctxt(),
substs)));
}
fn emit_auto_adjustment<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
adj: &ty::AutoAdjustment<'tcx>) {
use serialize::Encoder;
self.emit_enum("AutoAdjustment", |this| {
match *adj {
ty::AdjustReifyFnPointer(def_id) => {
this.emit_enum_variant("AdjustReifyFnPointer", 1, 2, |this| {
this.emit_enum_variant_arg(0, |this| def_id.encode(this))
})
}
ty::AdjustUnsafeFnPointer => {
this.emit_enum_variant("AdjustUnsafeFnPointer", 2, 0, |_| {
Ok(())
})
}
ty::AdjustDerefRef(ref auto_deref_ref) => {
this.emit_enum_variant("AdjustDerefRef", 3, 2, |this| {
this.emit_enum_variant_arg(0,
|this| Ok(this.emit_auto_deref_ref(ecx, auto_deref_ref)))
})
}
}
});
}
fn emit_autoref<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
autoref: &ty::AutoRef<'tcx>) {
use serialize::Encoder;
self.emit_enum("AutoRef", |this| {
match autoref {
&ty::AutoPtr(r, m, None) => {
this.emit_enum_variant("AutoPtr", 0, 3, |this| {
this.emit_enum_variant_arg(0, |this| r.encode(this));
this.emit_enum_variant_arg(1, |this| m.encode(this));
this.emit_enum_variant_arg(2,
|this| this.emit_option(|this| this.emit_option_none()))
})
}
&ty::AutoPtr(r, m, Some(box ref a)) => {
this.emit_enum_variant("AutoPtr", 0, 3, |this| {
this.emit_enum_variant_arg(0, |this| r.encode(this));
this.emit_enum_variant_arg(1, |this| m.encode(this));
this.emit_enum_variant_arg(2, |this| this.emit_option(
|this| this.emit_option_some(|this| Ok(this.emit_autoref(ecx, a)))))
})
}
&ty::AutoUnsize(ref uk) => {
this.emit_enum_variant("AutoUnsize", 1, 1, |this| {
this.emit_enum_variant_arg(0, |this| Ok(this.emit_unsize_kind(ecx, uk)))
})
}
&ty::AutoUnsizeUniq(ref uk) => {
this.emit_enum_variant("AutoUnsizeUniq", 2, 1, |this| {
this.emit_enum_variant_arg(0, |this| Ok(this.emit_unsize_kind(ecx, uk)))
})
}
&ty::AutoUnsafe(m, None) => {
this.emit_enum_variant("AutoUnsafe", 3, 2, |this| {
this.emit_enum_variant_arg(0, |this| m.encode(this));
this.emit_enum_variant_arg(1,
|this| this.emit_option(|this| this.emit_option_none()))
})
}
&ty::AutoUnsafe(m, Some(box ref a)) => {
this.emit_enum_variant("AutoUnsafe", 3, 2, |this| {
this.emit_enum_variant_arg(0, |this| m.encode(this));
this.emit_enum_variant_arg(1, |this| this.emit_option(
|this| this.emit_option_some(|this| Ok(this.emit_autoref(ecx, a)))))
})
}
}
});
}
fn emit_auto_deref_ref<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
auto_deref_ref: &ty::AutoDerefRef<'tcx>) {
use serialize::Encoder;
self.emit_struct("AutoDerefRef", 2, |this| {
this.emit_struct_field("autoderefs", 0, |this| auto_deref_ref.autoderefs.encode(this));
this.emit_struct_field("autoref", 1, |this| {
this.emit_option(|this| {
match auto_deref_ref.autoref {
None => this.emit_option_none(),
Some(ref a) => this.emit_option_some(|this| Ok(this.emit_autoref(ecx, a))),
}
})
})
});
}
fn emit_unsize_kind<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
uk: &ty::UnsizeKind<'tcx>) {
use serialize::Encoder;
self.emit_enum("UnsizeKind", |this| {
match *uk {
ty::UnsizeLength(len) => {
this.emit_enum_variant("UnsizeLength", 0, 1, |this| {
this.emit_enum_variant_arg(0, |this| len.encode(this))
})
}
ty::UnsizeStruct(box ref uk, idx) => {
this.emit_enum_variant("UnsizeStruct", 1, 2, |this| {
this.emit_enum_variant_arg(0, |this| Ok(this.emit_unsize_kind(ecx, uk)));
this.emit_enum_variant_arg(1, |this| idx.encode(this))
})
}
ty::UnsizeVtable(ty::TyTrait { ref principal,
bounds: ref b },
self_ty) => {
this.emit_enum_variant("UnsizeVtable", 2, 4, |this| {
this.emit_enum_variant_arg(0, |this| {
try!(this.emit_struct_field("principal", 0, |this| {
Ok(this.emit_trait_ref(ecx, &*principal.0))
}));
this.emit_struct_field("bounds", 1, |this| {
Ok(this.emit_existential_bounds(ecx, b))
})
});
this.emit_enum_variant_arg(1, |this| Ok(this.emit_ty(ecx, self_ty)))
})
}
ty::UnsizeUpcast(target_ty) => {
this.emit_enum_variant("UnsizeUpcast", 3, 1, |this| {
this.emit_enum_variant_arg(0, |this| Ok(this.emit_ty(ecx, target_ty)))
})
}
}
});
}
}
trait write_tag_and_id {
fn tag<F>(&mut self, tag_id: c::astencode_tag, f: F) where F: FnOnce(&mut Self);
fn id(&mut self, id: ast::NodeId);
}
impl<'a> write_tag_and_id for Encoder<'a> {
fn tag<F>(&mut self,
tag_id: c::astencode_tag,
f: F) where
F: FnOnce(&mut Encoder<'a>),
{
self.start_tag(tag_id as uint);
f(self);
self.end_tag();
}
fn id(&mut self, id: ast::NodeId) {
id.encode(self).unwrap();
}
}
struct SideTableEncodingIdVisitor<'a, 'b:'a, 'c:'a, 'tcx:'c> {
ecx: &'a e::EncodeContext<'c, 'tcx>,
rbml_w: &'a mut Encoder<'b>,
}
impl<'a, 'b, 'c, 'tcx> ast_util::IdVisitingOperation for
SideTableEncodingIdVisitor<'a, 'b, 'c, 'tcx> {
fn visit_id(&mut self, id: ast::NodeId) {
encode_side_tables_for_id(self.ecx, self.rbml_w, id)
}
}
fn encode_side_tables_for_ii(ecx: &e::EncodeContext,
rbml_w: &mut Encoder,
ii: &ast::InlinedItem) {
rbml_w.start_tag(c::tag_table as uint);
ast_util::visit_ids_for_inlined_item(ii, &mut SideTableEncodingIdVisitor {
ecx: ecx,
rbml_w: rbml_w
});
rbml_w.end_tag();
}
fn encode_side_tables_for_id(ecx: &e::EncodeContext,
rbml_w: &mut Encoder,
id: ast::NodeId) {
let tcx = ecx.tcx;
debug!("Encoding side tables for id {}", id);
if let Some(def) = tcx.def_map.borrow().get(&id).map(|d| d.full_def()) {
rbml_w.tag(c::tag_table_def, |rbml_w| {
rbml_w.id(id);
def.encode(rbml_w).unwrap();
})
}
if let Some(ty) = tcx.node_types.borrow().get(&id) {
rbml_w.tag(c::tag_table_node_type, |rbml_w| {
rbml_w.id(id);
rbml_w.emit_ty(ecx, *ty);
})
}
if let Some(item_substs) = tcx.item_substs.borrow().get(&id) {
rbml_w.tag(c::tag_table_item_subst, |rbml_w| {
rbml_w.id(id);
rbml_w.emit_substs(ecx, &item_substs.substs);
})
}
if let Some(fv) = tcx.freevars.borrow().get(&id) {
rbml_w.tag(c::tag_table_freevars, |rbml_w| {
rbml_w.id(id);
rbml_w.emit_from_vec(fv, |rbml_w, fv_entry| {
Ok(encode_freevar_entry(rbml_w, fv_entry))
});
});
for freevar in fv {
rbml_w.tag(c::tag_table_upvar_capture_map, |rbml_w| {
rbml_w.id(id);
let var_id = freevar.def.def_id().node;
let upvar_id = ty::UpvarId {
var_id: var_id,
closure_expr_id: id
};
let upvar_capture = tcx.upvar_capture_map.borrow()[upvar_id].clone();
var_id.encode(rbml_w);
upvar_capture.encode(rbml_w);
})
}
}
let lid = ast::DefId { krate: ast::LOCAL_CRATE, node: id };
if let Some(type_scheme) = tcx.tcache.borrow().get(&lid) {
rbml_w.tag(c::tag_table_tcache, |rbml_w| {
rbml_w.id(id);
rbml_w.emit_type_scheme(ecx, type_scheme.clone());
})
}
if let Some(type_param_def) = tcx.ty_param_defs.borrow().get(&id) {
rbml_w.tag(c::tag_table_param_defs, |rbml_w| {
rbml_w.id(id);
rbml_w.emit_type_param_def(ecx, type_param_def)
})
}
let method_call = MethodCall::expr(id);
if let Some(method) = tcx.method_map.borrow().get(&method_call) {
rbml_w.tag(c::tag_table_method_map, |rbml_w| {
rbml_w.id(id);
encode_method_callee(ecx, rbml_w, method_call.adjustment, method)
})
}
if let Some(trait_ref) = tcx.object_cast_map.borrow().get(&id) {
rbml_w.tag(c::tag_table_object_cast_map, |rbml_w| {
rbml_w.id(id);
rbml_w.emit_trait_ref(ecx, &*trait_ref.0);
})
}
if let Some(adjustment) = tcx.adjustments.borrow().get(&id) {
match *adjustment {
_ if ty::adjust_is_object(adjustment) => {
let method_call = MethodCall::autoobject(id);
if let Some(method) = tcx.method_map.borrow().get(&method_call) {
rbml_w.tag(c::tag_table_method_map, |rbml_w| {
rbml_w.id(id);
encode_method_callee(ecx, rbml_w, method_call.adjustment, method)
})
}
}
ty::AdjustDerefRef(ref adj) => {
assert!(!ty::adjust_is_object(adjustment));
for autoderef in 0..adj.autoderefs {
let method_call = MethodCall::autoderef(id, autoderef);
if let Some(method) = tcx.method_map.borrow().get(&method_call) {
rbml_w.tag(c::tag_table_method_map, |rbml_w| {
rbml_w.id(id);
encode_method_callee(ecx, rbml_w,
method_call.adjustment, method)
})
}
}
}
_ => {
assert!(!ty::adjust_is_object(adjustment));
}
}
rbml_w.tag(c::tag_table_adjustments, |rbml_w| {
rbml_w.id(id);
rbml_w.emit_auto_adjustment(ecx, adjustment);
})
}
if let Some(closure_type) = tcx.closure_tys.borrow().get(&ast_util::local_def(id)) {
rbml_w.tag(c::tag_table_closure_tys, |rbml_w| {
rbml_w.id(id);
rbml_w.emit_closure_type(ecx, closure_type);
})
}
if let Some(closure_kind) = tcx.closure_kinds.borrow().get(&ast_util::local_def(id)) {
rbml_w.tag(c::tag_table_closure_kinds, |rbml_w| {
rbml_w.id(id);
encode_closure_kind(rbml_w, *closure_kind)
})
}
for &qualif in tcx.const_qualif_map.borrow().get(&id).iter() {
rbml_w.tag(c::tag_table_const_qualif, |rbml_w| {
rbml_w.id(id);
qualif.encode(rbml_w).unwrap()
})
}
}
trait doc_decoder_helpers {
fn as_int(&self) -> int;
fn opt_child(&self, tag: c::astencode_tag) -> Option<Self>;
}
impl<'a> doc_decoder_helpers for rbml::Doc<'a> {
fn as_int(&self) -> int { reader::doc_as_u64(*self) as int }
fn opt_child(&self, tag: c::astencode_tag) -> Option<rbml::Doc<'a>> {
reader::maybe_get_doc(*self, tag as uint)
}
}
trait rbml_decoder_decoder_helpers<'tcx> {
fn read_method_origin<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::MethodOrigin<'tcx>;
fn read_ty<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) -> Ty<'tcx>;
fn read_tys<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) -> Vec<Ty<'tcx>>;
fn read_trait_ref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> Rc<ty::TraitRef<'tcx>>;
fn read_poly_trait_ref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::PolyTraitRef<'tcx>;
fn read_type_param_def<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::TypeParameterDef<'tcx>;
fn read_predicate<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::Predicate<'tcx>;
fn read_type_scheme<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::TypeScheme<'tcx>;
fn read_existential_bounds<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::ExistentialBounds<'tcx>;
fn read_substs<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> subst::Substs<'tcx>;
fn read_auto_adjustment<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::AutoAdjustment<'tcx>;
fn read_closure_kind<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::ClosureKind;
fn read_closure_ty<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::ClosureTy<'tcx>;
fn read_auto_deref_ref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::AutoDerefRef<'tcx>;
fn read_autoref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::AutoRef<'tcx>;
fn read_unsize_kind<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::UnsizeKind<'tcx>;
fn convert_def_id(&mut self,
dcx: &DecodeContext,
source: DefIdSource,
did: ast::DefId)
-> ast::DefId;
// Versions of the type reading functions that don't need the full
// DecodeContext.
fn read_ty_nodcx(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata) -> Ty<'tcx>;
fn read_tys_nodcx(&mut self,
tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata) -> Vec<Ty<'tcx>>;
fn read_substs_nodcx(&mut self, tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata)
-> subst::Substs<'tcx>;
}
impl<'a, 'tcx> rbml_decoder_decoder_helpers<'tcx> for reader::Decoder<'a> {
fn read_ty_nodcx(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata) -> Ty<'tcx> {
self.read_opaque(|_, doc| {
Ok(tydecode::parse_ty_data(
doc.data,
cdata.cnum,
doc.start,
tcx,
|_, id| decoder::translate_def_id(cdata, id)))
}).unwrap()
}
fn read_tys_nodcx(&mut self,
tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata) -> Vec<Ty<'tcx>> {
self.read_to_vec(|this| Ok(this.read_ty_nodcx(tcx, cdata)) )
.unwrap()
.into_iter()
.collect()
}
fn read_substs_nodcx(&mut self,
tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata)
-> subst::Substs<'tcx>
{
self.read_opaque(|_, doc| {
Ok(tydecode::parse_substs_data(
doc.data,
cdata.cnum,
doc.start,
tcx,
|_, id| decoder::translate_def_id(cdata, id)))
}).unwrap()
}
fn read_method_origin<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::MethodOrigin<'tcx>
{
self.read_enum("MethodOrigin", |this| {
let variants = &["MethodStatic", "MethodStaticClosure",
"MethodTypeParam", "MethodTraitObject"];
this.read_enum_variant(variants, |this, i| {
Ok(match i {
0 => {
let def_id = this.read_def_id(dcx);
ty::MethodStatic(def_id)
}
1 => {
let def_id = this.read_def_id(dcx);
ty::MethodStaticClosure(def_id)
}
2 => {
this.read_struct("MethodTypeParam", 2, |this| {
Ok(ty::MethodTypeParam(
ty::MethodParam {
trait_ref: {
this.read_struct_field("trait_ref", 0, |this| {
Ok(this.read_trait_ref(dcx))
}).unwrap()
},
method_num: {
this.read_struct_field("method_num", 1, |this| {
this.read_uint()
}).unwrap()
},
impl_def_id: {
this.read_struct_field("impl_def_id", 2, |this| {
this.read_option(|this, b| {
if b {
Ok(Some(this.read_def_id(dcx)))
} else {
Ok(None)
}
})
}).unwrap()
}
}))
}).unwrap()
}
3 => {
this.read_struct("MethodTraitObject", 2, |this| {
Ok(ty::MethodTraitObject(
ty::MethodObject {
trait_ref: {
this.read_struct_field("trait_ref", 0, |this| {
Ok(this.read_trait_ref(dcx))
}).unwrap()
},
object_trait_id: {
this.read_struct_field("object_trait_id", 1, |this| {
Ok(this.read_def_id(dcx))
}).unwrap()
},
method_num: {
this.read_struct_field("method_num", 2, |this| {
this.read_uint()
}).unwrap()
},
vtable_index: {
this.read_struct_field("vtable_index", 3, |this| {
this.read_uint()
}).unwrap()
},
}))
}).unwrap()
}
_ => panic!("..")
})
})
}).unwrap()
}
fn read_ty<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>) -> Ty<'tcx> {
// Note: regions types embed local node ids. In principle, we
// should translate these node ids into the new decode
// context. However, we do not bother, because region types
// are not used during trans.
return self.read_opaque(|this, doc| {
debug!("read_ty({})", type_string(doc));
let ty = tydecode::parse_ty_data(
doc.data,
dcx.cdata.cnum,
doc.start,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a));
Ok(ty)
}).unwrap();
fn type_string(doc: rbml::Doc) -> String {
let mut str = String::new();
for i in doc.start..doc.end {
str.push(doc.data[i] as char);
}
str
}
}
fn read_tys<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> Vec<Ty<'tcx>> {
self.read_to_vec(|this| Ok(this.read_ty(dcx))).unwrap().into_iter().collect()
}
fn read_trait_ref<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> Rc<ty::TraitRef<'tcx>> {
self.read_opaque(|this, doc| {
let ty = tydecode::parse_trait_ref_data(
doc.data,
dcx.cdata.cnum,
doc.start,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a));
Ok(ty)
}).unwrap()
}
fn read_poly_trait_ref<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::PolyTraitRef<'tcx> {
ty::Binder(self.read_opaque(|this, doc| {
let ty = tydecode::parse_trait_ref_data(
doc.data,
dcx.cdata.cnum,
doc.start,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a));
Ok(ty)
}).unwrap())
}
fn read_type_param_def<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::TypeParameterDef<'tcx> {
self.read_opaque(|this, doc| {
Ok(tydecode::parse_type_param_def_data(
doc.data,
doc.start,
dcx.cdata.cnum,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a)))
}).unwrap()
}
fn read_predicate<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::Predicate<'tcx>
{
self.read_opaque(|this, doc| {
Ok(tydecode::parse_predicate_data(doc.data, doc.start, dcx.cdata.cnum, dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a)))
}).unwrap()
}
fn read_type_scheme<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::TypeScheme<'tcx> {
self.read_struct("TypeScheme", 3, |this| {
Ok(ty::TypeScheme {
generics: this.read_struct_field("generics", 0, |this| {
this.read_struct("Generics", 2, |this| {
Ok(ty::Generics {
types:
this.read_struct_field("types", 0, |this| {
Ok(this.read_vec_per_param_space(
|this| this.read_type_param_def(dcx)))
}).unwrap(),
regions:
this.read_struct_field("regions", 1, |this| {
Ok(this.read_vec_per_param_space(
|this| Decodable::decode(this).unwrap()))
}).unwrap(),
})
})
}).unwrap(),
ty: this.read_struct_field("ty", 1, |this| {
Ok(this.read_ty(dcx))
}).unwrap()
})
}).unwrap()
}
fn read_existential_bounds<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::ExistentialBounds<'tcx>
{
self.read_opaque(|this, doc| {
Ok(tydecode::parse_existential_bounds_data(doc.data,
dcx.cdata.cnum,
doc.start,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a)))
}).unwrap()
}
fn read_substs<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> subst::Substs<'tcx> {
self.read_opaque(|this, doc| {
Ok(tydecode::parse_substs_data(doc.data,
dcx.cdata.cnum,
doc.start,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a)))
}).unwrap()
}
fn read_auto_adjustment<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::AutoAdjustment<'tcx> {
self.read_enum("AutoAdjustment", |this| {
let variants = ["AutoAddEnv", "AutoDerefRef"];
this.read_enum_variant(&variants, |this, i| {
Ok(match i {
1 => {
let def_id: ast::DefId =
this.read_def_id(dcx);
ty::AdjustReifyFnPointer(def_id)
}
2 => {
ty::AdjustUnsafeFnPointer
}
3 => {
let auto_deref_ref: ty::AutoDerefRef =
this.read_enum_variant_arg(0,
|this| Ok(this.read_auto_deref_ref(dcx))).unwrap();
ty::AdjustDerefRef(auto_deref_ref)
}
_ => panic!("bad enum variant for ty::AutoAdjustment")
})
})
}).unwrap()
}
fn read_auto_deref_ref<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::AutoDerefRef<'tcx> {
self.read_struct("AutoDerefRef", 2, |this| {
Ok(ty::AutoDerefRef {
autoderefs: this.read_struct_field("autoderefs", 0, |this| {
Decodable::decode(this)
}).unwrap(),
autoref: this.read_struct_field("autoref", 1, |this| {
this.read_option(|this, b| {
if b {
Ok(Some(this.read_autoref(dcx)))
} else {
Ok(None)
}
})
}).unwrap(),
})
}).unwrap()
}
fn read_autoref<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>) -> ty::AutoRef<'tcx> {
self.read_enum("AutoRef", |this| {
let variants = ["AutoPtr",
"AutoUnsize",
"AutoUnsizeUniq",
"AutoUnsafe"];
this.read_enum_variant(&variants, |this, i| {
Ok(match i {
0 => {
let r: ty::Region =
this.read_enum_variant_arg(0, |this| Decodable::decode(this)).unwrap();
let m: ast::Mutability =
this.read_enum_variant_arg(1, |this| Decodable::decode(this)).unwrap();
let a: Option<Box<ty::AutoRef>> =
this.read_enum_variant_arg(2, |this| this.read_option(|this, b| {
if b {
Ok(Some(box this.read_autoref(dcx)))
} else {
Ok(None)
}
})).unwrap();
ty::AutoPtr(r.tr(dcx), m, a)
}
1 => {
let uk: ty::UnsizeKind =
this.read_enum_variant_arg(0,
|this| Ok(this.read_unsize_kind(dcx))).unwrap();
ty::AutoUnsize(uk)
}
2 => {
let uk: ty::UnsizeKind =
this.read_enum_variant_arg(0,
|this| Ok(this.read_unsize_kind(dcx))).unwrap();
ty::AutoUnsizeUniq(uk)
}
3 => {
let m: ast::Mutability =
this.read_enum_variant_arg(0, |this| Decodable::decode(this)).unwrap();
let a: Option<Box<ty::AutoRef>> =
this.read_enum_variant_arg(1, |this| this.read_option(|this, b| {
if b {
Ok(Some(box this.read_autoref(dcx)))
} else {
Ok(None)
}
})).unwrap();
ty::AutoUnsafe(m, a)
}
_ => panic!("bad enum variant for ty::AutoRef")
})
})
}).unwrap()
}
fn read_unsize_kind<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::UnsizeKind<'tcx> {
self.read_enum("UnsizeKind", |this| {
let variants = &["UnsizeLength", "UnsizeStruct", "UnsizeVtable", "UnsizeUpcast"];
this.read_enum_variant(variants, |this, i| {
Ok(match i {
0 => {
let len: uint =
this.read_enum_variant_arg(0, |this| Decodable::decode(this)).unwrap();
ty::UnsizeLength(len)
}
1 => {
let uk: ty::UnsizeKind =
this.read_enum_variant_arg(0,
|this| Ok(this.read_unsize_kind(dcx))).unwrap();
let idx: uint =
this.read_enum_variant_arg(1, |this| Decodable::decode(this)).unwrap();
ty::UnsizeStruct(box uk, idx)
}
2 => {
let ty_trait = try!(this.read_enum_variant_arg(0, |this| {
let principal = try!(this.read_struct_field("principal", 0, |this| {
Ok(this.read_poly_trait_ref(dcx))
}));
Ok(ty::TyTrait {
principal: principal,
bounds: try!(this.read_struct_field("bounds", 1, |this| {
Ok(this.read_existential_bounds(dcx))
})),
})
}));
let self_ty =
this.read_enum_variant_arg(1, |this| Ok(this.read_ty(dcx))).unwrap();
ty::UnsizeVtable(ty_trait, self_ty)
}
3 => {
let target_ty =
this.read_enum_variant_arg(0, |this| Ok(this.read_ty(dcx))).unwrap();
ty::UnsizeUpcast(target_ty)
}
_ => panic!("bad enum variant for ty::UnsizeKind")
})
})
}).unwrap()
}
fn read_closure_kind<'b, 'c>(&mut self, _dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::ClosureKind
{
Decodable::decode(self).ok().unwrap()
}
fn read_closure_ty<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::ClosureTy<'tcx>
{
self.read_opaque(|this, doc| {
Ok(tydecode::parse_ty_closure_data(
doc.data,
dcx.cdata.cnum,
doc.start,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a)))
}).unwrap()
}
/// Converts a def-id that appears in a type. The correct
/// translation will depend on what kind of def-id this is.
/// This is a subtle point: type definitions are not
/// inlined into the current crate, so if the def-id names
/// a nominal type or type alias, then it should be
/// translated to refer to the source crate.
///
/// However, *type parameters* are cloned along with the function
/// they are attached to. So we should translate those def-ids
/// to refer to the new, cloned copy of the type parameter.
/// We only see references to free type parameters in the body of
/// an inlined function. In such cases, we need the def-id to
/// be a local id so that the TypeContents code is able to lookup
/// the relevant info in the ty_param_defs table.
///
/// *Region parameters*, unfortunately, are another kettle of fish.
/// In such cases, def_id's can appear in types to distinguish
/// shadowed bound regions and so forth. It doesn't actually
/// matter so much what we do to these, since regions are erased
/// at trans time, but it's good to keep them consistent just in
/// case. We translate them with `tr_def_id()` which will map
/// the crate numbers back to the original source crate.
///
/// Unboxed closures are cloned along with the function being
/// inlined, and all side tables use interned node IDs, so we
/// translate their def IDs accordingly.
///
/// It'd be really nice to refactor the type repr to not include
/// def-ids so that all these distinctions were unnecessary.
fn convert_def_id(&mut self,
dcx: &DecodeContext,
source: tydecode::DefIdSource,
did: ast::DefId)
-> ast::DefId {
let r = match source {
NominalType | TypeWithId | RegionParameter => dcx.tr_def_id(did),
TypeParameter | ClosureSource => dcx.tr_intern_def_id(did)
};
debug!("convert_def_id(source={:?}, did={:?})={:?}", source, did, r);
return r;
}
}
fn decode_side_tables(dcx: &DecodeContext,
ast_doc: rbml::Doc) {
let tbl_doc = ast_doc.get(c::tag_table as uint);
reader::docs(tbl_doc, |tag, entry_doc| {
let mut entry_dsr = reader::Decoder::new(entry_doc);
let id0: ast::NodeId = Decodable::decode(&mut entry_dsr).unwrap();
let id = dcx.tr_id(id0);
debug!(">> Side table document with tag 0x{:x} \
found for id {} (orig {})",
tag, id, id0);
let decoded_tag: Option<c::astencode_tag> = FromPrimitive::from_usize(tag);
match decoded_tag {
None => {
dcx.tcx.sess.bug(
&format!("unknown tag found in side tables: {:x}",
tag));
}
Some(value) => {
let val_dsr = &mut entry_dsr;
match value {
c::tag_table_def => {
let def = decode_def(dcx, val_dsr);
dcx.tcx.def_map.borrow_mut().insert(id, def::PathResolution {
base_def: def,
// This doesn't matter cross-crate.
last_private: LastMod(AllPublic),
depth: 0
});
}
c::tag_table_node_type => {
let ty = val_dsr.read_ty(dcx);
debug!("inserting ty for node {}: {}",
id, ty_to_string(dcx.tcx, ty));
dcx.tcx.node_types.borrow_mut().insert(id, ty);
}
c::tag_table_item_subst => {
let item_substs = ty::ItemSubsts {
substs: val_dsr.read_substs(dcx)
};
dcx.tcx.item_substs.borrow_mut().insert(
id, item_substs);
}
c::tag_table_freevars => {
let fv_info = val_dsr.read_to_vec(|val_dsr| {
Ok(val_dsr.read_freevar_entry(dcx))
}).unwrap().into_iter().collect();
dcx.tcx.freevars.borrow_mut().insert(id, fv_info);
}
c::tag_table_upvar_capture_map => {
let var_id: ast::NodeId = Decodable::decode(val_dsr).unwrap();
let upvar_id = ty::UpvarId {
var_id: dcx.tr_id(var_id),
closure_expr_id: id
};
let ub: ty::UpvarCapture = Decodable::decode(val_dsr).unwrap();
dcx.tcx.upvar_capture_map.borrow_mut().insert(upvar_id, ub.tr(dcx));
}
c::tag_table_tcache => {
let type_scheme = val_dsr.read_type_scheme(dcx);
let lid = ast::DefId { krate: ast::LOCAL_CRATE, node: id };
dcx.tcx.tcache.borrow_mut().insert(lid, type_scheme);
}
c::tag_table_param_defs => {
let bounds = val_dsr.read_type_param_def(dcx);
dcx.tcx.ty_param_defs.borrow_mut().insert(id, bounds);
}
c::tag_table_method_map => {
let (adjustment, method) = val_dsr.read_method_callee(dcx);
let method_call = MethodCall {
expr_id: id,
adjustment: adjustment
};
dcx.tcx.method_map.borrow_mut().insert(method_call, method);
}
c::tag_table_object_cast_map => {
let trait_ref = val_dsr.read_poly_trait_ref(dcx);
dcx.tcx.object_cast_map.borrow_mut()
.insert(id, trait_ref);
}
c::tag_table_adjustments => {
let adj: ty::AutoAdjustment = val_dsr.read_auto_adjustment(dcx);
dcx.tcx.adjustments.borrow_mut().insert(id, adj);
}
c::tag_table_closure_tys => {
let closure_ty =
val_dsr.read_closure_ty(dcx);
dcx.tcx.closure_tys.borrow_mut().insert(ast_util::local_def(id),
closure_ty);
}
c::tag_table_closure_kinds => {
let closure_kind =
val_dsr.read_closure_kind(dcx);
dcx.tcx.closure_kinds.borrow_mut().insert(ast_util::local_def(id),
closure_kind);
}
c::tag_table_const_qualif => {
let qualif: ConstQualif = Decodable::decode(val_dsr).unwrap();
dcx.tcx.const_qualif_map.borrow_mut().insert(id, qualif);
}
_ => {
dcx.tcx.sess.bug(
&format!("unknown tag found in side tables: {:x}",
tag));
}
}
}
}
debug!(">< Side table doc loaded");
true
});
}
// ______________________________________________________________________
// Testing of astencode_gen
#[cfg(test)]
fn encode_item_ast(rbml_w: &mut Encoder, item: &ast::Item) {
rbml_w.start_tag(c::tag_tree as uint);
(*item).encode(rbml_w);
rbml_w.end_tag();
}
#[cfg(test)]
fn decode_item_ast(par_doc: rbml::Doc) -> ast::Item {
let chi_doc = par_doc.get(c::tag_tree as uint);
let mut d = reader::Decoder::new(chi_doc);
Decodable::decode(&mut d).unwrap()
}
#[cfg(test)]
trait fake_ext_ctxt {
fn cfg(&self) -> ast::CrateConfig;
fn parse_sess<'a>(&'a self) -> &'a parse::ParseSess;
fn call_site(&self) -> Span;
fn ident_of(&self, st: &str) -> ast::Ident;
}
#[cfg(test)]
impl fake_ext_ctxt for parse::ParseSess {
fn cfg(&self) -> ast::CrateConfig {
Vec::new()
}
fn parse_sess<'a>(&'a self) -> &'a parse::ParseSess { self }
fn call_site(&self) -> Span {
codemap::Span {
lo: codemap::BytePos(0),
hi: codemap::BytePos(0),
expn_id: codemap::NO_EXPANSION
}
}
fn ident_of(&self, st: &str) -> ast::Ident {
token::str_to_ident(st)
}
}
#[cfg(test)]
fn mk_ctxt() -> parse::ParseSess {
parse::new_parse_sess()
}
#[cfg(test)]
fn roundtrip(in_item: Option<P<ast::Item>>) {
let in_item = in_item.unwrap();
let mut wr = Cursor::new(Vec::new());
encode_item_ast(&mut Encoder::new(&mut wr), &*in_item);
let rbml_doc = rbml::Doc::new(wr.get_ref());
let out_item = decode_item_ast(rbml_doc);
assert!(*in_item == out_item);
}
#[test]
fn test_basic() {
let cx = mk_ctxt();
roundtrip(quote_item!(&cx,
fn foo() {}
));
}
/* NOTE: When there's a snapshot, update this (yay quasiquoter!)
#[test]
fn test_smalltalk() {
let cx = mk_ctxt();
roundtrip(quote_item!(&cx,
fn foo() -> int { 3 + 4 } // first smalltalk program ever executed.
));
}
*/
#[test]
fn test_more() {
let cx = mk_ctxt();
roundtrip(quote_item!(&cx,
fn foo(x: uint, y: uint) -> uint {
let z = x + y;
return z;
}
));
}
#[test]
fn test_simplification() {
let cx = mk_ctxt();
let item = quote_item!(&cx,
fn new_int_alist<B>() -> alist<int, B> {
fn eq_int(a: int, b: int) -> bool { a == b }
return alist {eq_fn: eq_int, data: Vec::new()};
}
).unwrap();
let item_in = e::IIItemRef(&*item);
let item_out = simplify_ast(item_in);
let item_exp = ast::IIItem(quote_item!(&cx,
fn new_int_alist<B>() -> alist<int, B> {
return alist {eq_fn: eq_int, data: Vec::new()};
}
).unwrap());
match (item_out, item_exp) {
(ast::IIItem(item_out), ast::IIItem(item_exp)) => {
assert!(pprust::item_to_string(&*item_out) ==
pprust::item_to_string(&*item_exp));
}
_ => panic!()
}
}
| { } |
case.py | """
:codeauthor: Pedro Algarvio ([email protected])
====================================
Custom Salt TestCase Implementations
====================================
Custom reusable :class:`TestCase<python2:unittest.TestCase>`
implementations.
"""
import errno
import io
import json
import logging
import os
import re
import subprocess
import sys
import tempfile
import textwrap
import time
from datetime import datetime, timedelta
import pytest
import salt.utils.files
from saltfactories.utils.processes import terminate_process
from tests.support.cli_scripts import ScriptPathMixin
from tests.support.helpers import SKIP_IF_NOT_RUNNING_PYTEST, RedirectStdStreams
from tests.support.mixins import ( # pylint: disable=unused-import
AdaptedConfigurationTestCaseMixin,
SaltClientTestCaseMixin,
SaltMultimasterClientTestCaseMixin,
)
from tests.support.runtests import RUNTIME_VARS
from tests.support.unit import TestCase
STATE_FUNCTION_RUNNING_RE = re.compile(
r"""The function (?:"|')(?P<state_func>.*)(?:"|') is running as PID """
r"(?P<pid>[\d]+) and was started at (?P<date>.*) with jid (?P<jid>[\d]+)"
)
log = logging.getLogger(__name__)
class ShellCase(TestCase, AdaptedConfigurationTestCaseMixin, ScriptPathMixin):
"""
Execute a test for a shell command
"""
RUN_TIMEOUT = 30
def run_salt(
self,
arg_str,
with_retcode=False,
catch_stderr=False,
timeout=None,
popen_kwargs=None,
config_dir=None,
):
r'''
Run the ``salt`` CLI tool with the provided arguments
.. code-block:: python
class MatchTest(ShellCase):
def test_list(self):
"""
test salt -L matcher
"""
data = self.run_salt('-L minion test.ping')
data = '\n'.join(data)
self.assertIn('minion', data)
'''
if timeout is None:
timeout = self.RUN_TIMEOUT
arg_str = "-t {} {}".format(timeout, arg_str)
return self.run_script(
"salt",
arg_str,
with_retcode=with_retcode,
catch_stderr=catch_stderr,
timeout=timeout,
config_dir=config_dir,
)
def run_ssh(
self,
arg_str,
with_retcode=False,
catch_stderr=False,
timeout=None,
wipe=False,
raw=False,
roster_file=None,
ssh_opts="",
log_level="error",
config_dir=None,
**kwargs
):
"""
Execute salt-ssh
"""
if timeout is None:
timeout = self.RUN_TIMEOUT
if not roster_file:
roster_file = os.path.join(RUNTIME_VARS.TMP_CONF_DIR, "roster")
arg_str = (
"{wipe} {raw} -l {log_level} --ignore-host-keys --priv {client_key} --roster-file "
"{roster_file} {ssh_opts} localhost {arg_str} --out=json"
).format(
wipe=" -W" if wipe else "",
raw=" -r" if raw else "",
log_level=log_level,
client_key=os.path.join(RUNTIME_VARS.TMP_SSH_CONF_DIR, "client_key"),
roster_file=roster_file,
ssh_opts=ssh_opts,
arg_str=arg_str,
)
ret = self.run_script(
"salt-ssh",
arg_str,
with_retcode=with_retcode,
catch_stderr=catch_stderr,
raw=True,
timeout=timeout,
config_dir=config_dir,
**kwargs
)
log.debug("Result of run_ssh for command '%s %s': %s", arg_str, kwargs, ret)
return ret
def run_run(
self,
arg_str,
with_retcode=False,
catch_stderr=False,
asynchronous=False,
timeout=None,
config_dir=None,
**kwargs
):
"""
Execute salt-run
"""
if timeout is None:
timeout = self.RUN_TIMEOUT
asynchronous = kwargs.get("async", asynchronous)
arg_str = "{async_flag} -t {timeout} {}".format(
arg_str, timeout=timeout, async_flag=" --async" if asynchronous else "",
)
ret = self.run_script(
"salt-run",
arg_str,
with_retcode=with_retcode,
catch_stderr=catch_stderr,
timeout=timeout,
config_dir=config_dir,
)
log.debug("Result of run_run for command '%s': %s", arg_str, ret)
return ret
def run_run_plus(self, fun, *arg, **kwargs):
"""
Execute the runner function and return the return data and output in a dict
"""
output = kwargs.pop("_output", None)
opts_overrides = kwargs.pop("opts_overrides", None)
ret = {"fun": fun}
# Late import
import salt.config
import salt.output
import salt.runner
opts = salt.config.client_config(self.get_config_file_path("master"))
if opts_overrides:
opts.update(opts_overrides)
opts_arg = list(arg)
if kwargs:
opts_arg.append({"__kwarg__": True})
opts_arg[-1].update(kwargs)
opts.update({"doc": False, "fun": fun, "arg": opts_arg})
with RedirectStdStreams():
runner = salt.runner.Runner(opts)
ret["return"] = runner.run()
try:
ret["jid"] = runner.jid
except AttributeError:
ret["jid"] = None
# Compile output
# TODO: Support outputters other than nested
opts["color"] = False
opts["output_file"] = io.StringIO()
try:
salt.output.display_output(ret["return"], opts=opts, out=output)
out = opts["output_file"].getvalue()
if output is None:
out = out.splitlines()
elif output == "json":
out = json.loads(out)
ret["out"] = out
finally:
opts["output_file"].close()
log.debug(
"Result of run_run_plus for fun '%s' with arg '%s': %s", fun, opts_arg, ret
)
return ret
def run_key(self, arg_str, catch_stderr=False, with_retcode=False, config_dir=None):
"""
Execute salt-key
"""
return self.run_script(
"salt-key",
arg_str,
catch_stderr=catch_stderr,
with_retcode=with_retcode,
config_dir=config_dir,
)
def run_cp(
self,
arg_str,
with_retcode=False,
catch_stderr=False,
timeout=None,
config_dir=None,
):
"""
Execute salt-cp
"""
if timeout is None:
timeout = self.RUN_TIMEOUT
# Note: not logging result of run_cp because it will log a bunch of
# bytes which will not be very helpful.
return self.run_script(
"salt-cp",
arg_str,
with_retcode=with_retcode,
catch_stderr=catch_stderr,
timeout=timeout,
config_dir=config_dir,
)
def run_call(
self,
arg_str,
with_retcode=False,
catch_stderr=False,
local=False,
timeout=None,
config_dir=None,
):
if timeout is None:
timeout = self.RUN_TIMEOUT
if not config_dir:
config_dir = RUNTIME_VARS.TMP_MINION_CONF_DIR
arg_str = "{} {}".format("--local" if local else "", arg_str)
ret = self.run_script(
"salt-call",
arg_str,
with_retcode=with_retcode,
catch_stderr=catch_stderr,
timeout=timeout,
config_dir=config_dir,
)
log.debug("Result of run_call for command '%s': %s", arg_str, ret)
return ret
def run_function(
self,
function,
arg=(),
with_retcode=False,
catch_stderr=False,
local=False,
timeout=RUN_TIMEOUT,
**kwargs
):
"""
Execute function with salt-call.
This function is added for compatibility with ModuleCase. This makes it possible to use
decorators like @with_system_user.
"""
arg_str = "{} {} {}".format(
function,
" ".join(str(arg_) for arg_ in arg),
" ".join("{}={}".format(*item) for item in kwargs.items()),
)
return self.run_call(arg_str, with_retcode, catch_stderr, local, timeout)
def run_cloud(self, arg_str, catch_stderr=False, timeout=None, config_dir=None):
"""
Execute salt-cloud
"""
if timeout is None:
timeout = self.RUN_TIMEOUT
ret = self.run_script(
"salt-cloud", arg_str, catch_stderr, timeout=timeout, config_dir=config_dir
)
log.debug("Result of run_cloud for command '%s': %s", arg_str, ret)
return ret
def run_spm(
self,
arg_str,
with_retcode=False,
catch_stderr=False,
timeout=None,
config_dir=None,
):
"""
Execute spm
"""
if timeout is None:
timeout = self.RUN_TIMEOUT
ret = self.run_script(
"spm",
arg_str,
with_retcode=with_retcode,
catch_stderr=catch_stderr,
timeout=timeout,
config_dir=config_dir,
)
log.debug("Result of run_spm for command '%s': %s", arg_str, ret)
return ret
def | (
self,
script,
arg_str,
catch_stderr=False,
with_retcode=False,
catch_timeout=False,
# FIXME A timeout of zero or disabling timeouts may not return results!
timeout=15,
raw=False,
popen_kwargs=None,
log_output=None,
config_dir=None,
**kwargs
):
"""
Execute a script with the given argument string
The ``log_output`` argument is ternary, it can be True, False, or None.
If the value is boolean, then it forces the results to either be logged
or not logged. If it is None, then the return code of the subprocess
determines whether or not to log results.
"""
import salt.utils.platform
script_path = self.get_script_path(script)
if not os.path.isfile(script_path):
return False
popen_kwargs = popen_kwargs or {}
if salt.utils.platform.is_windows():
cmd = "python "
if "cwd" not in popen_kwargs:
popen_kwargs["cwd"] = os.getcwd()
if "env" not in popen_kwargs:
popen_kwargs["env"] = os.environ.copy()
popen_kwargs["env"]["PYTHONPATH"] = RUNTIME_VARS.CODE_DIR
else:
cmd = "PYTHONPATH="
python_path = os.environ.get("PYTHONPATH", None)
if python_path is not None:
cmd += "{}:".format(python_path)
if sys.version_info[0] < 3:
cmd += "{} ".format(":".join(sys.path[1:]))
else:
cmd += "{} ".format(":".join(sys.path[0:]))
cmd += "python{}.{} ".format(*sys.version_info)
cmd += "{} --config-dir={} {} ".format(
script_path, config_dir or RUNTIME_VARS.TMP_CONF_DIR, arg_str
)
if kwargs:
# late import
import salt.utils.json
for key, value in kwargs.items():
cmd += "'{}={} '".format(key, salt.utils.json.dumps(value))
tmp_file = tempfile.SpooledTemporaryFile()
popen_kwargs = dict(
{"shell": True, "stdout": tmp_file, "universal_newlines": True},
**popen_kwargs
)
if catch_stderr is True:
popen_kwargs["stderr"] = subprocess.PIPE
if not sys.platform.lower().startswith("win"):
popen_kwargs["close_fds"] = True
def detach_from_parent_group():
# detach from parent group (no more inherited signals!)
os.setpgrp()
popen_kwargs["preexec_fn"] = detach_from_parent_group
def format_return(retcode, stdout, stderr=None, timed_out=False):
"""
DRY helper to log script result if it failed, and then return the
desired output based on whether or not stderr was desired, and
wither or not a retcode was desired.
"""
log_func = log.debug
if timed_out:
log.error(
"run_script timed out after %d seconds (process killed)", timeout
)
log_func = log.error
if log_output is True or timed_out or (log_output is None and retcode != 0):
log_func(
"run_script results for: %s %s\n"
"return code: %s\n"
"stdout:\n"
"%s\n\n"
"stderr:\n"
"%s",
script,
arg_str,
retcode,
stdout,
stderr,
)
stdout = stdout or ""
stderr = stderr or ""
if not raw:
stdout = stdout.splitlines()
stderr = stderr.splitlines()
ret = [stdout]
if catch_stderr:
ret.append(stderr)
if with_retcode:
ret.append(retcode)
if catch_timeout:
ret.append(timed_out)
return ret[0] if len(ret) == 1 else tuple(ret)
log.debug("Running Popen(%r, %r)", cmd, popen_kwargs)
process = subprocess.Popen(cmd, **popen_kwargs)
if timeout is not None:
stop_at = datetime.now() + timedelta(seconds=timeout)
term_sent = False
while True:
process.poll()
time.sleep(0.1)
if datetime.now() <= stop_at:
# We haven't reached the timeout yet
if process.returncode is not None:
break
else:
terminate_process(process.pid, kill_children=True)
return format_return(
process.returncode, *process.communicate(), timed_out=True
)
tmp_file.seek(0)
try:
out = tmp_file.read().decode(__salt_system_encoding__)
except (NameError, UnicodeDecodeError):
# Let's cross our fingers and hope for the best
out = tmp_file.read().decode("utf-8")
if catch_stderr:
if sys.version_info < (2, 7):
# On python 2.6, the subprocess'es communicate() method uses
# select which, is limited by the OS to 1024 file descriptors
# We need more available descriptors to run the tests which
# need the stderr output.
# So instead of .communicate() we wait for the process to
# finish, but, as the python docs state "This will deadlock
# when using stdout=PIPE and/or stderr=PIPE and the child
# process generates enough output to a pipe such that it
# blocks waiting for the OS pipe buffer to accept more data.
# Use communicate() to avoid that." <- a catch, catch situation
#
# Use this work around were it's needed only, python 2.6
process.wait()
err = process.stderr.read()
else:
_, err = process.communicate()
# Force closing stderr/stdout to release file descriptors
if process.stdout is not None:
process.stdout.close()
if process.stderr is not None:
process.stderr.close()
# pylint: disable=maybe-no-member
try:
return format_return(process.returncode, out, err or "")
finally:
try:
if os.path.exists(tmp_file.name):
if isinstance(tmp_file.name, str):
# tmp_file.name is an int when using SpooledTemporaryFiles
# int types cannot be used with os.remove() in Python 3
os.remove(tmp_file.name)
else:
# Clean up file handles
tmp_file.close()
process.terminate()
except OSError as err:
# process already terminated
pass
# pylint: enable=maybe-no-member
# TODO Remove this?
process.communicate()
if process.stdout is not None:
process.stdout.close()
try:
return format_return(process.returncode, out)
finally:
try:
if os.path.exists(tmp_file.name):
if isinstance(tmp_file.name, str):
# tmp_file.name is an int when using SpooledTemporaryFiles
# int types cannot be used with os.remove() in Python 3
os.remove(tmp_file.name)
else:
# Clean up file handles
tmp_file.close()
process.terminate()
except OSError as err:
# process already terminated
pass
class MultiMasterTestShellCase(ShellCase):
"""
'''
Execute a test for a shell command when running multi-master tests
"""
@property
def config_dir(self):
return RUNTIME_VARS.TMP_MM_CONF_DIR
class SPMTestUserInterface:
"""
Test user interface to SPMClient
"""
def __init__(self):
self._status = []
self._confirm = []
self._error = []
def status(self, msg):
self._status.append(msg)
def confirm(self, action):
self._confirm.append(action)
def error(self, msg):
self._error.append(msg)
class SPMCase(TestCase, AdaptedConfigurationTestCaseMixin):
"""
Class for handling spm commands
"""
def _spm_build_files(self, config):
self.formula_dir = os.path.join(
" ".join(config["file_roots"]["base"]), "formulas"
)
self.formula_sls_dir = os.path.join(self.formula_dir, "apache")
self.formula_sls = os.path.join(self.formula_sls_dir, "apache.sls")
self.formula_file = os.path.join(self.formula_dir, "FORMULA")
dirs = [self.formula_dir, self.formula_sls_dir]
for f_dir in dirs:
os.makedirs(f_dir)
with salt.utils.files.fopen(self.formula_sls, "w") as fp:
fp.write(
textwrap.dedent(
"""\
install-apache:
pkg.installed:
- name: apache2
"""
)
)
with salt.utils.files.fopen(self.formula_file, "w") as fp:
fp.write(
textwrap.dedent(
"""\
name: apache
os: RedHat, Debian, Ubuntu, Suse, FreeBSD
os_family: RedHat, Debian, Suse, FreeBSD
version: 201506
release: 2
summary: Formula for installing Apache
description: Formula for installing Apache
"""
)
)
def _spm_config(self, assume_yes=True):
self._tmp_spm = tempfile.mkdtemp()
config = self.get_temp_config(
"minion",
**{
"spm_logfile": os.path.join(self._tmp_spm, "log"),
"spm_repos_config": os.path.join(self._tmp_spm, "etc", "spm.repos"),
"spm_cache_dir": os.path.join(self._tmp_spm, "cache"),
"spm_build_dir": os.path.join(self._tmp_spm, "build"),
"spm_build_exclude": ["apache/.git"],
"spm_db_provider": "sqlite3",
"spm_files_provider": "local",
"spm_db": os.path.join(self._tmp_spm, "packages.db"),
"extension_modules": os.path.join(self._tmp_spm, "modules"),
"file_roots": {"base": [self._tmp_spm]},
"formula_path": os.path.join(self._tmp_spm, "salt"),
"pillar_path": os.path.join(self._tmp_spm, "pillar"),
"reactor_path": os.path.join(self._tmp_spm, "reactor"),
"assume_yes": True if assume_yes else False,
"force": False,
"verbose": False,
"cache": "localfs",
"cachedir": os.path.join(self._tmp_spm, "cache"),
"spm_repo_dups": "ignore",
"spm_share_dir": os.path.join(self._tmp_spm, "share"),
}
)
import salt.utils.yaml
if not os.path.isdir(config["formula_path"]):
os.makedirs(config["formula_path"])
with salt.utils.files.fopen(os.path.join(self._tmp_spm, "spm"), "w") as fp:
salt.utils.yaml.safe_dump(config, fp)
return config
def _spm_create_update_repo(self, config):
build_spm = self.run_spm("build", self.config, self.formula_dir)
c_repo = self.run_spm("create_repo", self.config, self.config["spm_build_dir"])
repo_conf_dir = self.config["spm_repos_config"] + ".d"
os.makedirs(repo_conf_dir)
with salt.utils.files.fopen(os.path.join(repo_conf_dir, "spm.repo"), "w") as fp:
fp.write(
textwrap.dedent(
"""\
local_repo:
url: file://{}
""".format(
self.config["spm_build_dir"]
)
)
)
u_repo = self.run_spm("update_repo", self.config)
def _spm_client(self, config):
import salt.spm
self.ui = SPMTestUserInterface()
client = salt.spm.SPMClient(self.ui, config)
return client
def run_spm(self, cmd, config, arg=None):
client = self._spm_client(config)
client.run([cmd, arg])
client._close()
return self.ui._status
class ModuleCase(TestCase, SaltClientTestCaseMixin):
"""
Execute a module function
"""
def wait_for_all_jobs(self, minions=("minion", "sub_minion"), sleep=0.3):
"""
Wait for all jobs currently running on the list of minions to finish
"""
for minion in minions:
while True:
ret = self.run_function(
"saltutil.running", minion_tgt=minion, timeout=300
)
if ret:
log.debug("Waiting for minion's jobs: %s", minion)
time.sleep(sleep)
else:
break
def minion_run(self, _function, *args, **kw):
"""
Run a single salt function on the 'minion' target and condition
the return down to match the behavior of the raw function call
"""
return self.run_function(_function, args, **kw)
def run_function(
self,
function,
arg=(),
minion_tgt="minion",
timeout=300,
master_tgt=None,
**kwargs
):
"""
Run a single salt function and condition the return down to match the
behavior of the raw function call
"""
known_to_return_none = (
"data.get",
"file.chown",
"file.chgrp",
"pkg.refresh_db",
"ssh.recv_known_host_entries",
"time.sleep",
"grains.delkey",
"grains.delval",
)
if "f_arg" in kwargs:
kwargs["arg"] = kwargs.pop("f_arg")
if "f_timeout" in kwargs:
kwargs["timeout"] = kwargs.pop("f_timeout")
client = self.client if master_tgt is None else self.clients[master_tgt]
log.debug(
"Running client.cmd(minion_tgt=%r, function=%r, arg=%r, timeout=%r, kwarg=%r)",
minion_tgt,
function,
arg,
timeout,
kwargs,
)
orig = client.cmd(minion_tgt, function, arg, timeout=timeout, kwarg=kwargs)
if RUNTIME_VARS.PYTEST_SESSION:
fail_or_skip_func = self.fail
else:
fail_or_skip_func = self.skipTest
if minion_tgt not in orig:
fail_or_skip_func(
"WARNING(SHOULD NOT HAPPEN #1935): Failed to get a reply "
"from the minion '{}'. Command output: {}".format(minion_tgt, orig)
)
elif orig[minion_tgt] is None and function not in known_to_return_none:
fail_or_skip_func(
"WARNING(SHOULD NOT HAPPEN #1935): Failed to get '{}' from "
"the minion '{}'. Command output: {}".format(function, minion_tgt, orig)
)
# Try to match stalled state functions
orig[minion_tgt] = self._check_state_return(orig[minion_tgt])
return orig[minion_tgt]
def run_state(self, function, **kwargs):
"""
Run the state.single command and return the state return structure
"""
ret = self.run_function("state.single", [function], **kwargs)
return self._check_state_return(ret)
def _check_state_return(self, ret):
if isinstance(ret, dict):
# This is the supposed return format for state calls
return ret
if isinstance(ret, list):
jids = []
# These are usually errors
for item in ret[:]:
if not isinstance(item, str):
# We don't know how to handle this
continue
match = STATE_FUNCTION_RUNNING_RE.match(item)
if not match:
# We don't know how to handle this
continue
jid = match.group("jid")
if jid in jids:
continue
jids.append(jid)
job_data = self.run_function("saltutil.find_job", [jid])
job_kill = self.run_function("saltutil.kill_job", [jid])
msg = (
"A running state.single was found causing a state lock. "
"Job details: '{}' Killing Job Returned: '{}'".format(
job_data, job_kill
)
)
ret.append(
"[TEST SUITE ENFORCED]{}" "[/TEST SUITE ENFORCED]".format(msg)
)
return ret
class MultimasterModuleCase(ModuleCase, SaltMultimasterClientTestCaseMixin):
"""
Execute a module function
"""
def run_function(
self,
function,
arg=(),
minion_tgt="mm-minion",
timeout=300,
master_tgt="mm-master",
**kwargs
):
"""
Run a single salt function and condition the return down to match the
behavior of the raw function call
"""
known_to_return_none = (
"data.get",
"file.chown",
"file.chgrp",
"pkg.refresh_db",
"ssh.recv_known_host_entries",
"time.sleep",
)
if minion_tgt == "mm-sub-minion":
known_to_return_none += ("mine.update",)
if "f_arg" in kwargs:
kwargs["arg"] = kwargs.pop("f_arg")
if "f_timeout" in kwargs:
kwargs["timeout"] = kwargs.pop("f_timeout")
if master_tgt is None:
client = self.clients["mm-master"]
elif isinstance(master_tgt, int):
client = self.clients[list(self.clients)[master_tgt]]
else:
client = self.clients[master_tgt]
orig = client.cmd(minion_tgt, function, arg, timeout=timeout, kwarg=kwargs)
if RUNTIME_VARS.PYTEST_SESSION:
fail_or_skip_func = self.fail
else:
fail_or_skip_func = self.skipTest
if minion_tgt not in orig:
fail_or_skip_func(
"WARNING(SHOULD NOT HAPPEN #1935): Failed to get a reply "
"from the minion '{}'. Command output: {}".format(minion_tgt, orig)
)
elif orig[minion_tgt] is None and function not in known_to_return_none:
fail_or_skip_func(
"WARNING(SHOULD NOT HAPPEN #1935): Failed to get '{}' from "
"the minion '{}'. Command output: {}".format(function, minion_tgt, orig)
)
# Try to match stalled state functions
orig[minion_tgt] = self._check_state_return(orig[minion_tgt])
return orig[minion_tgt]
def run_function_all_masters(
self, function, arg=(), minion_tgt="mm-minion", timeout=300, **kwargs
):
"""
Run a single salt function from all the masters in multimaster environment
and condition the return down to match the behavior of the raw function call
"""
ret = []
for master_id in self.clients:
ret.append(
self.run_function(
function,
arg=arg,
minion_tgt=minion_tgt,
timeout=timeout,
master_tgt=master_id,
**kwargs
)
)
return ret
class SyndicCase(TestCase, SaltClientTestCaseMixin):
"""
Execute a syndic based execution test
"""
_salt_client_config_file_name_ = "syndic_master"
def run_function(self, function, arg=(), timeout=90):
"""
Run a single salt function and condition the return down to match the
behavior of the raw function call
"""
orig = self.client.cmd("minion", function, arg, timeout=timeout)
if RUNTIME_VARS.PYTEST_SESSION:
fail_or_skip_func = self.fail
else:
fail_or_skip_func = self.skipTest
if "minion" not in orig:
fail_or_skip_func(
"WARNING(SHOULD NOT HAPPEN #1935): Failed to get a reply "
"from the minion. Command output: {}".format(orig)
)
return orig["minion"]
@SKIP_IF_NOT_RUNNING_PYTEST
@pytest.mark.usefixtures("salt_ssh_cli")
@pytest.mark.requires_sshd_server
class SSHCase(ShellCase):
"""
Execute a command via salt-ssh
"""
def _arg_str(self, function, arg):
return "{} {}".format(function, " ".join(arg))
# pylint: disable=arguments-differ
def run_function(
self, function, arg=(), timeout=180, wipe=True, raw=False, **kwargs
):
"""
We use a 180s timeout here, which some slower systems do end up needing
"""
ret = self.run_ssh(
self._arg_str(function, arg), timeout=timeout, wipe=wipe, raw=raw, **kwargs
)
log.debug(
"SSHCase run_function executed %s with arg %s and kwargs %s",
function,
arg,
kwargs,
)
log.debug("SSHCase JSON return: %s", ret)
# Late import
import salt.utils.json
try:
return salt.utils.json.loads(ret)["localhost"]
except Exception: # pylint: disable=broad-except
return ret
# pylint: enable=arguments-differ
def custom_roster(self, new_roster, data):
"""
helper method to create a custom roster to use for a ssh test
"""
roster = os.path.join(RUNTIME_VARS.TMP_CONF_DIR, "roster")
with salt.utils.files.fopen(roster, "r") as fp_:
conf = salt.utils.yaml.safe_load(fp_)
conf["localhost"].update(data)
with salt.utils.files.fopen(new_roster, "w") as fp_:
salt.utils.yaml.safe_dump(conf, fp_)
class ClientCase(AdaptedConfigurationTestCaseMixin, TestCase):
"""
A base class containing relevant options for starting the various Salt
Python API entrypoints
"""
def get_opts(self):
# Late import
import salt.config
return salt.config.client_config(self.get_config_file_path("master"))
def mkdir_p(self, path):
try:
os.makedirs(path)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
| run_script |
dcel.rs | // Copyright 2017 The Spade Developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/// Handle to a vertex.
///
/// This handle is "fixed", meaning it is intended to be used for
/// mutation (e.g., removing a vertex) or storage (e.g., storing
/// references to vertices for later usage).
pub type FixedVertexHandle = usize;
/// Handle to an edge.
///
/// This handle is "fixed", meaning it is intended to be used
/// for storage. Note that removal operations will invalidate
/// edge handles.
pub type FixedEdgeHandle = usize;
/// Handle to a face.
///
/// This handle is "fixed", meaning it is intended to be used
/// for storage. Note that removal operations will invalidate
/// face handles.
pub type FixedFaceHandle = usize;
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct VertexRemovalResult<V> {
pub updated_vertex: Option<FixedVertexHandle>,
pub data: V,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "serde_serialize", derive(Serialize, Deserialize))]
struct FaceEntry {
adjacent_edge: Option<FixedEdgeHandle>,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "serde_serialize", derive(Serialize, Deserialize))]
struct VertexEntry<V> {
data: V,
out_edge: Option<FixedEdgeHandle>,
}
impl<V> VertexEntry<V> {
fn new(data: V) -> VertexEntry<V> {
VertexEntry {
data,
out_edge: None,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "serde_serialize", derive(Serialize, Deserialize))]
struct HalfEdgeEntry<T> {
next: FixedEdgeHandle,
prev: FixedEdgeHandle,
twin: FixedEdgeHandle,
origin: FixedVertexHandle,
face: FixedFaceHandle,
data: T,
}
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "serde_serialize", derive(Serialize, Deserialize))]
pub struct DCEL<V, E = ()> {
vertices: Vec<VertexEntry<V>>,
faces: Vec<FaceEntry>,
edges: Vec<HalfEdgeEntry<E>>,
}
impl<V> DCEL<V> {
pub fn new() -> Self {
Self::new_with_edge()
}
}
impl<V, E> DCEL<V, E>
where
E: Default,
{
pub fn new_with_edge() -> Self {
DCEL {
vertices: Vec::new(),
edges: Vec::new(),
faces: vec![FaceEntry {
adjacent_edge: None,
}],
}
}
pub fn num_vertices(&self) -> usize {
self.vertices.len()
}
pub fn num_edges(&self) -> usize {
self.edges.len() / 2
}
pub fn num_faces(&self) -> usize {
self.faces.len()
}
pub fn vertex(&self, handle: FixedVertexHandle) -> VertexHandle<V, E> {
VertexHandle::new(self, handle)
}
pub fn edge(&self, handle: FixedEdgeHandle) -> EdgeHandle<V, E> {
EdgeHandle::new(self, handle)
}
pub fn edge_data(&self, handle: FixedEdgeHandle) -> &E {
&self.edges[handle].data
}
pub fn edge_data_mut(&mut self, handle: FixedEdgeHandle) -> &mut E {
&mut self.edges[handle].data
}
pub fn face(&self, handle: FixedFaceHandle) -> FaceHandle<V, E> {
FaceHandle::new(self, handle)
}
pub fn vertex_mut(&mut self, handle: FixedVertexHandle) -> &mut V {
&mut self.vertices[handle].data
}
pub fn insert_vertex(&mut self, vertex: V) -> FixedVertexHandle {
self.vertices.push(VertexEntry::new(vertex));
self.vertices.len() - 1
}
pub fn get_edge_from_neighbors(
&self,
from: FixedVertexHandle,
to: FixedVertexHandle,
) -> Option<EdgeHandle<V, E>> {
let vertex = self.vertex(from);
for edge in vertex.ccw_out_edges() {
if edge.to().fix() == to {
return Some(edge);
}
}
None
}
pub fn connect_two_isolated_vertices(
&mut self,
v0: FixedVertexHandle,
v1: FixedVertexHandle,
face: FixedFaceHandle,
) -> FixedEdgeHandle {
assert!(self.vertices[v0].out_edge.is_none(), "v0 is not isolated");
assert!(self.vertices[v1].out_edge.is_none(), "v1 is not isolated");
assert!(
self.faces[face].adjacent_edge.is_none(),
"face must not contain any adjacent edges"
);
let edge_index = self.edges.len();
let twin_index = edge_index + 1;
let edge = HalfEdgeEntry {
next: twin_index,
prev: twin_index,
twin: twin_index,
origin: v0,
face,
data: Default::default(),
};
self.edges.push(edge);
let twin = HalfEdgeEntry {
next: edge_index,
prev: edge_index,
twin: edge_index,
origin: v1,
face,
data: Default::default(),
};
self.edges.push(twin);
self.vertices[v0].out_edge = Some(edge_index);
self.vertices[v1].out_edge = Some(twin_index);
self.faces[face].adjacent_edge = Some(edge_index);
edge_index
}
pub fn update_vertex(&mut self, handle: FixedVertexHandle, data: V) {
self.vertices[handle].data = data;
}
pub fn edges(&self) -> EdgesIterator<V, E> {
EdgesIterator::new(&self)
}
pub fn vertices(&self) -> VerticesIterator<V, E> {
VerticesIterator::new(&self)
}
pub fn fixed_vertices(&self) -> FixedVerticesIterator {
0..self.num_vertices()
}
pub fn faces(&self) -> FacesIterator<V, E> {
FacesIterator::new(&self)
}
}
impl<V, E> DCEL<V, E>
where
E: Default + Copy,
{
pub fn connect_edge_to_isolated_vertex(
&mut self,
prev_handle: FixedEdgeHandle,
vertex: FixedVertexHandle,
) -> FixedEdgeHandle {
assert!(
self.vertices[vertex].out_edge.is_none(),
"Given vertex is not isolated"
);
let prev = self.edges[prev_handle];
let edge_index = self.edges.len();
let twin_index = edge_index + 1;
let edge = HalfEdgeEntry {
next: twin_index,
prev: prev_handle,
twin: twin_index,
origin: self.edges[prev.twin].origin,
face: prev.face,
data: Default::default(),
};
self.edges.push(edge);
let twin = HalfEdgeEntry {
next: prev.next,
prev: edge_index,
twin: edge_index,
origin: vertex,
face: prev.face,
data: Default::default(),
};
self.edges.push(twin);
self.edges[prev_handle].next = edge_index;
self.edges[prev.next].prev = twin_index;
self.vertices[vertex].out_edge = Some(twin_index);
edge_index
}
pub fn remove_vertex(
&mut self,
vertex_handle: FixedVertexHandle,
remaining_face: Option<FixedFaceHandle>,
) -> VertexRemovalResult<V> {
while let Some(out_edge) = self.vertices[vertex_handle].out_edge {
self.remove_edge(out_edge, remaining_face);
}
let data = self.vertices.swap_remove(vertex_handle).data;
let updated_vertex = if self.vertices.len() == vertex_handle {
None
} else {
// Update origin of all out edges
let to_update: Vec<_> = self
.vertex(vertex_handle)
.ccw_out_edges()
.map(|e| e.fix())
.collect();
for e in to_update {
self.edges[e].origin = vertex_handle;
}
Some(self.vertices.len())
};
VertexRemovalResult {
updated_vertex,
data,
}
}
pub fn connect_edge_to_edge(
&mut self,
prev_edge_handle: FixedEdgeHandle,
next_edge_handle: FixedEdgeHandle,
) -> FixedEdgeHandle {
let edge_index = self.edges.len();
let twin_index = edge_index + 1;
let next_edge = self.edges[next_edge_handle];
let prev_edge = self.edges[prev_edge_handle];
let edge = HalfEdgeEntry {
next: next_edge_handle,
prev: prev_edge_handle,
twin: twin_index,
origin: self.edges[prev_edge.twin].origin,
face: next_edge.face,
data: Default::default(),
};
self.edges.push(edge);
let twin = HalfEdgeEntry {
next: prev_edge.next,
prev: next_edge.prev,
twin: edge_index,
origin: next_edge.origin,
face: next_edge.face,
data: Default::default(),
};
self.edges.push(twin);
self.edges[next_edge_handle].prev = edge_index;
self.edges[prev_edge_handle].next = edge_index;
self.edges[next_edge.prev].next = twin_index;
self.edges[prev_edge.next].prev = twin_index;
edge_index
}
pub fn split_edge(
&mut self,
edge_handle: FixedEdgeHandle,
split_vertex: FixedVertexHandle,
) -> FixedEdgeHandle {
assert!(
self.vertices[split_vertex].out_edge.is_none(),
"Given vertex must be isolated"
);
let edge = self.edges[edge_handle];
let twin = self.edges[edge.twin];
let is_isolated = edge.next == edge.twin;
let new_edge_index = self.edges.len();
let new_twin_index = new_edge_index + 1;
let (new_edge_next, new_twin_prev) = if is_isolated {
(new_twin_index, new_edge_index)
} else {
(edge.next, twin.prev)
};
let new_edge = HalfEdgeEntry {
next: new_edge_next,
prev: edge_handle,
twin: new_twin_index,
origin: split_vertex,
face: edge.face,
data: Default::default(),
};
let new_twin = HalfEdgeEntry {
next: edge.twin,
prev: new_twin_prev,
twin: new_edge_index,
origin: twin.origin,
face: twin.face,
data: Default::default(),
};
if !is_isolated {
self.edges[edge.next].prev = new_edge_index;
self.edges[twin.prev].next = new_twin_index;
}
self.edges[edge.twin].prev = new_twin_index;
self.edges[edge_handle].next = new_edge_index;
self.edges[edge.twin].origin = split_vertex;
self.vertices[twin.origin].out_edge = Some(new_twin_index);
self.vertices[split_vertex].out_edge = Some(new_edge_index);
self.edges.push(new_edge);
self.edges.push(new_twin);
new_edge_index
}
pub fn remove_edge(
&mut self,
edge_handle: FixedEdgeHandle,
remaining_face: Option<FixedFaceHandle>,
) {
let edge = self.edges[edge_handle];
let twin = self.edges[edge.twin];
self.edges[edge.prev].next = twin.next;
self.edges[twin.next].prev = edge.prev;
self.edges[edge.next].prev = twin.prev;
self.edges[twin.prev].next = edge.next;
let (to_remove, to_keep) = if remaining_face == Some(twin.face) {
(edge, twin)
} else {
(twin, edge)
};
if edge.prev == edge.twin && edge.next == edge.twin {
// We remove an isolated edge
self.faces[to_keep.face].adjacent_edge = None;
} else {
let new_adjacent_edge = if edge.prev != edge.twin {
edge.prev
} else {
edge.next
};
self.faces[to_keep.face].adjacent_edge = Some(new_adjacent_edge);
self.edges[new_adjacent_edge].face = to_keep.face;
}
if edge.prev == edge.twin {
self.vertices[edge.origin].out_edge = None;
} else {
self.vertices[edge.origin].out_edge = Some(twin.next);
}
if edge.next == edge.twin {
self.vertices[twin.origin].out_edge = None;
} else {
self.vertices[twin.origin].out_edge = Some(edge.next);
}
// We must remove the larger index first to prevent the other edge
// from being updated
if edge_handle > edge.twin {
self.swap_out_edge(edge_handle);
self.swap_out_edge(edge.twin);
} else {
self.swap_out_edge(edge.twin);
self.swap_out_edge(edge_handle);
}
if edge.face != twin.face {
let neighs: Vec<_> = self
.face(to_keep.face)
.adjacent_edges()
.map(|e| e.fix())
.collect();
for n in neighs {
self.edges[n].face = to_keep.face
}
self.remove_face(to_remove.face);
}
}
fn remove_face(&mut self, face: FixedFaceHandle) {
self.faces.swap_remove(face);
if self.faces.len() > face {
let neighs: Vec<_> = self.face(face).adjacent_edges().map(|e| e.fix()).collect();
for n in neighs {
self.edges[n].face = face;
}
}
}
fn swap_out_edge(&mut self, edge_handle: FixedEdgeHandle) {
self.edges.swap_remove(edge_handle);
if self.edges.len() > edge_handle {
// Update edge index
let old_handle = self.edges.len();
let edge = self.edges[edge_handle];
self.edges[edge.next].prev = edge_handle;
self.edges[edge.prev].next = edge_handle;
self.edges[edge.twin].twin = edge_handle;
if self.vertices[edge.origin].out_edge == Some(old_handle) {
self.vertices[edge.origin].out_edge = Some(edge_handle);
}
self.faces[edge.face].adjacent_edge = Some(edge_handle);
}
}
pub fn create_face(
&mut self,
prev_edge_handle: FixedEdgeHandle,
next_edge_handle: FixedEdgeHandle,
) -> FixedEdgeHandle {
let edge_index = self.connect_edge_to_edge(prev_edge_handle, next_edge_handle);
let new_face = self.num_faces();
self.faces.push(FaceEntry {
adjacent_edge: Some(edge_index),
});
// Set the face to the left of the new edge
let mut cur_edge = edge_index;
loop {
self.edges[cur_edge].face = new_face;
cur_edge = self.edges[cur_edge].next;
if cur_edge == edge_index {
break;
}
}
let twin = self.edges[edge_index].twin;
self.faces[self.edges[twin].face].adjacent_edge = Some(twin);
edge_index
}
pub fn flip_cw(&mut self, e: FixedEdgeHandle) {
let en = self.edges[e].next;
let ep = self.edges[e].prev;
let t = self.edges[e].twin;
let tn = self.edges[t].next;
let tp = self.edges[t].prev;
self.edges[en].next = e;
self.edges[en].prev = tp;
self.edges[e].next = tp;
self.edges[e].prev = en;
self.edges[tp].next = en;
self.edges[tp].prev = e;
self.edges[tn].next = t;
self.edges[tn].prev = ep;
self.edges[t].next = ep;
self.edges[t].prev = tn;
self.edges[ep].next = tn;
self.edges[ep].prev = t;
self.vertices[self.edges[e].origin].out_edge = Some(tn);
self.vertices[self.edges[t].origin].out_edge = Some(en);
self.edges[e].origin = self.edges[ep].origin;
self.edges[t].origin = self.edges[tp].origin;
self.faces[self.edges[e].face].adjacent_edge = Some(e);
self.faces[self.edges[t].face].adjacent_edge = Some(t);
self.edges[tp].face = self.edges[e].face;
self.edges[ep].face = self.edges[t].face;
}
#[cfg(test)]
pub fn sanity_check(&self) {
for (index, face) in self.faces.iter().enumerate() {
if let Some(adj) = face.adjacent_edge {
assert_eq!(self.edges[adj].face, index);
}
}
for (index, vertex) in self.vertices.iter().enumerate() {
if let Some(out_edge) = vertex.out_edge {
assert_eq!(self.edges[out_edge].origin, index);
}
}
for handle in 0..self.num_edges() {
let edge = self.edge(handle);
assert_eq!(edge, edge.o_next().o_prev());
assert_eq!(edge, edge.o_prev().o_next());
assert_eq!(edge, edge.sym().sym());
}
}
}
impl<V, E> DCEL<V, E>
where
E: ::std::fmt::Debug,
{
#[cfg(test)]
fn print(&self) {
for (index, edge) in self.edges.iter().enumerate() {
println!("edge {}: {:#?}", index, edge);
}
for (index, vertex) in self.vertices.iter().enumerate() {
println!("vertex {}: {:?}", index, vertex.out_edge);
}
for (index, face) in self.faces.iter().enumerate() {
println!("face {}: {:?}", index, face);
}
}
}
/// An iterator that iterates over the edges adjacent to a face.
///
/// The iterator will traverse the edges in oriented order.
/// This order is counterclockwise for right handed coordinate systems
/// or clockwise for left handed systems.
pub struct ONextIterator<'a, V, E = ()>
where
V: 'a,
E: 'a,
{
dcel: &'a DCEL<V, E>,
cur_until: Option<(FixedEdgeHandle, FixedEdgeHandle)>,
}
impl<'a, V, E> ONextIterator<'a, V, E>
where
V: 'a,
E: Default + 'a,
{
fn new_empty(dcel: &'a DCEL<V, E>) -> Self {
ONextIterator {
dcel,
cur_until: None,
}
}
fn new(dcel: &'a DCEL<V, E>, edge: FixedEdgeHandle) -> Self {
let edge = dcel.edge(edge);
ONextIterator {
dcel,
cur_until: Some((edge.fix(), edge.o_prev().fix())),
}
}
}
impl<'a, V, E> Iterator for ONextIterator<'a, V, E>
where
V: 'a,
E: Default + 'a,
{
type Item = EdgeHandle<'a, V, E>;
fn next(&mut self) -> Option<EdgeHandle<'a, V, E>> {
if let Some((cur, until)) = self.cur_until {
let cur_handle = self.dcel.edge(cur);
if cur == until {
self.cur_until = None;
} else {
let new_cur = cur_handle.o_next().fix();
self.cur_until = Some((new_cur, until));
}
Some(cur_handle)
} else {
None
}
}
}
impl<'a, V, E> DoubleEndedIterator for ONextIterator<'a, V, E>
where
V: 'a,
E: Default + 'a,
{
fn next_back(&mut self) -> Option<EdgeHandle<'a, V, E>> {
if let Some((cur, until)) = self.cur_until {
let until_handle = self.dcel.edge(until);
if cur == until {
self.cur_until = None;
} else {
let new_until = until_handle.o_prev().fix();
self.cur_until = Some((cur, new_until));
}
Some(until_handle)
} else {
None
}
}
}
/// An iterator that iterates over the outgoing edges from a vertex.
///
/// The edges will be iterated in counterclockwise order. Note that
/// this assumes that you use a right handed coordinate system,
/// otherwise the sense of orientation is inverted.
pub struct CCWIterator<'a, V, E = ()>
where
V: 'a,
E: 'a,
{
dcel: &'a DCEL<V, E>,
cur_until: Option<(FixedEdgeHandle, FixedEdgeHandle)>,
}
impl<'a, V, E> CCWIterator<'a, V, E>
where
V: 'a,
E: Default + 'a,
{
fn new(dcel: &'a DCEL<V, E>, vertex: FixedVertexHandle) -> Self {
let cur_until = if let Some(edge) = dcel.vertex(vertex).out_edge() {
Some((edge.ccw().fix(), edge.fix()))
} else {
None
};
CCWIterator { dcel, cur_until }
}
fn from_edge(dcel: &'a DCEL<V, E>, edge: FixedEdgeHandle) -> Self {
let edge = dcel.edge(edge);
CCWIterator {
dcel,
cur_until: Some((edge.fix(), edge.cw().fix())),
}
}
}
impl<'a, V, E> Iterator for CCWIterator<'a, V, E>
where
V: 'a,
E: Default + 'a,
{
type Item = EdgeHandle<'a, V, E>;
fn next(&mut self) -> Option<EdgeHandle<'a, V, E>> {
if let Some((cur, until)) = self.cur_until {
let cur_handle = self.dcel.edge(cur);
if cur == until {
self.cur_until = None;
} else {
let new_cur = cur_handle.ccw().fix();
self.cur_until = Some((new_cur, until));
}
Some(cur_handle)
} else {
None
}
}
}
impl<'a, V, E> DoubleEndedIterator for CCWIterator<'a, V, E>
where
V: 'a,
E: Default + 'a,
{
fn next_back(&mut self) -> Option<EdgeHandle<'a, V, E>> {
if let Some((cur, until)) = self.cur_until {
let until_handle = self.dcel.edge(until);
if cur == until {
self.cur_until = None;
} else {
let new_until = until_handle.cw().fix();
self.cur_until = Some((cur, new_until));
}
Some(until_handle)
} else {
None
}
}
}
pub struct FacesIterator<'a, V, E = ()>
where
V: 'a,
E: 'a,
{
dcel: &'a DCEL<V, E>,
current: FixedFaceHandle,
}
impl<'a, V, E> FacesIterator<'a, V, E>
where
V: 'a,
E: 'a,
{
fn new(dcel: &'a DCEL<V, E>) -> Self {
FacesIterator { dcel, current: 0 }
}
}
impl<'a, V, E> Iterator for FacesIterator<'a, V, E>
where
V: 'a,
E: Default + 'a,
{
type Item = FaceHandle<'a, V, E>;
fn next(&mut self) -> Option<FaceHandle<'a, V, E>> {
if self.current < self.dcel.num_faces() {
let result = FaceHandle::new(self.dcel, self.current);
self.current += 1;
Some(result)
} else {
None
}
}
}
type FixedVerticesIterator = ::std::ops::Range<usize>;
pub struct VerticesIterator<'a, V, E = ()>
where
V: 'a,
E: 'a,
{
dcel: &'a DCEL<V, E>,
current: FixedVertexHandle,
}
impl<'a, V, E> VerticesIterator<'a, V, E>
where
V: 'a,
E: 'a,
{
fn new(dcel: &'a DCEL<V, E>) -> Self {
VerticesIterator { dcel, current: 0 }
}
}
impl<'a, V, E> Iterator for VerticesIterator<'a, V, E>
where
V: 'a,
E: Default + 'a,
{
type Item = VertexHandle<'a, V, E>;
fn next(&mut self) -> Option<VertexHandle<'a, V, E>> {
if self.current < self.dcel.num_vertices() {
let result = VertexHandle::new(self.dcel, self.current);
self.current += 1;
Some(result)
} else {
None
}
}
}
pub struct EdgesIterator<'a, V, E = ()>
where
V: 'a,
E: 'a,
{
dcel: &'a DCEL<V, E>,
current: FixedEdgeHandle,
}
impl<'a, V, E> EdgesIterator<'a, V, E>
where
V: 'a,
E: 'a,
{
fn new(dcel: &'a DCEL<V, E>) -> Self {
EdgesIterator { dcel, current: 0 }
}
}
impl<'a, V, E> Iterator for EdgesIterator<'a, V, E>
where
E: Default,
{
type Item = EdgeHandle<'a, V, E>;
fn next(&mut self) -> Option<EdgeHandle<'a, V, E>> {
if let Some(edge) = self.dcel.edges.get(self.current) {
let twin = edge.twin;
self.current += 1;
if self.current - 1 < twin {
Some(EdgeHandle::new(self.dcel, self.current - 1))
} else {
self.next()
}
} else {
None
}
}
}
/// A handle to a directed edge.
///
/// Used to retrieve adjacent vertices and faces.
pub struct EdgeHandle<'a, V, E = ()>
where
V: 'a,
E: 'a,
{
dcel: &'a DCEL<V, E>,
handle: FixedEdgeHandle,
}
/// A handle to a vertex.
///
/// Used to retrieve its outgoing edges.
pub struct VertexHandle<'a, V, E = ()>
where
V: 'a,
E: 'a,
{
dcel: &'a DCEL<V, E>,
handle: FixedVertexHandle,
}
/// A handle to a face.
///
/// Used to retrieve its adjacent edges.
pub struct FaceHandle<'a, V, E = ()>
where
V: 'a,
E: 'a,
{
dcel: &'a DCEL<V, E>,
handle: FixedFaceHandle,
}
impl<'a, V, E> ::std::fmt::Debug for VertexHandle<'a, V, E>
where
V: 'a,
{
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "VertexHandle({:?})", self.handle)
}
}
impl<'a, V, E> PartialEq for VertexHandle<'a, V, E>
where
V: 'a,
{
fn eq(&self, other: &Self) -> bool {
self.handle == other.handle
}
}
impl<'a, V, E> Copy for VertexHandle<'a, V, E> where V: 'a {}
impl<'a, V, E> VertexHandle<'a, V, E>
where
V: 'a,
E: 'a,
{
fn new(dcel: &'a DCEL<V, E>, handle: FixedVertexHandle) -> Self {
VertexHandle { dcel, handle }
}
}
impl<'a, V, E> VertexHandle<'a, V, E>
where
V: 'a,
E: Default + 'a,
{
/// Returns an outgoing edge.
///
/// If the vertex has multiple outgoing edges, any of them is returned.
pub fn out_edge(&self) -> Option<EdgeHandle<'a, V, E>> {
self.dcel.vertices[self.handle]
.out_edge
.map(|e| self.dcel.edge(e))
}
/// Returns all outgoing edges in counter clockwise order.
///
/// Note that this assumes that you use a right handed coordinate system,
/// otherwise the sense of orientation is inverted.
pub fn ccw_out_edges(&self) -> CCWIterator<'a, V, E> {
CCWIterator::new(self.dcel, self.handle)
}
/// Creates a fixed vertex handle from this dynamic handle.
///
/// # Notes
///
/// Calling `DelaunayTriangulation::insert()` will create vertices in increasing order.
pub fn fix(&self) -> FixedVertexHandle {
self.handle
}
}
impl<'a, V, E> Clone for VertexHandle<'a, V, E>
where
V: 'a,
E: 'a,
{
fn clone(&self) -> Self {
VertexHandle::new(self.dcel, self.handle)
}
}
impl<'a, V, E> ::std::ops::Deref for VertexHandle<'a, V, E> {
type Target = V;
fn deref(&self) -> &V {
&self.dcel.vertices[self.handle].data
}
}
impl<'a, V, E> Copy for EdgeHandle<'a, V, E> where V: 'a {}
impl<'a, V, E> Clone for EdgeHandle<'a, V, E>
where
V: 'a,
{
fn clone(&self) -> Self {
EdgeHandle::new(self.dcel, self.handle)
}
}
impl<'a, V, E> PartialEq for EdgeHandle<'a, V, E>
where
V: 'a,
{
fn eq(&self, other: &Self) -> bool {
self.handle == other.handle
}
}
impl<'a, V, E> ::std::fmt::Debug for EdgeHandle<'a, V, E>
where
V: 'a,
E: Default,
{
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(
f,
"EdgeHandle - id: {:?} ({:?} -> {:?})",
self.handle,
self.from().fix(),
self.to().fix()
)
}
}
impl<'a, V, E> EdgeHandle<'a, V, E>
where
V: 'a,
E: 'a,
{
fn new(dcel: &'a DCEL<V, E>, handle: FixedEdgeHandle) -> Self {
EdgeHandle { dcel, handle }
}
}
impl<'a, V, E> EdgeHandle<'a, V, E>
where
V: 'a,
E: Default + 'a,
{
/// Creates a fixed edge handle from this dynamic handle.
pub fn fix(&self) -> FixedEdgeHandle {
self.handle
}
/// Returns the edge's source vertex.
pub fn from(&self) -> VertexHandle<'a, V, E> {
let edge = &self.dcel.edges[self.handle];
VertexHandle::new(self.dcel, edge.origin)
}
/// Returns the oriented next edge.
///
/// The oriented next edge shares the same face as this edge.
/// When traversing the face's edges in oriented order,
/// this edge is the predecessor of the oriented next edge.
/// "Oriented" means counterclockwise for right handed
/// coordinate systems.
pub fn o_next(&self) -> EdgeHandle<'a, V, E> {
EdgeHandle::new(self.dcel, self.dcel.edges[self.handle].next)
}
/// Returns the oriented previous edge.
///
/// The oriented previous edge shares the same face as this edge.
/// When traversing the face's edges in oriented order,
/// this edge is the successor of the oriented previous edge.
/// "Oriented" means counterclockwise for right handed
/// coordinate systems.
pub fn o_prev(&self) -> EdgeHandle<'a, V, E> {
EdgeHandle::new(self.dcel, self.dcel.edges[self.handle].prev)
}
/// Returns an iterator over all edges sharing the same face
/// as this edge.
///
/// The face's edges will be traversed in oriented order.
/// This order is counterclockwise for right handed coordinate
/// systems or clockwise for left handed systems.
pub fn o_next_iterator(&self) -> ONextIterator<'a, V, E> {
ONextIterator::new(self.dcel, self.handle)
}
/// Returns the edges destination vertex.
pub fn to(&self) -> VertexHandle<'a, V, E> {
self.sym().from()
}
/// Returns the face located to the left of this edge.
pub fn face(&self) -> FaceHandle<'a, V, E> {
self.dcel.face(self.dcel.edges[self.handle].face)
}
/// Returns this edge's mirror edge.
pub fn sym(&self) -> EdgeHandle<'a, V, E> {
EdgeHandle {
dcel: self.dcel,
handle: self.dcel.edges[self.handle].twin,
}
}
/// Returns the next edge in clockwise direction.
///
/// Note that this assumes that you use a right handed coordinate system,
/// otherwise the sense of orientation is inverted.
pub fn cw(&self) -> EdgeHandle<'a, V, E> {
let twin = self.sym().handle;
EdgeHandle {
dcel: self.dcel,
handle: self.dcel.edges[twin].next,
}
}
/// Returns the next edge in counter clockwise direction.
///
/// Note that this assumes that you use a right handed coordinate system,
/// otherwise the sense of orientation is inverted.
pub fn ccw(&self) -> EdgeHandle<'a, V, E> {
EdgeHandle {
dcel: self.dcel,
handle: self.dcel.edges[self.handle].prev,
}
.sym()
}
/// Returns an iterator over all edges in counter clockwise
/// order.
///
/// Note that this assumes that you use a right handed coordinate system,
/// otherwise the sense of orientation is inverted.
pub fn ccw_iter(&self) -> CCWIterator<'a, V, E> {
CCWIterator::from_edge(self.dcel, self.handle)
}
}
impl<'a, V, E> Copy for FaceHandle<'a, V, E> where V: 'a {}
impl<'a, V, E> Clone for FaceHandle<'a, V, E>
where
V: 'a,
{
fn clone(&self) -> Self {
FaceHandle::new(self.dcel, self.handle)
}
}
impl<'a, V, E> PartialEq for FaceHandle<'a, V, E>
where
V: 'a,
{
fn eq(&self, other: &Self) -> bool {
self.handle == other.handle
}
}
impl<'a, V, E> ::std::fmt::Debug for FaceHandle<'a, V, E>
where
V: 'a,
{
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "FaceHandle({:?})", self.handle)
}
}
impl<'a, V, E> FaceHandle<'a, V, E>
where
V: 'a,
E: 'a, | fn new(dcel: &'a DCEL<V, E>, handle: FixedFaceHandle) -> Self {
FaceHandle { dcel, handle }
}
}
impl<'a, V, E> FaceHandle<'a, V, E>
where
V: 'a,
E: Default + 'a,
{
/// Tries to interpret this face as a triangle, returning its 3 vertices.
///
/// # Notes
///
/// The retuned vertices are in clockwise order. The position of the first one is undefined.
///
/// # Panic
/// This method will panic if the face does not form a triangle, for example if it is called on the [infinite face].
///
/// [infinite face]: struct.DelaunayTriangulation.html#method.infinite_face
pub fn as_triangle(&self) -> [VertexHandle<'a, V, E>; 3] {
let adjacent = self.dcel.faces[self.handle]
.adjacent_edge
.expect("Face has no adjacent edge");
let edge = self.dcel.edge(adjacent);
let prev = edge.o_prev();
debug_assert!(
prev.o_prev() == edge.o_next(),
"Face does not form a triangle"
);
[prev.from(), edge.from(), edge.to()]
}
/// Returns an edge that is adjacent to this face.
///
/// If this face has multiple adjacent edges, any of them is returned.
pub fn adjacent_edge(&self) -> Option<EdgeHandle<'a, V, E>> {
self.dcel.faces[self.handle]
.adjacent_edge
.map(|e| EdgeHandle::new(self.dcel, e))
}
/// Returns an iterator that iterates over all adjacent edges.
///
/// The edges are traversed in oriented order.
/// This order will be counterclockwise for right handed coordinate
/// system or clockwise for left handed systems.
pub fn adjacent_edges(&self) -> ONextIterator<'a, V, E> {
if let Some(adj) = self.dcel.faces[self.handle].adjacent_edge {
ONextIterator::new(self.dcel, adj)
} else {
ONextIterator::new_empty(self.dcel)
}
}
/// Creates a fixed face handle from this dynamic face handle.
pub fn fix(&self) -> FixedFaceHandle {
self.handle
}
}
#[cfg(test)]
mod test {
use super::{HalfEdgeEntry, DCEL};
#[test]
fn test_create_triangle() {
let mut dcel = DCEL::new();
let v0 = dcel.insert_vertex(());
let v1 = dcel.insert_vertex(());
let v2 = dcel.insert_vertex(());
let e01 = dcel.connect_two_isolated_vertices(v0, v1, 0);
let e12 = dcel.connect_edge_to_isolated_vertex(e01, v2);
let e20 = dcel.create_face(e12, e01);
let t01 = dcel.edges[e01].twin;
let t12 = dcel.edges[e12].twin;
let t20 = dcel.edges[e20].twin;
assert_eq!(
dcel.edges[e01],
HalfEdgeEntry {
next: e12,
prev: e20,
twin: t01,
origin: 0,
face: 1,
data: (),
}
);
assert_eq!(
dcel.edges[e12],
HalfEdgeEntry {
next: e20,
prev: e01,
twin: t12,
origin: 1,
face: 1,
data: (),
}
);
assert_eq!(
dcel.edges[e20],
HalfEdgeEntry {
next: e01,
prev: e12,
twin: t20,
origin: 2,
face: 1,
data: (),
}
);
assert_eq!(dcel.edges[t01].face, 0);
assert_eq!(dcel.edges[t12].face, 0);
assert_eq!(dcel.edges[t20].face, 0);
}
#[test]
fn test_flip() {
let mut dcel = DCEL::new();
let v0 = dcel.insert_vertex(());
let v1 = dcel.insert_vertex(());
let v2 = dcel.insert_vertex(());
let v3 = dcel.insert_vertex(());
let e01 = dcel.connect_two_isolated_vertices(v0, v1, 0);
let e12 = dcel.connect_edge_to_isolated_vertex(e01, v2);
let e23 = dcel.connect_edge_to_isolated_vertex(e12, v3);
let e30 = dcel.create_face(e23, e01);
let e_flip = dcel.create_face(e30, e23);
assert_eq!(
dcel.edges[e_flip],
HalfEdgeEntry {
next: e23,
prev: e30,
twin: dcel.edges[e_flip].twin,
origin: 0,
face: 2,
data: (),
}
);
dcel.flip_cw(e_flip);
let twin = dcel.edges[e_flip].twin;
assert_eq!(
dcel.edges[e_flip],
HalfEdgeEntry {
next: e12,
prev: e23,
twin: twin,
origin: 3,
face: 2,
data: (),
}
);
assert_eq!(
dcel.edges[twin],
HalfEdgeEntry {
next: e30,
prev: e01,
twin: e_flip,
origin: 1,
face: 1,
data: (),
}
);
}
#[test]
fn test_split_isolated_edge() {
let mut dcel = DCEL::new();
let v0 = dcel.insert_vertex(());
let v1 = dcel.insert_vertex(());
let edge = dcel.connect_two_isolated_vertices(v0, v1, 0);
let split_vertex = dcel.insert_vertex(());
dcel.split_edge(edge, split_vertex);
dcel.print();
dcel.sanity_check();
}
#[test]
fn test_split_unisolated() {
let mut dcel = DCEL::new();
let v0 = dcel.insert_vertex(());
let v1 = dcel.insert_vertex(());
let v2 = dcel.insert_vertex(());
let v3 = dcel.insert_vertex(());
let e01 = dcel.connect_two_isolated_vertices(v0, v1, 0);
let t01 = dcel.edge(e01).sym().fix();
let e12 = dcel.connect_edge_to_isolated_vertex(e01, v2);
let t12 = dcel.edge(e12).sym().fix();
let e20 = dcel.create_face(e12, e01);
let t20 = dcel.edge(e20).sym().fix();
let e_split = dcel.split_edge(e20, v3);
let t_split = dcel.edge(e_split).sym().fix();
assert_eq!(
dcel.edges[e20],
HalfEdgeEntry {
next: e_split,
prev: e12,
twin: t20,
origin: v2,
face: 1,
data: (),
}
);
assert_eq!(
dcel.edges[e_split],
HalfEdgeEntry {
next: e01,
prev: e20,
twin: t_split,
origin: v3,
face: 1,
data: (),
}
);
assert_eq!(
dcel.edges[t_split],
HalfEdgeEntry {
next: t20,
prev: t01,
origin: v0,
twin: e_split,
face: 0,
data: (),
}
);
assert_eq!(
dcel.edges[t20],
HalfEdgeEntry {
next: t12,
prev: t_split,
origin: v3,
twin: e20,
face: 0,
data: (),
}
);
assert_eq!(dcel.edges[t01].next, t_split);
assert_eq!(dcel.edges[e01].prev, e_split);
assert_eq!(dcel.edges[t12].prev, t20);
assert_eq!(dcel.edges[e12].next, e20);
assert!(
dcel.vertices[v3].out_edge == Some(e_split) || dcel.vertices[v3].out_edge == Some(t20)
);
dcel.sanity_check();
}
#[test]
fn test_split_half_isolated() {
let mut dcel = DCEL::new();
let v0 = dcel.insert_vertex(());
let v1 = dcel.insert_vertex(());
let v2 = dcel.insert_vertex(());
let v_split = dcel.insert_vertex(());
let e1 = dcel.connect_two_isolated_vertices(v0, v1, 0);
let e2 = dcel.connect_edge_to_isolated_vertex(e1, v2);
dcel.split_edge(e2, v_split);
dcel.sanity_check();
}
#[test]
fn test_cw_ccw() {
let mut dcel = DCEL::new();
let v0 = dcel.insert_vertex(());
let v1 = dcel.insert_vertex(());
let v2 = dcel.insert_vertex(());
let v3 = dcel.insert_vertex(());
let e01 = dcel.connect_two_isolated_vertices(v0, v1, 0);
let e12 = dcel.connect_edge_to_isolated_vertex(e01, v2);
let e23 = dcel.connect_edge_to_isolated_vertex(e12, v3);
let e30 = dcel.create_face(e23, e01);
let e02 = dcel.create_face(e30, e23);
let e02 = dcel.edge(e02);
assert_eq!(e02.cw().fix(), e01);
assert_eq!(e02.ccw().fix(), dcel.edges[e30].twin);
}
#[test]
fn pentagon_test() {
let mut dcel = DCEL::new();
let mut v = Vec::new();
for _ in 0..5 {
v.push(dcel.insert_vertex(()));
}
let e01 = dcel.connect_two_isolated_vertices(v[0], v[1], 0);
let e12 = dcel.connect_edge_to_isolated_vertex(e01, v[2]);
let e23 = dcel.connect_edge_to_isolated_vertex(e12, v[3]);
let e34 = dcel.connect_edge_to_isolated_vertex(e23, v[4]);
let e40 = dcel.create_face(e34, e01);
let e02 = dcel.create_face(e40, e23);
let e03 = dcel.create_face(e40, e34);
let entry = dcel.edges[e02];
assert_eq!(entry.next, e23);
assert_eq!(entry.prev, dcel.edges[e03].twin);
assert_eq!(entry.origin, v[0]);
}
#[test]
fn test_ccw_iterator() {
let mut dcel = DCEL::new();
let mut vs = Vec::new();
let central = dcel.insert_vertex(());
assert_eq!(dcel.vertex(central).ccw_out_edges().next(), None);
for _ in 0..5 {
vs.push(dcel.insert_vertex(()));
}
let mut last_edge = dcel.connect_two_isolated_vertices(central, vs[0], 0);
last_edge = dcel.edge(last_edge).sym().fix();
for vertex in &vs[1..] {
last_edge = dcel.connect_edge_to_isolated_vertex(last_edge, *vertex);
last_edge = dcel.edge(last_edge).sym().fix();
}
let out_edge = dcel.vertex(central).out_edge().unwrap();
let mut neighs: Vec<_> = out_edge.ccw_iter().map(|e| e.to().fix()).collect();
assert_eq!(neighs.len(), 5);
for i in 0..5 {
let first = neighs[i];
let second = neighs[(i + 1) % 5];
assert_eq!(first - 1, second % 5);
}
let revs: Vec<_> = out_edge.ccw_iter().rev().map(|e| e.to().fix()).collect();
neighs.reverse();
assert_eq!(neighs, revs);
}
#[test]
fn test_o_next_iterator() {
let mut dcel = DCEL::new();
let mut vs = Vec::new();
for _ in 0..5 {
vs.push(dcel.insert_vertex(()));
}
let mut last_edge = dcel.connect_two_isolated_vertices(vs[0], vs[1], 0);
let mut edges = vec![last_edge];
for vertex in &vs[2..] {
last_edge = dcel.connect_edge_to_isolated_vertex(last_edge, *vertex);
edges.push(last_edge);
}
edges.push(dcel.connect_edge_to_edge(last_edge, vs[0]));
let mut iterated: Vec<_> = dcel
.edge(edges[0])
.o_next_iterator()
.map(|e| e.fix())
.collect();
assert_eq!(iterated, edges);
let rev: Vec<_> = dcel
.edge(edges[0])
.o_next_iterator()
.rev()
.map(|e| e.fix())
.collect();
iterated.reverse();
assert_eq!(iterated, rev);
}
} | { |
main.py | import pygame
from pygame.sprite import Group
from settings import Settings
from ship import Ship
import game_functions as gf
from game_stats import GameStats
from button import Button
from scoreboard import Scoreboard
from mouse_move import MouseMove
from musics import Musics
def run_game():
# 初始化游戏并且创建一个屏幕对象
pygame.init()
ai_settings | = Settings()
screen = pygame.display.set_mode((ai_settings.screen_width, ai_settings.screen_height))
pygame.display.set_caption("Plane_Wars")
gf.set_bgscreen(ai_settings, screen)
# 创建按钮
play_button = Button(ai_settings, screen, "Play")
# 创建一艘飞船
ship = Ship(ai_settings, screen)
# 创建一个存储子弹的编组
bullets = Group()
# 创建入侵者
invaders = Group()
gf.create_fleet(ai_settings, screen, invaders)
# 创建一个用于存储游戏统计数据的实例
stats = GameStats(ai_settings)
stats.read_history_score()
scoreboard = Scoreboard(ai_settings, screen, stats)
# 显示生命数
scoreboard.show_ships()
# 创建鼠标移动类
x, y =pygame.mouse.get_pos()
mouse_move = MouseMove(x, y)
# 创建背景音乐类
musics = Musics()
musics.bgmusic.play(-1, 0)
# 开始游戏主循环
while True:
# 监视键盘和鼠标事件
gf.check_events(ai_settings, screen, ship, bullets, play_button, stats, invaders, scoreboard, mouse_move, musics)
if stats.game_active:
gf.update_bullets(bullets, invaders, ai_settings, screen, stats, musics)
ship.update()
gf.update_invaders(ai_settings, stats, invaders, ship, screen, scoreboard)
# 每次循环时重绘屏幕 让最近绘制的屏幕可见
gf.update_screen(ai_settings, screen, stats, ship, invaders, bullets, play_button, scoreboard)
run_game()
|
|
context.rs | use crate::Error;
use alloc::vec::Vec;
use parity_wasm::elements::{
BlockType, FunctionType, GlobalType, MemoryType, TableType, ValueType,
};
#[derive(Default, Debug)]
pub struct ModuleContext {
pub memories: Vec<MemoryType>,
pub tables: Vec<TableType>,
pub globals: Vec<GlobalType>,
pub types: Vec<FunctionType>,
pub func_type_indexes: Vec<u32>,
}
impl ModuleContext {
pub fn memories(&self) -> &[MemoryType] {
&self.memories
}
pub fn tables(&self) -> &[TableType] {
&self.tables
}
pub fn globals(&self) -> &[GlobalType] {
&self.globals
}
pub fn types(&self) -> &[FunctionType] {
&self.types
}
pub fn func_type_indexes(&self) -> &[u32] |
pub fn require_memory(&self, idx: u32) -> Result<(), Error> {
if self.memories().get(idx as usize).is_none() {
return Err(Error(format!("Memory at index {} doesn't exists", idx)));
}
Ok(())
}
pub fn require_table(&self, idx: u32) -> Result<&TableType, Error> {
self.tables()
.get(idx as usize)
.ok_or_else(|| Error(format!("Table at index {} doesn't exists", idx)))
}
pub fn require_function(&self, idx: u32) -> Result<(&[ValueType], BlockType), Error> {
let ty_idx = self
.func_type_indexes()
.get(idx as usize)
.ok_or_else(|| Error(format!("Function at index {} doesn't exists", idx)))?;
self.require_function_type(*ty_idx)
}
pub fn require_function_type(&self, idx: u32) -> Result<(&[ValueType], BlockType), Error> {
let ty = self
.types()
.get(idx as usize)
.ok_or_else(|| Error(format!("Type at index {} doesn't exists", idx)))?;
let params = ty.params();
let return_ty = ty
.results()
.first()
.map(|vty| BlockType::Value(*vty))
.unwrap_or(BlockType::NoResult);
Ok((params, return_ty))
}
pub fn require_global(&self, idx: u32, mutability: Option<bool>) -> Result<&GlobalType, Error> {
let global = self
.globals()
.get(idx as usize)
.ok_or_else(|| Error(format!("Global at index {} doesn't exists", idx)))?;
if let Some(expected_mutable) = mutability {
if expected_mutable && !global.is_mutable() {
return Err(Error(format!("Expected global {} to be mutable", idx)));
}
if !expected_mutable && global.is_mutable() {
return Err(Error(format!("Expected global {} to be immutable", idx)));
}
}
Ok(global)
}
}
#[derive(Default)]
pub struct ModuleContextBuilder {
memories: Vec<MemoryType>,
tables: Vec<TableType>,
globals: Vec<GlobalType>,
types: Vec<FunctionType>,
func_type_indexes: Vec<u32>,
}
impl ModuleContextBuilder {
pub fn new() -> ModuleContextBuilder {
ModuleContextBuilder::default()
}
pub fn push_memory(&mut self, memory: MemoryType) {
self.memories.push(memory);
}
pub fn push_table(&mut self, table: TableType) {
self.tables.push(table);
}
pub fn push_global(&mut self, global: GlobalType) {
self.globals.push(global);
}
pub fn set_types(&mut self, types: Vec<FunctionType>) {
self.types = types;
}
pub fn push_func_type_index(&mut self, func_type_index: u32) {
self.func_type_indexes.push(func_type_index);
}
pub fn build(self) -> ModuleContext {
let ModuleContextBuilder {
memories,
tables,
globals,
types,
func_type_indexes,
} = self;
ModuleContext {
memories,
tables,
globals,
types,
func_type_indexes,
}
}
}
| {
&self.func_type_indexes
} |
222_CountCompleteTreeNode.js | /**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val; | /**
* @param {TreeNode} root
* @return {number}
*/
var countNodes = function(root) {
// 都没有
if(root === null) return 0;
// 左右都无
if(!root.left && !root.right) return 1;
// 左右子节点 加上自己
return countNodes(root.left) + countNodes(root.right) + 1;
};
// https://leetcode-cn.com/problems/count-complete-tree-nodes/solution/di-gui-by-meiko-7/ | * this.left = this.right = null;
* }
*/ |
viper.go | package arrange
import (
"errors"
"github.com/spf13/viper"
"go.uber.org/fx"
)
var (
// ErrNilViper is returned to the fx.App when the externally supplied Viper
// instance is nil
ErrNilViper = errors.New("the viper instance cannot be nil")
)
// ViperUnmarshaler is the standard Unmarshaler implementation used by arrange.
// It couples a Viper instance together with zero or more decoder options.
type ViperUnmarshaler struct {
// Viper is the required Viper instance to which all unmarshal operations are delegated
Viper *viper.Viper
// Options is the optional slice of viper.DecoderConfigOptions passed to all
// unmarshal calls
Options []viper.DecoderConfigOption
// Printer is the required fx.Printer component to which informational messages are written.
//
// NOTE: This field won't be defaulted. It must be set. ForViper ensures this field
// is set even if no fx.Printer component is present in the enclosing fx.App.
Printer fx.Printer
}
// Unmarshal implements Unmarshaler
func (vu ViperUnmarshaler) Unmarshal(value interface{}) error {
vu.Printer.Printf("UNMARSHAL => %T", value)
return vu.Viper.Unmarshal(value, vu.Options...)
}
// UnmarshalKey implements Unmarshaler
func (vu ViperUnmarshaler) UnmarshalKey(key string, value interface{}) error {
vu.Printer.Printf("UNMARSHAL KEY\t[%s] => %T", key, value)
return vu.Viper.UnmarshalKey(key, value, vu.Options...)
}
// ViperUnmarshalerIn is the set of dependencies required to build a ViperUnmarshaler
// Note that the actual viper instance must be supplied externally.
type ViperUnmarshalerIn struct {
fx.In
// Options is the optional slice of viper.DecoderConfigOption that will be
// applied to every unmarshal or unmarshal key operation
Options []viper.DecoderConfigOption `optional:"true"`
// Printer is an optional fx.Printer component to which the viper unmarshaler
// prints informational messages. If not supplied, DefaultPrinter() is used.
Printer fx.Printer `optional:"true"`
}
// ForViper creates a ViperUnmarshaler backed by an externally supplied viper instance.
// The returned component is of type Unmarshaler.
//
// This function DOES NOT make the viper instance itself available as a component.
// If that is desired, use fx.Supply.
//
// The set of viper.DecoderConfigOptions used will be the merging of the options supplied
// to this function and an optional []viper.DecoderConfigOption component.
func ForViper(v *viper.Viper, o ...viper.DecoderConfigOption) fx.Option | {
if v == nil {
return fx.Error(ErrNilViper)
}
return fx.Options(
fx.Provide(
func(in ViperUnmarshalerIn) Unmarshaler {
return ViperUnmarshaler{
Viper: v,
Options: append(
append([]viper.DecoderConfigOption{}, o...),
in.Options...,
),
Printer: NewModulePrinter(Module, in.Printer),
}
},
),
)
} |
|
get2_parameters.go | // Code generated by go-swagger; DO NOT EDIT.
package formats
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"net/http"
"time"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
cr "github.com/go-openapi/runtime/client"
"github.com/go-openapi/strfmt"
)
// NewGet2Params creates a new Get2Params object,
// with the default timeout for this client.
//
// Default values are not hydrated, since defaults are normally applied by the API server side.
//
// To enforce default values in parameter, use SetDefaults or WithDefaults.
func NewGet2Params() *Get2Params {
return &Get2Params{
timeout: cr.DefaultTimeout,
}
}
// NewGet2ParamsWithTimeout creates a new Get2Params object
// with the ability to set a timeout on a request.
func | (timeout time.Duration) *Get2Params {
return &Get2Params{
timeout: timeout,
}
}
// NewGet2ParamsWithContext creates a new Get2Params object
// with the ability to set a context for a request.
func NewGet2ParamsWithContext(ctx context.Context) *Get2Params {
return &Get2Params{
Context: ctx,
}
}
// NewGet2ParamsWithHTTPClient creates a new Get2Params object
// with the ability to set a custom HTTPClient for a request.
func NewGet2ParamsWithHTTPClient(client *http.Client) *Get2Params {
return &Get2Params{
HTTPClient: client,
}
}
/* Get2Params contains all the parameters to send to the API endpoint
for the get 2 operation.
Typically these are written to a http.Request.
*/
type Get2Params struct {
timeout time.Duration
Context context.Context
HTTPClient *http.Client
}
// WithDefaults hydrates default values in the get 2 params (not the query body).
//
// All values with no default are reset to their zero value.
func (o *Get2Params) WithDefaults() *Get2Params {
o.SetDefaults()
return o
}
// SetDefaults hydrates default values in the get 2 params (not the query body).
//
// All values with no default are reset to their zero value.
func (o *Get2Params) SetDefaults() {
// no default values defined for this parameter
}
// WithTimeout adds the timeout to the get 2 params
func (o *Get2Params) WithTimeout(timeout time.Duration) *Get2Params {
o.SetTimeout(timeout)
return o
}
// SetTimeout adds the timeout to the get 2 params
func (o *Get2Params) SetTimeout(timeout time.Duration) {
o.timeout = timeout
}
// WithContext adds the context to the get 2 params
func (o *Get2Params) WithContext(ctx context.Context) *Get2Params {
o.SetContext(ctx)
return o
}
// SetContext adds the context to the get 2 params
func (o *Get2Params) SetContext(ctx context.Context) {
o.Context = ctx
}
// WithHTTPClient adds the HTTPClient to the get 2 params
func (o *Get2Params) WithHTTPClient(client *http.Client) *Get2Params {
o.SetHTTPClient(client)
return o
}
// SetHTTPClient adds the HTTPClient to the get 2 params
func (o *Get2Params) SetHTTPClient(client *http.Client) {
o.HTTPClient = client
}
// WriteToRequest writes these params to a swagger request
func (o *Get2Params) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
if err := r.SetTimeout(o.timeout); err != nil {
return err
}
var res []error
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
| NewGet2ParamsWithTimeout |
inlay_hints.rs | use either::Either;
use hir::{known, Callable, HirDisplay, Semantics};
use ide_db::helpers::FamousDefs;
use ide_db::RootDatabase;
use stdx::to_lower_snake_case;
use syntax::{
ast::{self, ArgListOwner, AstNode, NameOwner},
match_ast, Direction, NodeOrToken, SmolStr, SyntaxKind, TextRange, T,
};
use crate::FileId;
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct InlayHintsConfig {
pub type_hints: bool,
pub parameter_hints: bool,
pub chaining_hints: bool,
pub max_length: Option<usize>,
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum InlayKind {
TypeHint,
ParameterHint,
ChainingHint,
}
#[derive(Debug)]
pub struct InlayHint {
pub range: TextRange,
pub kind: InlayKind,
pub label: SmolStr,
}
// Feature: Inlay Hints
//
// rust-analyzer shows additional information inline with the source code.
// Editors usually render this using read-only virtual text snippets interspersed with code.
// | // * types of chained expressions
//
// **Note:** VS Code does not have native support for inlay hints https://github.com/microsoft/vscode/issues/16221[yet] and the hints are implemented using decorations.
// This approach has limitations, the caret movement and bracket highlighting near the edges of the hint may be weird:
// https://github.com/rust-analyzer/rust-analyzer/issues/1623[1], https://github.com/rust-analyzer/rust-analyzer/issues/3453[2].
//
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Toggle inlay hints*
// |===
//
// image::https://user-images.githubusercontent.com/48062697/113020660-b5f98b80-917a-11eb-8d70-3be3fd558cdd.png[]
pub(crate) fn inlay_hints(
db: &RootDatabase,
file_id: FileId,
config: &InlayHintsConfig,
) -> Vec<InlayHint> {
let _p = profile::span("inlay_hints");
let sema = Semantics::new(db);
let file = sema.parse(file_id);
let mut res = Vec::new();
for node in file.syntax().descendants() {
if let Some(expr) = ast::Expr::cast(node.clone()) {
get_chaining_hints(&mut res, &sema, config, expr);
}
match_ast! {
match node {
ast::CallExpr(it) => { get_param_name_hints(&mut res, &sema, config, ast::Expr::from(it)); },
ast::MethodCallExpr(it) => { get_param_name_hints(&mut res, &sema, config, ast::Expr::from(it)); },
ast::IdentPat(it) => { get_bind_pat_hints(&mut res, &sema, config, it); },
_ => (),
}
}
}
res
}
fn get_chaining_hints(
acc: &mut Vec<InlayHint>,
sema: &Semantics<RootDatabase>,
config: &InlayHintsConfig,
expr: ast::Expr,
) -> Option<()> {
if !config.chaining_hints {
return None;
}
if matches!(expr, ast::Expr::RecordExpr(_)) {
return None;
}
let krate = sema.scope(expr.syntax()).module().map(|it| it.krate());
let famous_defs = FamousDefs(&sema, krate);
let mut tokens = expr
.syntax()
.siblings_with_tokens(Direction::Next)
.filter_map(NodeOrToken::into_token)
.filter(|t| match t.kind() {
SyntaxKind::WHITESPACE if !t.text().contains('\n') => false,
SyntaxKind::COMMENT => false,
_ => true,
});
// Chaining can be defined as an expression whose next sibling tokens are newline and dot
// Ignoring extra whitespace and comments
let next = tokens.next()?.kind();
if next == SyntaxKind::WHITESPACE {
let mut next_next = tokens.next()?.kind();
while next_next == SyntaxKind::WHITESPACE {
next_next = tokens.next()?.kind();
}
if next_next == T![.] {
let ty = sema.type_of_expr(&expr)?;
if ty.is_unknown() {
return None;
}
if matches!(expr, ast::Expr::PathExpr(_)) {
if let Some(hir::Adt::Struct(st)) = ty.as_adt() {
if st.fields(sema.db).is_empty() {
return None;
}
}
}
acc.push(InlayHint {
range: expr.syntax().text_range(),
kind: InlayKind::ChainingHint,
label: hint_iterator(sema, &famous_defs, config, &ty).unwrap_or_else(|| {
ty.display_truncated(sema.db, config.max_length).to_string().into()
}),
});
}
}
Some(())
}
fn get_param_name_hints(
acc: &mut Vec<InlayHint>,
sema: &Semantics<RootDatabase>,
config: &InlayHintsConfig,
expr: ast::Expr,
) -> Option<()> {
if !config.parameter_hints {
return None;
}
let args = match &expr {
ast::Expr::CallExpr(expr) => expr.arg_list()?.args(),
ast::Expr::MethodCallExpr(expr) => expr.arg_list()?.args(),
_ => return None,
};
let callable = get_callable(sema, &expr)?;
let hints = callable
.params(sema.db)
.into_iter()
.zip(args)
.filter_map(|((param, _ty), arg)| {
let param_name = match param? {
Either::Left(_) => "self".to_string(),
Either::Right(pat) => match pat {
ast::Pat::IdentPat(it) => it.name()?.to_string(),
_ => return None,
},
};
Some((param_name, arg))
})
.filter(|(param_name, arg)| should_show_param_name_hint(sema, &callable, param_name, &arg))
.map(|(param_name, arg)| InlayHint {
range: arg.syntax().text_range(),
kind: InlayKind::ParameterHint,
label: param_name.into(),
});
acc.extend(hints);
Some(())
}
fn get_bind_pat_hints(
acc: &mut Vec<InlayHint>,
sema: &Semantics<RootDatabase>,
config: &InlayHintsConfig,
pat: ast::IdentPat,
) -> Option<()> {
if !config.type_hints {
return None;
}
let krate = sema.scope(pat.syntax()).module().map(|it| it.krate());
let famous_defs = FamousDefs(&sema, krate);
let ty = sema.type_of_pat(&pat.clone().into())?;
if should_not_display_type_hint(sema, &pat, &ty) {
return None;
}
acc.push(InlayHint {
range: pat.syntax().text_range(),
kind: InlayKind::TypeHint,
label: hint_iterator(sema, &famous_defs, config, &ty)
.unwrap_or_else(|| ty.display_truncated(sema.db, config.max_length).to_string().into()),
});
Some(())
}
/// Checks if the type is an Iterator from std::iter and replaces its hint with an `impl Iterator<Item = Ty>`.
fn hint_iterator(
sema: &Semantics<RootDatabase>,
famous_defs: &FamousDefs,
config: &InlayHintsConfig,
ty: &hir::Type,
) -> Option<SmolStr> {
let db = sema.db;
let strukt = std::iter::successors(Some(ty.clone()), |ty| ty.remove_ref())
.last()
.and_then(|strukt| strukt.as_adt())?;
let krate = strukt.krate(db);
if krate != famous_defs.core()? {
return None;
}
let iter_trait = famous_defs.core_iter_Iterator()?;
let iter_mod = famous_defs.core_iter()?;
// assert this struct comes from `core::iter`
iter_mod.visibility_of(db, &strukt.into()).filter(|&vis| vis == hir::Visibility::Public)?;
if ty.impls_trait(db, iter_trait, &[]) {
let assoc_type_item = iter_trait.items(db).into_iter().find_map(|item| match item {
hir::AssocItem::TypeAlias(alias) if alias.name(db) == known::Item => Some(alias),
_ => None,
})?;
if let Some(ty) = ty.normalize_trait_assoc_type(db, iter_trait, &[], assoc_type_item) {
const LABEL_START: &str = "impl Iterator<Item = ";
const LABEL_END: &str = ">";
let ty_display = hint_iterator(sema, famous_defs, config, &ty)
.map(|assoc_type_impl| assoc_type_impl.to_string())
.unwrap_or_else(|| {
ty.display_truncated(
db,
config
.max_length
.map(|len| len.saturating_sub(LABEL_START.len() + LABEL_END.len())),
)
.to_string()
});
return Some(format!("{}{}{}", LABEL_START, ty_display, LABEL_END).into());
}
}
None
}
fn pat_is_enum_variant(db: &RootDatabase, bind_pat: &ast::IdentPat, pat_ty: &hir::Type) -> bool {
if let Some(hir::Adt::Enum(enum_data)) = pat_ty.as_adt() {
let pat_text = bind_pat.to_string();
enum_data
.variants(db)
.into_iter()
.map(|variant| variant.name(db).to_string())
.any(|enum_name| enum_name == pat_text)
} else {
false
}
}
fn should_not_display_type_hint(
sema: &Semantics<RootDatabase>,
bind_pat: &ast::IdentPat,
pat_ty: &hir::Type,
) -> bool {
let db = sema.db;
if pat_ty.is_unknown() {
return true;
}
if let Some(hir::Adt::Struct(s)) = pat_ty.as_adt() {
if s.fields(db).is_empty() && s.name(db).to_string() == bind_pat.to_string() {
return true;
}
}
for node in bind_pat.syntax().ancestors() {
match_ast! {
match node {
ast::LetStmt(it) => {
return it.ty().is_some()
},
ast::Param(it) => {
return it.ty().is_some()
},
ast::MatchArm(_it) => {
return pat_is_enum_variant(db, bind_pat, pat_ty);
},
ast::IfExpr(it) => {
return it.condition().and_then(|condition| condition.pat()).is_some()
&& pat_is_enum_variant(db, bind_pat, pat_ty);
},
ast::WhileExpr(it) => {
return it.condition().and_then(|condition| condition.pat()).is_some()
&& pat_is_enum_variant(db, bind_pat, pat_ty);
},
ast::ForExpr(it) => {
// We *should* display hint only if user provided "in {expr}" and we know the type of expr (and it's not unit).
// Type of expr should be iterable.
return it.in_token().is_none() ||
it.iterable()
.and_then(|iterable_expr|sema.type_of_expr(&iterable_expr))
.map(|iterable_ty| iterable_ty.is_unknown() || iterable_ty.is_unit())
.unwrap_or(true)
},
_ => (),
}
}
}
false
}
fn should_show_param_name_hint(
sema: &Semantics<RootDatabase>,
callable: &hir::Callable,
param_name: &str,
argument: &ast::Expr,
) -> bool {
let param_name = param_name.trim_start_matches('_');
let fn_name = match callable.kind() {
hir::CallableKind::Function(it) => Some(it.name(sema.db).to_string()),
hir::CallableKind::TupleStruct(_)
| hir::CallableKind::TupleEnumVariant(_)
| hir::CallableKind::Closure => None,
};
if param_name.is_empty()
|| Some(param_name) == fn_name.as_ref().map(|s| s.trim_start_matches('_'))
|| is_argument_similar_to_param_name(sema, argument, param_name)
|| is_param_name_similar_to_fn_name(param_name, callable, fn_name.as_ref())
|| param_name.starts_with("ra_fixture")
{
return false;
}
// avoid displaying hints for common functions like map, filter, etc.
// or other obvious words used in std
!(callable.n_params() == 1 && is_obvious_param(param_name))
}
fn is_argument_similar_to_param_name(
sema: &Semantics<RootDatabase>,
argument: &ast::Expr,
param_name: &str,
) -> bool {
if is_enum_name_similar_to_param_name(sema, argument, param_name) {
return true;
}
match get_string_representation(argument) {
None => false,
Some(argument_string) => {
let num_leading_underscores =
argument_string.bytes().take_while(|&c| c == b'_').count();
// Does the argument name begin with the parameter name? Ignore leading underscores.
let mut arg_bytes = argument_string.bytes().skip(num_leading_underscores);
let starts_with_pattern = param_name.bytes().all(
|expected| matches!(arg_bytes.next(), Some(actual) if expected.eq_ignore_ascii_case(&actual)),
);
if starts_with_pattern {
return true;
}
// Does the argument name end with the parameter name?
let mut arg_bytes = argument_string.bytes().skip(num_leading_underscores);
param_name.bytes().rev().all(
|expected| matches!(arg_bytes.next_back(), Some(actual) if expected.eq_ignore_ascii_case(&actual)),
)
}
}
}
fn is_param_name_similar_to_fn_name(
param_name: &str,
callable: &Callable,
fn_name: Option<&String>,
) -> bool {
// if it's the only parameter, don't show it if:
// - is the same as the function name, or
// - the function ends with '_' + param_name
match (callable.n_params(), fn_name) {
(1, Some(function)) => {
function == param_name
|| (function.len() > param_name.len()
&& function.ends_with(param_name)
&& function[..function.len() - param_name.len()].ends_with('_'))
}
_ => false,
}
}
fn is_enum_name_similar_to_param_name(
sema: &Semantics<RootDatabase>,
argument: &ast::Expr,
param_name: &str,
) -> bool {
match sema.type_of_expr(argument).and_then(|t| t.as_adt()) {
Some(hir::Adt::Enum(e)) => to_lower_snake_case(&e.name(sema.db).to_string()) == param_name,
_ => false,
}
}
fn get_string_representation(expr: &ast::Expr) -> Option<String> {
match expr {
ast::Expr::MethodCallExpr(method_call_expr) => {
let name_ref = method_call_expr.name_ref()?;
match name_ref.text().as_str() {
"clone" => method_call_expr.receiver().map(|rec| rec.to_string()),
name_ref => Some(name_ref.to_owned()),
}
}
ast::Expr::FieldExpr(field_expr) => Some(field_expr.name_ref()?.to_string()),
ast::Expr::PathExpr(path_expr) => Some(path_expr.to_string()),
ast::Expr::PrefixExpr(prefix_expr) => get_string_representation(&prefix_expr.expr()?),
ast::Expr::RefExpr(ref_expr) => get_string_representation(&ref_expr.expr()?),
_ => None,
}
}
fn is_obvious_param(param_name: &str) -> bool {
let is_obvious_param_name =
matches!(param_name, "predicate" | "value" | "pat" | "rhs" | "other");
param_name.len() == 1 || is_obvious_param_name
}
fn get_callable(sema: &Semantics<RootDatabase>, expr: &ast::Expr) -> Option<hir::Callable> {
match expr {
ast::Expr::CallExpr(expr) => sema.type_of_expr(&expr.expr()?)?.as_callable(sema.db),
ast::Expr::MethodCallExpr(expr) => sema.resolve_method_call_as_callable(expr),
_ => None,
}
}
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};
use ide_db::helpers::FamousDefs;
use test_utils::extract_annotations;
use crate::{fixture, inlay_hints::InlayHintsConfig};
const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig {
type_hints: true,
parameter_hints: true,
chaining_hints: true,
max_length: None,
};
fn check(ra_fixture: &str) {
check_with_config(TEST_CONFIG, ra_fixture);
}
fn check_with_config(config: InlayHintsConfig, ra_fixture: &str) {
let ra_fixture =
format!("//- /main.rs crate:main deps:core\n{}\n{}", ra_fixture, FamousDefs::FIXTURE);
let (analysis, file_id) = fixture::file(&ra_fixture);
let expected = extract_annotations(&*analysis.file_text(file_id).unwrap());
let inlay_hints = analysis.inlay_hints(file_id, &config).unwrap();
let actual =
inlay_hints.into_iter().map(|it| (it.range, it.label.to_string())).collect::<Vec<_>>();
assert_eq!(expected, actual, "\nExpected:\n{:#?}\n\nActual:\n{:#?}", expected, actual);
}
fn check_expect(config: InlayHintsConfig, ra_fixture: &str, expect: Expect) {
let ra_fixture =
format!("//- /main.rs crate:main deps:core\n{}\n{}", ra_fixture, FamousDefs::FIXTURE);
let (analysis, file_id) = fixture::file(&ra_fixture);
let inlay_hints = analysis.inlay_hints(file_id, &config).unwrap();
expect.assert_debug_eq(&inlay_hints)
}
#[test]
fn param_hints_only() {
check_with_config(
InlayHintsConfig {
parameter_hints: true,
type_hints: false,
chaining_hints: false,
max_length: None,
},
r#"
fn foo(a: i32, b: i32) -> i32 { a + b }
fn main() {
let _x = foo(
4,
//^ a
4,
//^ b
);
}"#,
);
}
#[test]
fn param_name_similar_to_fn_name_still_hints() {
check_with_config(
InlayHintsConfig {
parameter_hints: true,
type_hints: false,
chaining_hints: false,
max_length: None,
},
r#"
fn max(x: i32, y: i32) -> i32 { x + y }
fn main() {
let _x = max(
4,
//^ x
4,
//^ y
);
}"#,
);
}
#[test]
fn param_name_similar_to_fn_name() {
check_with_config(
InlayHintsConfig {
parameter_hints: true,
type_hints: false,
chaining_hints: false,
max_length: None,
},
r#"
fn param_with_underscore(with_underscore: i32) -> i32 { with_underscore }
fn main() {
let _x = param_with_underscore(
4,
);
}"#,
);
}
#[test]
fn param_name_same_as_fn_name() {
check_with_config(
InlayHintsConfig {
parameter_hints: true,
type_hints: false,
chaining_hints: false,
max_length: None,
},
r#"
fn foo(foo: i32) -> i32 { foo }
fn main() {
let _x = foo(
4,
);
}"#,
);
}
#[test]
fn never_hide_param_when_multiple_params() {
check_with_config(
InlayHintsConfig {
parameter_hints: true,
type_hints: false,
chaining_hints: false,
max_length: None,
},
r#"
fn foo(bar: i32, baz: i32) -> i32 { bar + baz }
fn main() {
let _x = foo(
4,
//^ bar
8,
//^ baz
);
}"#,
);
}
#[test]
fn hints_disabled() {
check_with_config(
InlayHintsConfig {
type_hints: false,
parameter_hints: false,
chaining_hints: false,
max_length: None,
},
r#"
fn foo(a: i32, b: i32) -> i32 { a + b }
fn main() {
let _x = foo(4, 4);
}"#,
);
}
#[test]
fn type_hints_only() {
check_with_config(
InlayHintsConfig {
type_hints: true,
parameter_hints: false,
chaining_hints: false,
max_length: None,
},
r#"
fn foo(a: i32, b: i32) -> i32 { a + b }
fn main() {
let _x = foo(4, 4);
//^^ i32
}"#,
);
}
#[test]
fn default_generic_types_should_not_be_displayed() {
check(
r#"
struct Test<K, T = u8> { k: K, t: T }
fn main() {
let zz = Test { t: 23u8, k: 33 };
//^^ Test<i32>
let zz_ref = &zz;
//^^^^^^ &Test<i32>
let test = || zz;
//^^^^ || -> Test<i32>
}"#,
);
}
#[test]
fn let_statement() {
check(
r#"
#[derive(PartialEq)]
enum Option<T> { None, Some(T) }
#[derive(PartialEq)]
struct Test { a: Option<u32>, b: u8 }
fn main() {
struct InnerStruct {}
let test = 54;
//^^^^ i32
let test: i32 = 33;
let mut test = 33;
//^^^^^^^^ i32
let _ = 22;
let test = "test";
//^^^^ &str
let test = InnerStruct {};
//^^^^ InnerStruct
let test = unresolved();
let test = (42, 'a');
//^^^^ (i32, char)
let (a, (b, (c,)) = (2, (3, (9.2,));
//^ i32 ^ i32 ^ f64
let &x = &92;
//^ i32
}"#,
);
}
#[test]
fn closure_parameters() {
check(
r#"
fn main() {
let mut start = 0;
//^^^^^^^^^ i32
(0..2).for_each(|increment| { start += increment; });
//^^^^^^^^^ i32
let multiply =
//^^^^^^^^ |…| -> i32
| a, b| a * b
//^ i32 ^ i32
;
let _: i32 = multiply(1, 2);
let multiply_ref = &multiply;
//^^^^^^^^^^^^ &|…| -> i32
let return_42 = || 42;
//^^^^^^^^^ || -> i32
}"#,
);
}
#[test]
fn if_expr() {
check(
r#"
enum Option<T> { None, Some(T) }
use Option::*;
struct Test { a: Option<u32>, b: u8 }
fn main() {
let test = Some(Test { a: Some(3), b: 1 });
//^^^^ Option<Test>
if let None = &test {};
if let test = &test {};
//^^^^ &Option<Test>
if let Some(test) = &test {};
//^^^^ &Test
if let Some(Test { a, b }) = &test {};
//^ &Option<u32> ^ &u8
if let Some(Test { a: x, b: y }) = &test {};
//^ &Option<u32> ^ &u8
if let Some(Test { a: Some(x), b: y }) = &test {};
//^ &u32 ^ &u8
if let Some(Test { a: None, b: y }) = &test {};
//^ &u8
if let Some(Test { b: y, .. }) = &test {};
//^ &u8
if test == None {}
}"#,
);
}
#[test]
fn while_expr() {
check(
r#"
enum Option<T> { None, Some(T) }
use Option::*;
struct Test { a: Option<u32>, b: u8 }
fn main() {
let test = Some(Test { a: Some(3), b: 1 });
//^^^^ Option<Test>
while let Some(Test { a: Some(x), b: y }) = &test {};
//^ &u32 ^ &u8
}"#,
);
}
#[test]
fn match_arm_list() {
check(
r#"
enum Option<T> { None, Some(T) }
use Option::*;
struct Test { a: Option<u32>, b: u8 }
fn main() {
match Some(Test { a: Some(3), b: 1 }) {
None => (),
test => (),
//^^^^ Option<Test>
Some(Test { a: Some(x), b: y }) => (),
//^ u32 ^ u8
_ => {}
}
}"#,
);
}
#[test]
fn hint_truncation() {
check_with_config(
InlayHintsConfig { max_length: Some(8), ..TEST_CONFIG },
r#"
struct Smol<T>(T);
struct VeryLongOuterName<T>(T);
fn main() {
let a = Smol(0u32);
//^ Smol<u32>
let b = VeryLongOuterName(0usize);
//^ VeryLongOuterName<…>
let c = Smol(Smol(0u32))
//^ Smol<Smol<…>>
}"#,
);
}
#[test]
fn function_call_parameter_hint() {
check(
r#"
enum Option<T> { None, Some(T) }
use Option::*;
struct FileId {}
struct SmolStr {}
struct TextRange {}
struct SyntaxKind {}
struct NavigationTarget {}
struct Test {}
impl Test {
fn method(&self, mut param: i32) -> i32 { param * 2 }
fn from_syntax(
file_id: FileId,
name: SmolStr,
focus_range: Option<TextRange>,
full_range: TextRange,
kind: SyntaxKind,
docs: Option<String>,
) -> NavigationTarget {
NavigationTarget {}
}
}
fn test_func(mut foo: i32, bar: i32, msg: &str, _: i32, last: i32) -> i32 {
foo + bar
}
fn main() {
let not_literal = 1;
//^^^^^^^^^^^ i32
let _: i32 = test_func(1, 2, "hello", 3, not_literal);
//^ foo ^ bar ^^^^^^^ msg ^^^^^^^^^^^ last
let t: Test = Test {};
t.method(123);
//^^^ param
Test::method(&t, 3456);
//^^ self ^^^^ param
Test::from_syntax(
FileId {},
//^^^^^^^^^ file_id
"impl".into(),
//^^^^^^^^^^^^^ name
None,
//^^^^ focus_range
TextRange {},
//^^^^^^^^^^^^ full_range
SyntaxKind {},
//^^^^^^^^^^^^^ kind
None,
//^^^^ docs
);
}"#,
);
}
#[test]
fn omitted_parameters_hints_heuristics() {
check_with_config(
InlayHintsConfig { max_length: Some(8), ..TEST_CONFIG },
r#"
fn map(f: i32) {}
fn filter(predicate: i32) {}
struct TestVarContainer {
test_var: i32,
}
impl TestVarContainer {
fn test_var(&self) -> i32 {
self.test_var
}
}
struct Test {}
impl Test {
fn map(self, f: i32) -> Self {
self
}
fn filter(self, predicate: i32) -> Self {
self
}
fn field(self, value: i32) -> Self {
self
}
fn no_hints_expected(&self, _: i32, test_var: i32) {}
fn frob(&self, frob: bool) {}
}
struct Param {}
fn different_order(param: &Param) {}
fn different_order_mut(param: &mut Param) {}
fn has_underscore(_param: bool) {}
fn enum_matches_param_name(completion_kind: CompletionKind) {}
fn param_destructuring_omitted_1((a, b): (u32, u32)) {}
fn param_destructuring_omitted_2(TestVarContainer { test_var: _ }: TestVarContainer) {}
fn twiddle(twiddle: bool) {}
fn doo(_doo: bool) {}
enum CompletionKind {
Keyword,
}
fn main() {
let container: TestVarContainer = TestVarContainer { test_var: 42 };
let test: Test = Test {};
map(22);
filter(33);
let test_processed: Test = test.map(1).filter(2).field(3);
let test_var: i32 = 55;
test_processed.no_hints_expected(22, test_var);
test_processed.no_hints_expected(33, container.test_var);
test_processed.no_hints_expected(44, container.test_var());
test_processed.frob(false);
twiddle(true);
doo(true);
const TWIDDLE_UPPERCASE: bool = true;
twiddle(TWIDDLE_UPPERCASE);
let mut param_begin: Param = Param {};
different_order(¶m_begin);
different_order(&mut param_begin);
let param: bool = true;
has_underscore(param);
enum_matches_param_name(CompletionKind::Keyword);
let a: f64 = 7.0;
let b: f64 = 4.0;
let _: f64 = a.div_euclid(b);
let _: f64 = a.abs_sub(b);
let range: (u32, u32) = (3, 5);
param_destructuring_omitted_1(range);
param_destructuring_omitted_2(container);
}"#,
);
}
#[test]
fn unit_structs_have_no_type_hints() {
check_with_config(
InlayHintsConfig { max_length: Some(8), ..TEST_CONFIG },
r#"
enum Result<T, E> { Ok(T), Err(E) }
use Result::*;
struct SyntheticSyntax;
fn main() {
match Ok(()) {
Ok(_) => (),
Err(SyntheticSyntax) => (),
}
}"#,
);
}
#[test]
fn chaining_hints_ignore_comments() {
check_expect(
InlayHintsConfig {
parameter_hints: false,
type_hints: false,
chaining_hints: true,
max_length: None,
},
r#"
struct A(B);
impl A { fn into_b(self) -> B { self.0 } }
struct B(C);
impl B { fn into_c(self) -> C { self.0 } }
struct C;
fn main() {
let c = A(B(C))
.into_b() // This is a comment
// This is another comment
.into_c();
}
"#,
expect![[r#"
[
InlayHint {
range: 148..173,
kind: ChainingHint,
label: "B",
},
InlayHint {
range: 148..155,
kind: ChainingHint,
label: "A",
},
]
"#]],
);
}
#[test]
fn chaining_hints_without_newlines() {
check_with_config(
InlayHintsConfig {
parameter_hints: false,
type_hints: false,
chaining_hints: true,
max_length: None,
},
r#"
struct A(B);
impl A { fn into_b(self) -> B { self.0 } }
struct B(C);
impl B { fn into_c(self) -> C { self.0 } }
struct C;
fn main() {
let c = A(B(C)).into_b().into_c();
}"#,
);
}
#[test]
fn struct_access_chaining_hints() {
check_expect(
InlayHintsConfig {
parameter_hints: false,
type_hints: false,
chaining_hints: true,
max_length: None,
},
r#"
struct A { pub b: B }
struct B { pub c: C }
struct C(pub bool);
struct D;
impl D {
fn foo(&self) -> i32 { 42 }
}
fn main() {
let x = A { b: B { c: C(true) } }
.b
.c
.0;
let x = D
.foo();
}"#,
expect![[r#"
[
InlayHint {
range: 144..191,
kind: ChainingHint,
label: "C",
},
InlayHint {
range: 144..180,
kind: ChainingHint,
label: "B",
},
]
"#]],
);
}
#[test]
fn generic_chaining_hints() {
check_expect(
InlayHintsConfig {
parameter_hints: false,
type_hints: false,
chaining_hints: true,
max_length: None,
},
r#"
struct A<T>(T);
struct B<T>(T);
struct C<T>(T);
struct X<T,R>(T, R);
impl<T> A<T> {
fn new(t: T) -> Self { A(t) }
fn into_b(self) -> B<T> { B(self.0) }
}
impl<T> B<T> {
fn into_c(self) -> C<T> { C(self.0) }
}
fn main() {
let c = A::new(X(42, true))
.into_b()
.into_c();
}
"#,
expect![[r#"
[
InlayHint {
range: 247..284,
kind: ChainingHint,
label: "B<X<i32, bool>>",
},
InlayHint {
range: 247..266,
kind: ChainingHint,
label: "A<X<i32, bool>>",
},
]
"#]],
);
}
#[test]
fn incomplete_for_no_hint() {
check(
r#"
fn main() {
let data = &[1i32, 2, 3];
//^^^^ &[i32; _]
for i
}"#,
);
check(
r#"
pub struct Vec<T> {}
impl<T> Vec<T> {
pub fn new() -> Self { Vec {} }
pub fn push(&mut self, t: T) {}
}
impl<T> IntoIterator for Vec<T> {
type Item=T;
}
fn main() {
let mut data = Vec::new();
//^^^^^^^^ Vec<&str>
data.push("foo");
for i in
println!("Unit expr");
}
"#,
);
}
#[test]
fn complete_for_hint() {
check(
r#"
pub struct Vec<T> {}
impl<T> Vec<T> {
pub fn new() -> Self { Vec {} }
pub fn push(&mut self, t: T) {}
}
impl<T> IntoIterator for Vec<T> {
type Item=T;
}
fn main() {
let mut data = Vec::new();
//^^^^^^^^ Vec<&str>
data.push("foo");
for i in data {
//^ &str
let z = i;
//^ &str
}
}
"#,
);
}
#[test]
fn multi_dyn_trait_bounds() {
check_with_config(
InlayHintsConfig {
type_hints: true,
parameter_hints: false,
chaining_hints: false,
max_length: None,
},
r#"
pub struct Vec<T> {}
impl<T> Vec<T> {
pub fn new() -> Self { Vec {} }
}
pub struct Box<T> {}
trait Display {}
trait Sync {}
fn main() {
let _v = Vec::<Box<&(dyn Display + Sync)>>::new();
//^^ Vec<Box<&(dyn Display + Sync)>>
let _v = Vec::<Box<*const (dyn Display + Sync)>>::new();
//^^ Vec<Box<*const (dyn Display + Sync)>>
let _v = Vec::<Box<dyn Display + Sync>>::new();
//^^ Vec<Box<dyn Display + Sync>>
}
"#,
);
}
#[test]
fn shorten_iterator_hints() {
check_with_config(
InlayHintsConfig {
parameter_hints: false,
type_hints: true,
chaining_hints: false,
max_length: None,
},
r#"
use core::iter;
struct MyIter;
impl Iterator for MyIter {
type Item = ();
fn next(&mut self) -> Option<Self::Item> {
None
}
}
fn main() {
let _x = MyIter;
//^^ MyIter
let _x = iter::repeat(0);
//^^ impl Iterator<Item = i32>
fn generic<T: Clone>(t: T) {
let _x = iter::repeat(t);
//^^ impl Iterator<Item = T>
let _chained = iter::repeat(t).take(10);
//^^^^^^^^ impl Iterator<Item = T>
}
}
"#,
);
}
#[test]
fn shorten_iterator_chaining_hints() {
check_expect(
InlayHintsConfig {
parameter_hints: false,
type_hints: false,
chaining_hints: true,
max_length: None,
},
r#"
use core::iter;
struct MyIter;
impl Iterator for MyIter {
type Item = ();
fn next(&mut self) -> Option<Self::Item> {
None
}
}
fn main() {
let _x = MyIter.by_ref()
.take(5)
.by_ref()
.take(5)
.by_ref();
}
"#,
expect![[r#"
[
InlayHint {
range: 175..242,
kind: ChainingHint,
label: "impl Iterator<Item = ()>",
},
InlayHint {
range: 175..225,
kind: ChainingHint,
label: "impl Iterator<Item = ()>",
},
InlayHint {
range: 175..207,
kind: ChainingHint,
label: "impl Iterator<Item = ()>",
},
InlayHint {
range: 175..190,
kind: ChainingHint,
label: "&mut MyIter",
},
]
"#]],
);
}
#[test]
fn shorten_iterators_in_associated_params() {
check_with_config(
InlayHintsConfig {
parameter_hints: false,
type_hints: true,
chaining_hints: false,
max_length: None,
},
r#"
use core::iter;
pub struct SomeIter<T> {}
impl<T> SomeIter<T> {
pub fn new() -> Self { SomeIter {} }
pub fn push(&mut self, t: T) {}
}
impl<T> Iterator for SomeIter<T> {
type Item = T;
fn next(&mut self) -> Option<Self::Item> {
None
}
}
fn main() {
let mut some_iter = SomeIter::new();
//^^^^^^^^^^^^^ SomeIter<Take<Repeat<i32>>>
some_iter.push(iter::repeat(2).take(2));
let iter_of_iters = some_iter.take(2);
//^^^^^^^^^^^^^ impl Iterator<Item = impl Iterator<Item = i32>>
}
"#,
);
}
#[test]
fn hide_param_hints_for_clones() {
check_with_config(
InlayHintsConfig {
parameter_hints: true,
type_hints: false,
chaining_hints: false,
max_length: None,
},
r#"
fn foo(bar: i32, baz: String, qux: f32) {}
fn main() {
let bar = 3;
let baz = &"baz";
let fez = 1.0;
foo(bar.clone(), baz.clone(), fez.clone());
//^^^^^^^^^^^ qux
}
"#,
);
}
#[test]
fn infer_call_method_return_associated_types_with_generic() {
check(
r#"
pub trait Default {
fn default() -> Self;
}
pub trait Foo {
type Bar: Default;
}
pub fn quux<T: Foo>() -> T::Bar {
let y = Default::default();
//^ <T as Foo>::Bar
y
}
"#,
);
}
#[test]
fn self_param_hints() {
check(
r#"
struct Foo;
impl Foo {
fn foo(self: Self) {}
fn bar(self: &Self) {}
}
fn main() {
Foo::foo(Foo);
//^^^ self
Foo::bar(&Foo);
//^^^^ self
}
"#,
)
}
#[test]
fn fn_hints() {
check(
r#"
trait Sized {}
fn foo() -> impl Fn() { loop {} }
fn foo1() -> impl Fn(f64) { loop {} }
fn foo2() -> impl Fn(f64, f64) { loop {} }
fn foo3() -> impl Fn(f64, f64) -> u32 { loop {} }
fn foo4() -> &'static dyn Fn(f64, f64) -> u32 { loop {} }
fn foo5() -> &'static dyn Fn(&'static dyn Fn(f64, f64) -> u32, f64) -> u32 { loop {} }
fn foo6() -> impl Fn(f64, f64) -> u32 + Sized { loop {} }
fn foo7() -> *const (impl Fn(f64, f64) -> u32 + Sized) { loop {} }
fn main() {
let foo = foo();
// ^^^ impl Fn()
let foo = foo1();
// ^^^ impl Fn(f64)
let foo = foo2();
// ^^^ impl Fn(f64, f64)
let foo = foo3();
// ^^^ impl Fn(f64, f64) -> u32
let foo = foo4();
// ^^^ &dyn Fn(f64, f64) -> u32
let foo = foo5();
// ^^^ &dyn Fn(&dyn Fn(f64, f64) -> u32, f64) -> u32
let foo = foo6();
// ^^^ impl Fn(f64, f64) -> u32 + Sized
let foo = foo7();
// ^^^ *const (impl Fn(f64, f64) -> u32 + Sized)
}
"#,
)
}
#[test]
fn param_name_hints_show_for_literals() {
check(
r#"pub fn test(a: i32, b: i32) -> [i32; 2] { [a, b] }
fn main() {
test(
0x0fab272b,
//^^^^^^^^^^ a
0x0fab272b
//^^^^^^^^^^ b
);
}"#,
)
}
} | // rust-analyzer shows hints for
//
// * types of local variables
// * names of function arguments |
RelayModernSelector.js | /**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
*
* @format
*/
// flowlint ambiguous-object-type:error
'use strict';
var _require = require('./RelayConcreteVariables'),
getFragmentVariables = _require.getFragmentVariables;
var _require2 = require('./RelayStoreUtils'),
CLIENT_EDGE_TRAVERSAL_PATH = _require2.CLIENT_EDGE_TRAVERSAL_PATH,
FRAGMENT_OWNER_KEY = _require2.FRAGMENT_OWNER_KEY,
FRAGMENTS_KEY = _require2.FRAGMENTS_KEY,
ID_KEY = _require2.ID_KEY,
IS_WITHIN_UNMATCHED_TYPE_REFINEMENT = _require2.IS_WITHIN_UNMATCHED_TYPE_REFINEMENT;
var areEqual = require("fbjs/lib/areEqual");
var invariant = require('invariant');
var warning = require("fbjs/lib/warning");
/**
* @public
*
* Given the result `item` from a parent that fetched `fragment`, creates a
* selector that can be used to read the results of that fragment for that item.
*
* Example:
*
* Given two fragments as follows:
*
* ```
* fragment Parent on User {
* id
* ...Child
* }
* fragment Child on User {
* name
* }
* ```
*
* And given some object `parent` that is the results of `Parent` for id "4",
* the results of `Child` can be accessed by first getting a selector and then
* using that selector to `lookup()` the results against the environment:
*
* ```
* const childSelector = getSingularSelector(queryVariables, Child, parent);
* const childData = environment.lookup(childSelector).data;
* ```
*/
function getSingularSelector(fragment, item) {
!(typeof item === 'object' && item !== null && !Array.isArray(item)) ? process.env.NODE_ENV !== "production" ? invariant(false, 'RelayModernSelector: Expected value for fragment `%s` to be an object, got ' + '`%s`.', fragment.name, JSON.stringify(item)) : invariant(false) : void 0;
var dataID = item[ID_KEY];
var fragments = item[FRAGMENTS_KEY];
var mixedOwner = item[FRAGMENT_OWNER_KEY];
var isWithinUnmatchedTypeRefinement = item[IS_WITHIN_UNMATCHED_TYPE_REFINEMENT] === true;
var mixedClientEdgeTraversalPath = item[CLIENT_EDGE_TRAVERSAL_PATH];
if (typeof dataID === 'string' && typeof fragments === 'object' && fragments !== null && typeof fragments[fragment.name] === 'object' && fragments[fragment.name] !== null && typeof mixedOwner === 'object' && mixedOwner !== null && (mixedClientEdgeTraversalPath == null || Array.isArray(mixedClientEdgeTraversalPath))) {
var owner = mixedOwner;
var clientEdgeTraversalPath = mixedClientEdgeTraversalPath;
var argumentVariables = fragments[fragment.name];
var fragmentVariables = getFragmentVariables(fragment, owner.variables, argumentVariables);
return createReaderSelector(fragment, dataID, fragmentVariables, owner, isWithinUnmatchedTypeRefinement, clientEdgeTraversalPath);
}
if (process.env.NODE_ENV !== "production") {
var stringifiedItem = JSON.stringify(item);
if (stringifiedItem.length > 499) {
stringifiedItem = stringifiedItem.substr(0, 498) + "\u2026";
}
process.env.NODE_ENV !== "production" ? warning(false, 'RelayModernSelector: Expected object to contain data for fragment `%s`, got ' + '`%s`. Make sure that the parent operation/fragment included fragment ' + '`...%s` without `@relay(mask: false)`.', fragment.name, stringifiedItem, fragment.name) : void 0;
}
return null;
}
/**
* @public
*
* Given the result `items` from a parent that fetched `fragment`, creates a
* selector that can be used to read the results of that fragment on those
* items. This is similar to `getSingularSelector` but for "plural" fragments that
* expect an array of results and therefore return an array of selectors.
*/
function getPluralSelector(fragment, items) {
var selectors = null;
items.forEach(function (item, ii) {
var selector = item != null ? getSingularSelector(fragment, item) : null;
if (selector != null) {
selectors = selectors || [];
selectors.push(selector);
}
});
if (selectors == null) {
return null;
} else {
return {
kind: 'PluralReaderSelector',
selectors: selectors
};
}
}
function getSelector(fragment, item) {
if (item == null) {
return item;
} else if (fragment.metadata && fragment.metadata.plural === true) {
!Array.isArray(item) ? process.env.NODE_ENV !== "production" ? invariant(false, 'RelayModernSelector: Expected value for fragment `%s` to be an array, got `%s`. ' + 'Remove `@relay(plural: true)` from fragment `%s` to allow the prop to be an object.', fragment.name, JSON.stringify(item), fragment.name) : invariant(false) : void 0;
return getPluralSelector(fragment, item);
} else {
!!Array.isArray(item) ? process.env.NODE_ENV !== "production" ? invariant(false, 'RelayModernSelector: Expected value for fragment `%s` to be an object, got `%s`. ' + 'Add `@relay(plural: true)` to fragment `%s` to allow the prop to be an array of items.', fragment.name, JSON.stringify(item), fragment.name) : invariant(false) : void 0;
return getSingularSelector(fragment, item);
}
}
/**
* @public
*
* Given a mapping of keys -> results and a mapping of keys -> fragments,
* extracts the selectors for those fragments from the results.
*
* The canonical use-case for this function is ReactRelayFragmentContainer, which
* uses this function to convert (props, fragments) into selectors so that it
* can read the results to pass to the inner component.
*/
function getSelectorsFromObject(fragments, object) {
var selectors = {};
for (var _key in fragments) {
if (fragments.hasOwnProperty(_key)) {
var fragment = fragments[_key];
var item = object[_key];
selectors[_key] = getSelector(fragment, item);
}
}
return selectors;
}
/**
* @public
*
* Given a mapping of keys -> results and a mapping of keys -> fragments,
* extracts a mapping of keys -> id(s) of the results.
*
* Similar to `getSelectorsFromObject()`, this function can be useful in
* determining the "identity" of the props passed to a component.
*/
function getDataIDsFromObject(fragments, object) {
var ids = {};
for (var _key2 in fragments) {
if (fragments.hasOwnProperty(_key2)) {
var fragment = fragments[_key2];
var item = object[_key2];
ids[_key2] = getDataIDsFromFragment(fragment, item);
}
}
return ids;
}
function getDataIDsFromFragment(fragment, item) {
if (item == null) {
return item;
} else if (fragment.metadata && fragment.metadata.plural === true) {
!Array.isArray(item) ? process.env.NODE_ENV !== "production" ? invariant(false, 'RelayModernSelector: Expected value for fragment `%s` to be an array, got `%s`. ' + 'Remove `@relay(plural: true)` from fragment `%s` to allow the prop to be an object.', fragment.name, JSON.stringify(item), fragment.name) : invariant(false) : void 0;
return getDataIDs(fragment, item);
} else {
!!Array.isArray(item) ? process.env.NODE_ENV !== "production" ? invariant(false, 'RelayModernFragmentSpecResolver: Expected value for fragment `%s` to be an object, got `%s`. ' + 'Add `@relay(plural: true)` to fragment `%s` to allow the prop to be an array of items.', fragment.name, JSON.stringify(item), fragment.name) : invariant(false) : void 0;
return getDataID(fragment, item);
}
}
/**
* @internal
*/
function getDataIDs(fragment, items) {
var ids = null;
items.forEach(function (item) {
var id = item != null ? getDataID(fragment, item) : null;
if (id != null) {
ids = ids || [];
ids.push(id);
}
});
return ids;
}
/**
* @internal
*/
function getDataID(fragment, item) {
!(typeof item === 'object' && item !== null && !Array.isArray(item)) ? process.env.NODE_ENV !== "production" ? invariant(false, 'RelayModernSelector: Expected value for fragment `%s` to be an object, got ' + '`%s`.', fragment.name, JSON.stringify(item)) : invariant(false) : void 0;
var dataID = item[ID_KEY];
if (typeof dataID === 'string') {
return dataID;
}
process.env.NODE_ENV !== "production" ? warning(false, 'RelayModernSelector: Expected object to contain data for fragment `%s`, got ' + '`%s`. Make sure that the parent operation/fragment included fragment ' + '`...%s` without `@relay(mask: false)`, or `null` is passed as the fragment ' + "reference for `%s` if it's conditonally included and the condition isn't met.", fragment.name, JSON.stringify(item), fragment.name, fragment.name) : void 0;
return null;
}
/**
* @public
*
* Given a mapping of keys -> results and a mapping of keys -> fragments,
* extracts the merged variables that would be in scope for those
* fragments/results.
*
* This can be useful in determing what varaibles were used to fetch the data
* for a Relay container, for example.
*/
function getVariablesFromObject(fragments, object) {
var variables = {};
for (var _key3 in fragments) {
if (fragments.hasOwnProperty(_key3)) {
var fragment = fragments[_key3];
var item = object[_key3];
var itemVariables = getVariablesFromFragment(fragment, item);
Object.assign(variables, itemVariables);
}
}
return variables;
}
function getVariablesFromFragment(fragment, item) {
var _fragment$metadata;
if (item == null) {
return {};
} else if (((_fragment$metadata = fragment.metadata) === null || _fragment$metadata === void 0 ? void 0 : _fragment$metadata.plural) === true) {
!Array.isArray(item) ? process.env.NODE_ENV !== "production" ? invariant(false, 'RelayModernSelector: Expected value for fragment `%s` to be an array, got `%s`. ' + 'Remove `@relay(plural: true)` from fragment `%s` to allow the prop to be an object.', fragment.name, JSON.stringify(item), fragment.name) : invariant(false) : void 0;
return getVariablesFromPluralFragment(fragment, item);
} else {
!!Array.isArray(item) ? process.env.NODE_ENV !== "production" ? invariant(false, 'RelayModernFragmentSpecResolver: Expected value for fragment `%s` to be an object, got `%s`. ' + 'Add `@relay(plural: true)` to fragment `%s` to allow the prop to be an array of items.', fragment.name, JSON.stringify(item), fragment.name) : invariant(false) : void 0;
return getVariablesFromSingularFragment(fragment, item) || {};
}
}
function getVariablesFromSingularFragment(fragment, item) {
var selector = getSingularSelector(fragment, item); | return null;
}
return selector.variables;
}
function getVariablesFromPluralFragment(fragment, items) {
var variables = {};
items.forEach(function (value, ii) {
if (value != null) {
var itemVariables = getVariablesFromSingularFragment(fragment, value);
if (itemVariables != null) {
Object.assign(variables, itemVariables);
}
}
});
return variables;
}
/**
* @public
*
* Determine if two selectors are equal (represent the same selection). Note
* that this function returns `false` when the two queries/fragments are
* different objects, even if they select the same fields.
*/
function areEqualSelectors(thisSelector, thatSelector) {
return thisSelector.owner === thatSelector.owner && thisSelector.dataID === thatSelector.dataID && thisSelector.node === thatSelector.node && areEqual(thisSelector.variables, thatSelector.variables);
}
function createReaderSelector(fragment, dataID, variables, request) {
var isWithinUnmatchedTypeRefinement = arguments.length > 4 && arguments[4] !== undefined ? arguments[4] : false;
var clientEdgeTraversalPath = arguments.length > 5 ? arguments[5] : undefined;
return {
kind: 'SingularReaderSelector',
dataID: dataID,
isWithinUnmatchedTypeRefinement: isWithinUnmatchedTypeRefinement,
clientEdgeTraversalPath: clientEdgeTraversalPath !== null && clientEdgeTraversalPath !== void 0 ? clientEdgeTraversalPath : null,
node: fragment,
variables: variables,
owner: request
};
}
function createNormalizationSelector(node, dataID, variables) {
return {
dataID: dataID,
node: node,
variables: variables
};
}
module.exports = {
areEqualSelectors: areEqualSelectors,
createReaderSelector: createReaderSelector,
createNormalizationSelector: createNormalizationSelector,
getDataIDsFromFragment: getDataIDsFromFragment,
getDataIDsFromObject: getDataIDsFromObject,
getSingularSelector: getSingularSelector,
getPluralSelector: getPluralSelector,
getSelector: getSelector,
getSelectorsFromObject: getSelectorsFromObject,
getVariablesFromSingularFragment: getVariablesFromSingularFragment,
getVariablesFromPluralFragment: getVariablesFromPluralFragment,
getVariablesFromFragment: getVariablesFromFragment,
getVariablesFromObject: getVariablesFromObject
}; |
if (!selector) { |
init.go | package horizon
import (
"github.com/leevlad/go/support/log"
)
// InitFn is a function that contributes to the initialization of an App struct
type InitFn func(*App)
type initializer struct {
Name string
Fn InitFn
Deps []string
}
type initializerSet []initializer
var appInit initializerSet
// Add adds a new initializer into the chain
func (is *initializerSet) Add(name string, fn InitFn, deps ...string) {
*is = append(*is, initializer{
Name: name,
Fn: fn,
Deps: deps,
})
}
// Run initializes the provided application, but running every Initializer
func (is *initializerSet) Run(app *App) {
init := *is
alreadyRun := make(map[string]bool)
for {
ranInitializer := false
for _, i := range init {
runnable := true
// if we've already been run, skip
if _, ok := alreadyRun[i.Name]; ok {
runnable = false
}
// if any of our dependencies haven't been run, skip
for _, d := range i.Deps {
if _, ok := alreadyRun[d]; !ok {
runnable = false
break
}
}
if !runnable {
continue
}
log.WithField("init_name", i.Name).Debug("running initializer")
i.Fn(app)
alreadyRun[i.Name] = true
ranInitializer = true
}
// If, after a full loop through the initializers we ran nothing
// we are done
if !ranInitializer {
break
}
}
// if we didn't get to run all initializers, we have a cycle
if len(alreadyRun) != len(init) |
}
| {
log.Panic("initializer cycle detected")
} |
test_gamma_functions.py | from sympy import Symbol, gamma, oo, nan, zoo, factorial, sqrt, Rational, log,\
polygamma, EulerGamma, pi, uppergamma, S, expand_func, loggamma, sin, cos, \
O, cancel
x = Symbol('x')
y = Symbol('y')
n = Symbol('n', integer=True)
def test_gamma():
assert gamma(nan) == nan
assert gamma(oo) == oo
assert gamma(-100) == zoo
assert gamma(0) == zoo
assert gamma(1) == 1
assert gamma(2) == 1
assert gamma(3) == 2
assert gamma(102) == factorial(101)
assert gamma(Rational(1,2)) == sqrt(pi)
assert gamma(Rational(3, 2)) == Rational(1, 2)*sqrt(pi)
assert gamma(Rational(5, 2)) == Rational(3, 4)*sqrt(pi)
assert gamma(Rational(7, 2)) == Rational(15, 8)*sqrt(pi)
assert gamma(Rational(-1, 2)) == -2*sqrt(pi)
assert gamma(Rational(-3, 2)) == Rational(4, 3)*sqrt(pi)
assert gamma(Rational(-5, 2)) == -Rational(8, 15)*sqrt(pi)
assert gamma(Rational(-15, 2)) == Rational(256, 2027025)*sqrt(pi)
assert gamma(x).diff(x) == gamma(x)*polygamma(0, x)
assert gamma(x - 1).expand(func=True) == gamma(x)/(x-1)
assert gamma(x + 2).expand(func=True, mul=False) == x*(x+1)*gamma(x)
assert expand_func(gamma(x + Rational(3, 2))) == \
(x + Rational(1, 2))*gamma(x + Rational(1, 2))
assert expand_func(gamma(x - Rational(1, 2))) == \
gamma(Rational(1, 2) + x)/(x - Rational(1, 2))
def test_gamma_series():
assert gamma(x + 1).series(x, 0, 3) == \
1 - x*EulerGamma + x**2*EulerGamma**2/2 + pi**2*x**2/12 + O(x**3)
def test_lowergamma():
pass
def test_uppergamma():
assert uppergamma(4, 0) == 6
def test_polygamma():
assert polygamma(n, nan) == nan
assert polygamma(0, oo) == oo
assert polygamma(1, oo) == 0
assert polygamma(5, oo) == 0
assert polygamma(0, -9) == zoo
assert polygamma(0, -9) == zoo
assert polygamma(0, -1) == zoo
assert polygamma(0, 0) == zoo
assert polygamma(0, 1) == -EulerGamma
assert polygamma(0, 7) == Rational(49, 20) - EulerGamma
assert polygamma(1, 1) == pi**2/6
assert polygamma(1, 2) == pi**2/6 - 1
assert polygamma(1, 3) == pi**2/6 - Rational(5, 4)
assert polygamma(3, 1) == pi**4 / 15
assert polygamma(3, 5) == 6*(Rational(-22369,20736) + pi**4/90)
assert polygamma(5, 1) == 8 * pi**6 / 63
assert polygamma(3, 7*x).diff(x) == 7*polygamma(4, 7*x)
def test_polygamma_expand_func():
assert polygamma(0, x).expand(func=True) == polygamma(0, x)
assert polygamma(0, 2*x).expand(func=True) == \
polygamma(0, x)/2 + polygamma(0, Rational(1, 2) + x)/2 + log(2)
assert polygamma(1, 2*x).expand(func=True) == \
polygamma(1, x)/4 + polygamma(1, Rational(1, 2) + x)/4
assert polygamma(2, x).expand(func=True) == \
polygamma(2, x)
assert polygamma(0, -1 + x).expand(func=True) == \
polygamma(0, x) - 1/(x - 1)
assert polygamma(0, 1 + x).expand(func=True) == \
1/x + polygamma(0, x )
assert polygamma(0, 2 + x).expand(func=True) == \
1/x + 1/(1 + x) + polygamma(0, x)
assert polygamma(0, 3 + x).expand(func=True) == \
polygamma(0, x) + 1/x + 1/(1 + x) + 1/(2 + x)
assert polygamma(0, 4 + x).expand(func=True) == \
polygamma(0, x) + 1/x + 1/(1 + x) + 1/(2 + x) + 1/(3 + x)
assert polygamma(1, 1 + x).expand(func=True) == \
polygamma(1, x) - 1/x**2
assert polygamma(1, 2 + x).expand(func=True, multinomial=False) == \
polygamma(1, x) - 1/x**2 - 1/(1 + x)**2
assert polygamma(1, 3 + x).expand(func=True, multinomial=False) == \
polygamma(1, x) - 1/x**2 - 1/(1 + x)**2 - 1/(2 + x)**2
assert polygamma(1, 4 + x).expand(func=True, multinomial=False) == \
polygamma(1, x) - 1/x**2 - 1/(1 + x)**2 - \
1/(2 + x)**2 - 1/(3 + x)**2
assert polygamma(0, x + y).expand(func=True) == \
polygamma(0, x + y)
assert polygamma(1, x + y).expand(func=True) == \
polygamma(1, x + y)
assert polygamma(1, 3 + 4*x + y).expand(func=True, multinomial=False) == \
polygamma(1, y + 4*x) - 1/(y + 4*x)**2 - \
1/(1 + y + 4*x)**2 - 1/(2 + y + 4*x)**2
assert polygamma(3, 3 + 4*x + y).expand(func=True, multinomial=False) == \
polygamma(3, y + 4*x) - 6/(y + 4*x)**4 - \
6/(1 + y + 4*x)**4 - 6/(2 + y + 4*x)**4
assert polygamma(3, 4*x + y + 1).expand(func=True, multinomial=False) == \
polygamma(3, y + 4*x) - 6/(y + 4*x)**4
e = polygamma(3, 4*x + y + S(3)/2)
assert e.expand(func=True) == e
e = polygamma(3, x + y + S(3)/4)
assert e.expand(func = True, basic = False) == e
def test_loggamma():
s1 = loggamma(1/(x+sin(x))+cos(x)).nseries(x,n=4)
s2 = (-log(2*x)-1)/(2*x) - log(x/pi)/2 + (4-log(2*x))*x/24 + O(x**2)
assert cancel(s1 - s2).removeO() == 0
s1 = loggamma(1/x).series(x)
s2 = (1/x-S(1)/2)*log(1/x) - 1/x + log(2*pi)/2 + \ | def tN(N, M):
assert loggamma(1/x)._eval_nseries(x,n=N,logx=None).getn() == M
tN(0, 0)
tN(1, 1)
tN(2, 3)
tN(3, 3)
tN(4, 5)
tN(5, 5)
def test_polygamma_expansion():
# A. & S., pa. 259 and 260
assert polygamma(0, 1/x).nseries(x, n=3) \
== -log(x) - x/2 - x**2/12 + O(x**4)
assert polygamma(1, 1/x).series(x, n=5) \
== x + x**2/2 + x**3/6 + O(x**5)
assert polygamma(3, 1/x).nseries(x, n=8) \
== 2*x**3 + 3*x**4 + 2*x**5 - x**7 + 4*x**9/3 + O(x**11) | x/12 - x**3/360 + x**5/1260 + O(x**7)
assert cancel(s1 - s2).removeO() == 0
|
alert.widget.component.ts | import {
Component, Input
} from 'angular2/core';
import {Widget} from "./widget.model";
import {WidgetComponent} from "./widget.component";
@Component({
selector: 'alert-widget-component',
template: `
<div class="carousel-item-src">
<div class="pictogram">
<img [src]="widget.icon_url"/>
</div>
<div class="main-content">
<div class="header">
<div class="date-of-source"> {{widget.date | date:'shortDate'}}</div>
<div class="news-source"> {{widget.source}}</div>
</div>
<div class="body-of-item">
<div class="title-carousel"> {{widget.title}}</div>
<div class="desc-carousel"> {{widget.desc}}</div>
</div>
</div>
</div>
`
})
export class | extends WidgetComponent {
}
| AlertWidgetComponent |
terminalparamsubpkloader_gen.go | // Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package model
import (
"sync"
"time"
)
// TerminalParamSubPkLoaderConfig captures the config to create a new TerminalParamSubPkLoader
type TerminalParamSubPkLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []string) ([]*TerminalParamSub, []error)
// Wait is how long wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit
MaxBatch int
}
// NewTerminalParamSubPkLoader creates a new TerminalParamSubPkLoader given a fetch, wait, and maxBatch
func NewTerminalParamSubPkLoader(config TerminalParamSubPkLoaderConfig) *TerminalParamSubPkLoader |
// TerminalParamSubPkLoader batches and caches requests
type TerminalParamSubPkLoader struct {
// this method provides the data for the loader
fetch func(keys []string) ([]*TerminalParamSub, []error)
// how long to done before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[string]*TerminalParamSub
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *terminalParamSubPkLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type terminalParamSubPkLoaderBatch struct {
keys []string
data []*TerminalParamSub
error []error
closing bool
done chan struct{}
}
// Load a TerminalParamSub by key, batching and caching will be applied automatically
func (l *TerminalParamSubPkLoader) Load(key string) (*TerminalParamSub, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a TerminalParamSub.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *TerminalParamSubPkLoader) LoadThunk(key string) func() (*TerminalParamSub, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() (*TerminalParamSub, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &terminalParamSubPkLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() (*TerminalParamSub, error) {
<-batch.done
var data *TerminalParamSub
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// its convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *TerminalParamSubPkLoader) LoadAll(keys []string) ([]*TerminalParamSub, []error) {
results := make([]func() (*TerminalParamSub, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
terminalParamSubs := make([]*TerminalParamSub, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
terminalParamSubs[i], errors[i] = thunk()
}
return terminalParamSubs, errors
}
// LoadAllThunk returns a function that when called will block waiting for a TerminalParamSubs.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *TerminalParamSubPkLoader) LoadAllThunk(keys []string) func() ([]*TerminalParamSub, []error) {
results := make([]func() (*TerminalParamSub, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([]*TerminalParamSub, []error) {
terminalParamSubs := make([]*TerminalParamSub, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
terminalParamSubs[i], errors[i] = thunk()
}
return terminalParamSubs, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *TerminalParamSubPkLoader) Prime(key string, value *TerminalParamSub) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
// make a copy when writing to the cache, its easy to pass a pointer in from a loop var
// and end up with the whole cache pointing to the same value.
cpy := *value
l.unsafeSet(key, &cpy)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *TerminalParamSubPkLoader) Clear(key string) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *TerminalParamSubPkLoader) unsafeSet(key string, value *TerminalParamSub) {
if l.cache == nil {
l.cache = map[string]*TerminalParamSub{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *terminalParamSubPkLoaderBatch) keyIndex(l *TerminalParamSubPkLoader, key string) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *terminalParamSubPkLoaderBatch) startTimer(l *TerminalParamSubPkLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *terminalParamSubPkLoaderBatch) end(l *TerminalParamSubPkLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}
| {
return &TerminalParamSubPkLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
} |
display.py | # This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from datetime import date, datetime, time, timedelta
from functools import partial
from io import BytesIO
from itertools import chain, groupby, imap
from math import ceil
from operator import attrgetter, itemgetter
from time import mktime
import dateutil
from dateutil.relativedelta import relativedelta
from flask import Response, flash, jsonify, redirect, request, session
from pytz import utc
from sqlalchemy.orm import joinedload, load_only, subqueryload, undefer, undefer_group
from werkzeug.exceptions import BadRequest, NotFound
from indico.core.db import db
from indico.core.db.sqlalchemy.colors import ColorTuple
from indico.core.db.sqlalchemy.util.queries import get_n_matching
from indico.modules.categories.controllers.base import RHDisplayCategoryBase
from indico.modules.categories.legacy import XMLCategorySerializer
from indico.modules.categories.models.categories import Category
from indico.modules.categories.serialize import (serialize_categories_ical, serialize_category, serialize_category_atom,
serialize_category_chain)
from indico.modules.categories.util import get_category_stats, get_upcoming_events, serialize_event_for_json_ld
from indico.modules.categories.views import WPCategory, WPCategoryCalendar, WPCategoryStatistics
from indico.modules.events.models.events import Event
from indico.modules.events.timetable.util import get_category_timetable
from indico.modules.events.util import get_base_ical_parameters
from indico.modules.news.util import get_recent_news
from indico.modules.users import User
from indico.modules.users.models.favorites import favorite_category_table
from indico.util.date_time import format_date, format_number, now_utc
from indico.util.decorators import classproperty
from indico.util.fs import secure_filename
from indico.util.i18n import _
from indico.util.string import to_unicode
from indico.web.flask.templating import get_template_module
from indico.web.flask.util import send_file, url_for
from indico.web.rh import RH
from indico.web.util import jsonify_data
CALENDAR_COLOR_PALETTE = [
ColorTuple('#1F1100', '#ECC495'),
ColorTuple('#0F0202', '#B9CBCA'),
ColorTuple('#0D1E1F', '#C2ECEF'),
ColorTuple('#000000', '#D0C296'),
ColorTuple('#202020', '#EFEBC2')
]
def _flat_map(func, list_):
return chain.from_iterable(imap(func, list_))
class RHCategoryIcon(RHDisplayCategoryBase):
_category_query_options = undefer('icon'),
def _check_access(self):
# Category icons are always public
pass
def _process(self):
if not self.category.has_icon:
raise NotFound
metadata = self.category.icon_metadata
return send_file(metadata['filename'], BytesIO(self.category.icon), mimetype=metadata['content_type'],
conditional=True)
class RHCategoryLogo(RHDisplayCategoryBase):
_category_query_options = undefer('logo'),
def _process(self):
if not self.category.has_logo:
raise NotFound
metadata = self.category.logo_metadata
return send_file(metadata['filename'], BytesIO(self.category.logo), mimetype=metadata['content_type'],
conditional=True)
class RHCategoryStatistics(RHDisplayCategoryBase):
def _get_stats_json(self, stats):
data = {'events': stats['events_by_year'], 'contributions': stats['contribs_by_year'],
'files': stats['attachments'], 'updated': stats['updated'].isoformat()}
if self.category.is_root:
data['users'] = self._count_users()
return jsonify(data)
def _get_stats_html(self, stats):
plots, values, updated = self._process_stats(stats, root=self.category.is_root)
return WPCategoryStatistics.render_template('category_statistics.html', self.category,
plots=plots, values=values, updated=updated, has_stats=True)
def _process(self):
stats = get_category_stats(self.category.id)
if request.accept_mimetypes.best_match(('application/json', 'text/html')) == 'application/json':
return self._get_stats_json(stats)
else:
return self._get_stats_html(stats)
def _plot_data(self, stats, tooltip=''):
years = sorted(stats.iterkeys())
min_year = now_utc().year
max_year = min_year
if years:
min_year = min(min_year, years[0]) - 1
max_year = max(max_year, years[-1])
data = {year: stats.get(year, 0) for year in xrange(min_year, max_year + 1)}
max_y = ceil(max(data.itervalues()) * 1.1) # 1.1 for padding in the graph
else:
data = {}
max_y = 0
return {'min_x': min_year, 'max_x': max_year, 'min_y': 0, 'max_y': max_y, 'values': data,
'total': sum(data.itervalues()), 'label_x': _("Years"), 'label_y': '', 'tooltip': tooltip}
def _process_stats(self, stats, root=False):
# tooltip formatting is for ease of translation
plots = [(_('Number of events'),
_('The year is the one of the start date of the event.'),
self._plot_data(stats.get('events_by_year', {}),
tooltip=_('{value} events in {year}').format(value='', year=''))),
(_('Number of contributions'),
_('The year is the one of the start date of the contribution.'),
self._plot_data(stats.get('contribs_by_year', {}),
tooltip=_('{value} contributions in {year}').format(value='', year='')))]
values = [(_('Number of attachments'), stats['attachments'])]
if root:
values.append((_('Number of users'), self._count_users()))
return plots, values, stats['updated']
def _count_users(self):
return User.find(is_deleted=False, is_pending=False).count()
class RHCategoryInfo(RHDisplayCategoryBase):
@classproperty
@classmethod
def _category_query_options(cls):
children_strategy = subqueryload('children')
children_strategy.load_only('id', 'parent_id', 'title', 'protection_mode', 'event_creation_restricted')
children_strategy.subqueryload('acl_entries')
children_strategy.undefer('deep_children_count')
children_strategy.undefer('deep_events_count')
children_strategy.undefer('has_events')
return (children_strategy,
load_only('id', 'parent_id', 'title', 'protection_mode'),
subqueryload('acl_entries'),
undefer('deep_children_count'),
undefer('deep_events_count'),
undefer('has_events'),
undefer('chain'))
def _process(self):
return jsonify_data(flash=False,
**serialize_category_chain(self.category, include_children=True, include_parents=True))
class RHReachableCategoriesInfo(RH):
def _get_reachable_categories(self, id_, excluded_ids):
cat = Category.query.filter_by(id=id_).options(joinedload('children').load_only('id')).one()
ids = ({c.id for c in cat.children} | {c.id for c in cat.parent_chain_query}) - excluded_ids
if not ids:
return []
return (Category.query
.filter(Category.id.in_(ids))
.options(*RHCategoryInfo._category_query_options)
.all())
def _process(self):
excluded_ids = set(request.json.get('exclude', set())) if request.json else set()
categories = self._get_reachable_categories(request.view_args['category_id'], excluded_ids=excluded_ids)
return jsonify_data(categories=[serialize_category_chain(c, include_children=True) for c in categories],
flash=False)
class RHCategorySearch(RH):
def _process(self):
q = request.args['q'].lower()
query = (Category.query
.filter(Category.title_matches(q))
.options(undefer('deep_children_count'), undefer('deep_events_count'), undefer('has_events'),
joinedload('acl_entries')))
if session.user:
# Prefer favorite categories
query = query.order_by(Category.favorite_of.any(favorite_category_table.c.user_id == session.user.id)
.desc())
# Prefer exact matches and matches at the beginning, then order by category title and if
# those are identical by the chain titles
query = (query
.order_by((db.func.lower(Category.title) == q).desc(),
db.func.lower(Category.title).startswith(q).desc(),
db.func.lower(Category.title),
Category.chain_titles))
total_count = query.count()
query = query.limit(10)
return jsonify_data(categories=[serialize_category(c, with_favorite=True, with_path=True) for c in query],
total_count=total_count, flash=False)
class RHSubcatInfo(RHDisplayCategoryBase):
"""Get basic information about subcategories.
This is intended to return information shown on a category display
page that is not needed immediately and is somewhat expensive to
retrieve.
"""
@classproperty
@classmethod
def _category_query_options(cls):
children_strategy = joinedload('children')
children_strategy.load_only('id')
children_strategy.undefer('deep_events_count')
return children_strategy, load_only('id', 'parent_id', 'protection_mode')
def _process(self):
event_counts = {c.id: {'value': c.deep_events_count, 'pretty': format_number(c.deep_events_count)}
for c in self.category.children}
return jsonify_data(flash=False, event_counts=event_counts)
class RHDisplayCategoryEventsBase(RHDisplayCategoryBase):
"""Base class for display pages displaying an event list"""
_category_query_options = (joinedload('children').load_only('id', 'title', 'protection_mode'),
undefer('attachment_count'), undefer('has_events'))
_event_query_options = (joinedload('person_links'), joinedload('series'), undefer_group('series'),
load_only('id', 'category_id', 'created_dt', 'start_dt', 'end_dt', 'timezone',
'protection_mode', 'title', 'type_', 'series_pos', 'series_count',
'own_address', 'own_venue_id', 'own_venue_name'))
def _process_args(self):
RHDisplayCategoryBase._process_args(self)
self.now = now_utc(exact=False).astimezone(self.category.display_tzinfo)
def format_event_date(self, event):
day_month = 'dd MMM'
tzinfo = self.category.display_tzinfo
start_dt = event.start_dt.astimezone(tzinfo)
end_dt = event.end_dt.astimezone(tzinfo)
if start_dt.year != end_dt.year:
return '{} - {}'.format(to_unicode(format_date(start_dt, timezone=tzinfo)),
to_unicode(format_date(end_dt, timezone=tzinfo)))
elif (start_dt.month != end_dt.month) or (start_dt.day != end_dt.day):
return '{} - {}'.format(to_unicode(format_date(start_dt, day_month, timezone=tzinfo)),
to_unicode(format_date(end_dt, day_month, timezone=tzinfo)))
else:
return to_unicode(format_date(start_dt, day_month, timezone=tzinfo))
def group_by_month(self, events):
def _format_tuple(x):
(year, month), events = x
return {'name': format_date(date(year, month, 1), format='MMMM yyyy'),
'events': list(events),
'is_current': year == self.now.year and month == self.now.month}
def _key(event):
start_dt = event.start_dt.astimezone(self.category.tzinfo)
return start_dt.year, start_dt.month
months = groupby(events, key=_key)
return map(_format_tuple, months)
def happening_now(self, event):
return event.start_dt <= self.now < event.end_dt
def is_recent(self, dt):
return dt > self.now - relativedelta(weeks=1)
class RHDisplayCategory(RHDisplayCategoryEventsBase):
"""Show the contents of a category (events/subcategories)"""
def _process(self):
# Current events, which are always shown by default are events of this month and of the previous month.
# If there are no events in this range, it will include the last and next month containing events.
past_threshold = self.now - relativedelta(months=1, day=1, hour=0, minute=0)
future_threshold = self.now + relativedelta(months=1, day=1, hour=0, minute=0)
next_event_start_dt = (db.session.query(Event.start_dt)
.filter(Event.start_dt >= self.now, Event.category_id == self.category.id)
.order_by(Event.start_dt.asc(), Event.id.asc())
.first() or (None,))[0]
previous_event_start_dt = (db.session.query(Event.start_dt)
.filter(Event.start_dt < self.now, Event.category_id == self.category.id)
.order_by(Event.start_dt.desc(), Event.id.desc())
.first() or (None,))[0]
if next_event_start_dt is not None and next_event_start_dt > future_threshold:
future_threshold = next_event_start_dt + relativedelta(months=1, day=1, hour=0, minute=0)
if previous_event_start_dt is not None and previous_event_start_dt < past_threshold:
past_threshold = previous_event_start_dt.replace(day=1, hour=0, minute=0)
event_query = (Event.query.with_parent(self.category)
.options(*self._event_query_options)
.order_by(Event.start_dt.desc(), Event.id.desc()))
past_event_query = event_query.filter(Event.start_dt < past_threshold)
future_event_query = event_query.filter(Event.start_dt >= future_threshold)
current_event_query = event_query.filter(Event.start_dt >= past_threshold,
Event.start_dt < future_threshold)
json_ld_events = events = current_event_query.filter(Event.start_dt < future_threshold).all()
events_by_month = self.group_by_month(events)
future_event_count = future_event_query.count()
past_event_count = past_event_query.count()
if not session.user and future_event_count:
json_ld_events = json_ld_events + future_event_query.all()
show_future_events = bool(self.category.id in session.get('fetch_future_events_in', set()) or
(session.user and session.user.settings.get('show_future_events', False)))
show_past_events = bool(self.category.id in session.get('fetch_past_events_in', set()) or
(session.user and session.user.settings.get('show_past_events', False)))
managers = sorted(self.category.get_manager_list(), key=attrgetter('principal_type.name', 'name'))
threshold_format = '%Y-%m'
params = {'event_count': len(events),
'events_by_month': events_by_month,
'format_event_date': self.format_event_date,
'future_event_count': future_event_count,
'show_future_events': show_future_events,
'future_threshold': future_threshold.strftime(threshold_format),
'happening_now': self.happening_now,
'is_recent': self.is_recent,
'managers': managers,
'past_event_count': past_event_count,
'show_past_events': show_past_events,
'past_threshold': past_threshold.strftime(threshold_format),
'json_ld': map(serialize_event_for_json_ld, json_ld_events),
'atom_feed_url': url_for('.export_atom', self.category),
'atom_feed_title': _('Events of "{}"').format(self.category.title)}
params.update(get_base_ical_parameters(session.user, 'category',
'/export/categ/{0}.ics'.format(self.category.id), {'from': '-31d'}))
if not self.category.is_root:
return WPCategory.render_template('display/category.html', self.category, **params)
news = get_recent_news()
upcoming_events = get_upcoming_events()
return WPCategory.render_template('display/root_category.html', self.category, news=news,
upcoming_events=upcoming_events, **params)
class RHEventList(RHDisplayCategoryEventsBase):
"""Return the HTML for the event list before/after a specific month"""
def _parse_year_month(self, string):
try:
dt = datetime.strptime(string, '%Y-%m')
except (TypeError, ValueError):
return None
return self.category.display_tzinfo.localize(dt)
def _process_args(self):
RHDisplayCategoryEventsBase._process_args(self)
before = self._parse_year_month(request.args.get('before'))
after = self._parse_year_month(request.args.get('after'))
if before is None and after is None:
raise BadRequest('"before" or "after" parameter must be specified')
event_query = (Event.query.with_parent(self.category)
.options(*self._event_query_options)
.order_by(Event.start_dt.desc(), Event.id.desc()))
if before:
event_query = event_query.filter(Event.start_dt < before)
if after:
event_query = event_query.filter(Event.start_dt >= after)
self.events = event_query.all()
def _process(self):
events_by_month = self.group_by_month(self.events)
tpl = get_template_module('categories/display/event_list.html')
html = tpl.event_list_block(events_by_month=events_by_month, format_event_date=self.format_event_date,
is_recent=self.is_recent, happening_now=self.happening_now)
return jsonify_data(flash=False, html=html)
class RHShowEventsInCategoryBase(RHDisplayCategoryBase):
"""Set whether the events in a category are automatically displayed or not"""
session_field = ''
def _show_events(self, show_events):
category_ids = session.setdefault(self.session_field, set())
if show_events:
category_ids.add(self.category.id)
else:
category_ids.discard(self.category.id)
session.modified = True
def _process_DELETE(self):
self._show_events(False)
def _process_PUT(self):
self._show_events(True)
class RHShowFutureEventsInCategory(RHShowEventsInCategoryBase):
"""Set whether the past events in a category are automatically displayed or not"""
session_field = 'fetch_future_events_in'
class RHShowPastEventsInCategory(RHShowEventsInCategoryBase):
"""Set whether the past events in a category are automatically displayed or not"""
session_field = 'fetch_past_events_in'
class RHExportCategoryICAL(RHDisplayCategoryBase):
def _process(self):
filename = '{}-category.ics'.format(secure_filename(self.category.title, str(self.category.id)))
buf = serialize_categories_ical([self.category.id], session.user,
Event.end_dt >= (now_utc() - timedelta(weeks=4)))
return send_file(filename, buf, 'text/calendar')
class RHExportCategoryAtom(RHDisplayCategoryBase):
def _process(self):
filename = '{}-category.atom'.format(secure_filename(self.category.title, str(self.category.id)))
buf = serialize_category_atom(self.category,
url_for(request.endpoint, self.category, _external=True),
session.user,
Event.end_dt >= now_utc())
return send_file(filename, buf, 'application/atom+xml')
class RHXMLExportCategoryInfo(RH):
def _process_args(self):
try:
id_ = int(request.args['id'])
except ValueError:
raise BadRequest('Invalid Category ID')
self.category = Category.get_one(id_, is_deleted=False)
def _process(self):
category_xml_info = XMLCategorySerializer(self.category).serialize_category()
return Response(category_xml_info, mimetype='text/xml')
class RHCategoryOverview(RHDisplayCategoryBase):
"""Display the events for a particular day, week or month"""
def _process_args(self):
RHDisplayCategoryBase._process_args(self)
self.detail = request.args.get('detail', 'event')
if self.detail not in ('event', 'session', 'contribution'):
raise BadRequest('Invalid detail argument')
self.period = request.args.get('period', 'day')
if self.period not in ('day', 'month', 'week'):
raise BadRequest('Invalid period argument')
if 'date' in request.args:
try:
date = datetime.strptime(request.args['date'], '%Y-%m-%d')
except ValueError:
raise BadRequest('Invalid date argument')
else:
date = datetime.now()
date = self.category.display_tzinfo.localize(date)
date = date.replace(hour=0, minute=0, second=0, microsecond=0)
if self.period == 'day':
self.start_dt = date
self.end_dt = self.start_dt + relativedelta(days=1)
elif self.period == 'week':
self.start_dt = date - relativedelta(days=date.weekday())
self.end_dt = self.start_dt + relativedelta(days=7)
elif self.period == 'month':
self.start_dt = date + relativedelta(day=1)
self.end_dt = self.start_dt + relativedelta(months=1)
def _process(self):
info = get_category_timetable([self.category.id], self.start_dt, self.end_dt, detail_level=self.detail,
tz=self.category.display_tzinfo, from_categ=self.category, grouped=False)
events = info['events']
# Only categories with icons are listed in the sidebar
subcategory_ids = {event.category.effective_icon_data['source_id']
for event in events if event.category.has_effective_icon}
subcategories = Category.query.filter(Category.id.in_(subcategory_ids))
# Events spanning multiple days must appear on all days
events = _flat_map(partial(self._process_multiday_events, info), events)
def _event_sort_key(event):
# Ongoing events are shown after all other events on the same day and are sorted by start_date
ongoing = getattr(event, 'ongoing', False)
return (event.start_dt.date(), ongoing,
-mktime(event.first_occurence_start_dt.timetuple()) if ongoing else event.start_dt.time())
events = sorted(events, key=_event_sort_key)
params = {
'detail': self.detail,
'period': self.period,
'subcategories': subcategories,
'start_dt': self.start_dt,
'end_dt': self.end_dt - relativedelta(days=1), # Display a close-ended interval
'previous_day_url': self._other_day_url(self.start_dt - relativedelta(days=1)),
'next_day_url': self._other_day_url(self.start_dt + relativedelta(days=1)),
'previous_month_url': self._other_day_url(self.start_dt - relativedelta(months=1)),
'next_month_url': self._other_day_url(self.start_dt + relativedelta(months=1)),
'previous_year_url': self._other_day_url(self.start_dt - relativedelta(years=1)),
'next_year_url': self._other_day_url(self.start_dt + relativedelta(years=1)),
'mathjax': True
}
if self.detail != 'event':
cte = self.category.get_protection_parent_cte()
params['accessible_categories'] = {cat_id
for cat_id, prot_parent_id in db.session.query(cte)
if prot_parent_id == self.category.id}
if self.period == 'day':
return WPCategory.render_template('display/overview/day.html', self.category, events=events, **params)
elif self.period == 'week':
days = self._get_week_days()
template = 'display/overview/week.html'
params['previous_week_url'] = self._other_day_url(self.start_dt - relativedelta(days=7))
params['next_week_url'] = self._other_day_url(self.start_dt + relativedelta(days=7))
elif self.period == 'month':
days = self._get_calendar_days()
template = 'display/overview/month.html'
events_by_day = []
for day in days:
events_by_day.append((day, self._pop_head_while(lambda x: x.start_dt.date() <= day.date(), events)))
# Check whether all weekends are empty
hide_weekend = (not any(map(itemgetter(1), events_by_day[5::7])) and
not any(map(itemgetter(1), events_by_day[6::7])))
if hide_weekend:
events_by_day = [x for x in events_by_day if x[0].weekday() not in (5, 6)]
return WPCategory.render_template(template, self.category, events_by_day=events_by_day,
hide_weekend=hide_weekend, **params)
def _get_week_days(self):
# Return the days shown in the weekly overview
return self._get_days(self.start_dt, self.end_dt)
def _get_calendar_days(self):
# Return the days shown in the monthly overview
start_dt = self.start_dt - relativedelta(days=self.start_dt.weekday())
end_dt = self.end_dt + relativedelta(days=(7 - self.end_dt.weekday()) % 7)
return self._get_days(start_dt, end_dt)
@staticmethod
def _get_days(start_dt, end_dt):
# Return all days in the open-ended interval
current_dt = start_dt
tz = current_dt.tzinfo
next_day = current_dt.date() + timedelta(1)
beginning_of_next_day = tz.localize(datetime.combine(next_day, time()))
while current_dt < end_dt:
yield current_dt
current_dt = beginning_of_next_day
beginning_of_next_day = current_dt + relativedelta(days=1)
@staticmethod
def _pop_head_while(predicate, list_):
# Pop the head of the list while the predicate is true and return the popped elements
res = []
while len(list_) and predicate(list_[0]):
res.append(list_[0])
list_.pop(0)
return res
def _other_day_url(self, date):
return url_for('.overview', self.category, detail=self.detail, period=self.period,
date=format_date(date, 'yyyy-MM-dd'))
def _process_multiday_events(self, info, event):
# Add "fake" proxy events for events spanning multiple days such that there is one event per day
# Function type: Event -> List[Event]
tzinfo = self.category.display_tzinfo
# Breaks, contributions and sessions grouped by start_dt. Each EventProxy will return the relevant ones only
timetable_objects = sorted(chain(*info[event.id].values()), key=attrgetter('timetable_entry.start_dt'))
timetable_objects_by_date = {x[0]: list(x[1]) for x
in groupby(timetable_objects, key=lambda x: x.start_dt.astimezone(tzinfo).date())}
# All the days of the event shown in the overview
event_days = self._get_days(max(self.start_dt, event.start_dt.astimezone(tzinfo)),
min(self.end_dt, event.end_dt.astimezone(tzinfo)))
# Generate a proxy object with adjusted start_dt and timetable_objects for each day
return [_EventProxy(event, day, tzinfo, timetable_objects_by_date.get(day.date(), [])) for day in event_days]
class _EventProxy(object):
def __init__(self, event, date, tzinfo, timetable_objects):
start_dt = datetime.combine(date, event.start_dt.astimezone(tzinfo).timetz())
assert date >= event.start_dt
assert date <= event.end_dt
object.__setattr__(self, '_start_dt', start_dt)
object.__setattr__(self, '_real_event', event)
object.__setattr__(self, '_event_tz_start_date', event.start_dt.astimezone(tzinfo).date())
object.__setattr__(self, '_timetable_objects', timetable_objects)
def __getattribute__(self, name):
if name == 'start_dt':
return object.__getattribute__(self, '_start_dt')
event = object.__getattribute__(self, '_real_event')
if name == 'timetable_objects':
return object.__getattribute__(self, '_timetable_objects')
if name == 'ongoing':
# the event is "ongoing" if the dates (in the tz of the category)
# of the event and the proxy (calendar entry) don't match
event_start_date = object.__getattribute__(self, '_event_tz_start_date')
return event_start_date != self.start_dt.date()
if name == 'first_occurence_start_dt':
|
return getattr(event, name)
def __setattr__(self, name, value):
raise AttributeError('This instance is read-only')
def __repr__(self):
return '<_EventProxy({}, {})>'.format(self.start_dt, object.__getattribute__(self, '_real_event'))
class RHCategoryCalendarView(RHDisplayCategoryBase):
def _process(self):
if not request.is_xhr:
return WPCategoryCalendar.render_template('display/calendar.html', self.category,
start_dt=request.args.get('start_dt'))
tz = self.category.display_tzinfo
start = tz.localize(dateutil.parser.parse(request.args['start'])).astimezone(utc)
end = tz.localize(dateutil.parser.parse(request.args['end'])).astimezone(utc)
query = (Event.query
.filter(Event.starts_between(start, end),
Event.is_visible_in(self.category.id),
~Event.is_deleted)
.options(load_only('id', 'title', 'start_dt', 'end_dt', 'category_id')))
events = self._get_event_data(query)
ongoing_events = (Event.query
.filter(Event.is_visible_in(self.category.id),
Event.start_dt < start,
Event.end_dt > end)
.options(load_only('id', 'title', 'start_dt', 'end_dt', 'timezone'))
.order_by(Event.title)
.all())
return jsonify_data(flash=False, events=events, ongoing_event_count=len(ongoing_events),
ongoing_events_html=self._render_ongoing_events(ongoing_events))
def _get_event_data(self, event_query):
data = []
tz = self.category.display_tzinfo
for event in event_query:
category_id = event.category_id
event_data = {'title': event.title,
'start': event.start_dt.astimezone(tz).replace(tzinfo=None).isoformat(),
'end': event.end_dt.astimezone(tz).replace(tzinfo=None).isoformat(),
'url': event.url}
colors = CALENDAR_COLOR_PALETTE[category_id % len(CALENDAR_COLOR_PALETTE)]
event_data.update({'textColor': '#' + colors.text, 'color': '#' + colors.background})
data.append(event_data)
return data
def _render_ongoing_events(self, ongoing_events):
template = get_template_module('categories/display/_calendar_ongoing_events.html')
return template.render_ongoing_events(ongoing_events, self.category.display_tzinfo)
class RHCategoryUpcomingEvent(RHDisplayCategoryBase):
"""Redirect to the upcoming event of a category."""
def _process(self):
event = self._get_upcoming_event()
if event:
return redirect(event.url)
else:
flash(_('There are no upcoming events for this category'))
return redirect(self.category.url)
def _get_upcoming_event(self):
query = (Event.query
.filter(Event.is_visible_in(self.category.id),
Event.start_dt > now_utc(),
~Event.is_deleted)
.options(subqueryload('acl_entries'))
.order_by(Event.start_dt, Event.id))
res = get_n_matching(query, 1, lambda event: event.can_access(session.user))
if res:
return res[0]
| return event.start_dt |
context.rs | //! The main handle to an IR instance.
//!
//! [`Context`] contains several
//! [generational_arena](https://github.com/fitzgen/generational-arena) collections to maintain the
//! IR ECS.
//!
//! It is passed around as a mutable reference to many of the Sway-IR APIs.
use std::collections::HashMap;
use generational_arena::Arena;
use crate::{
asm::AsmBlockContent,
block::BlockContent,
function::FunctionContent,
irtype::AggregateContent,
metadata::{MetadataIndex, Metadatum, StorageOperation},
module::ModuleContent,
module::ModuleIterator,
pointer::PointerContent,
value::ValueContent,
};
/// The main IR context handle.
///
/// Every module, function, block and value is stored here. Some aggregate metadata is also
/// managed by the context.
#[derive(Default)]
pub struct | {
pub modules: Arena<ModuleContent>,
pub functions: Arena<FunctionContent>,
pub blocks: Arena<BlockContent>,
pub values: Arena<ValueContent>,
pub pointers: Arena<PointerContent>,
pub aggregates: Arena<AggregateContent>,
pub asm_blocks: Arena<AsmBlockContent>,
// The metadata indices for locations need a fast lookup, hence the metadata_reverse_map.
// Using a HashMap might be overkill as most projects have only a handful of source files.
pub metadata: Arena<Metadatum>,
pub metadata_reverse_map: HashMap<*const std::path::PathBuf, MetadataIndex>,
pub(crate) metadata_storage_indices: HashMap<StorageOperation, MetadataIndex>,
next_unique_sym_tag: u64,
}
impl Context {
/// Return an interator for every module in this context.
pub fn module_iter(&self) -> ModuleIterator {
ModuleIterator::new(self)
}
/// Get a globally unique symbol.
///
/// The name will be in the form `"anon_N"`, where `N` is an incrementing decimal.
pub fn get_unique_name(&mut self) -> String {
let sym = format!("anon_{}", self.next_unique_sym_tag);
self.next_unique_sym_tag += 1;
sym
}
}
use std::fmt::{Display, Error, Formatter};
impl Display for Context {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
write!(f, "{}", crate::printer::to_string(self))
}
}
impl From<Context> for String {
fn from(context: Context) -> Self {
crate::printer::to_string(&context)
}
}
| Context |
provider.shaka.js | webpackJsonpjwplayer([4],{107:function(a,b,c){var d,e;d=[c(70),c(46),c(53),c(64),c(65),c(62),c(108)],e=function(a,b,c,d,e,f,g){function h(a){var b=a/1e3;return Math.floor(b).toLocaleString()+" kbps"}function i(a){function i(){this.state===d.LOADING&&this.setState(d.PLAYING)}function j(a,b){switch(b.schemeIdUri){case"urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed":case"edef8ba9-79d6-4ace-a3c8-27dcd51d21ed":var c="com.widevine.alpha",d=a.widevine.url;return d||(console.log("No licensing server specified for widevine. Defaulting to proxy."),d="http://widevine-proxy.appspot.com/proxy"),new g.player.DrmSchemeInfo(c,d,!1,null,null);default:return console.log("Unrecognized scheme: "+b.schemeIdUri),null}}function k(a){console.error(a);var b="Unknown playback error";G.trigger(e.JWPLAYER_MEDIA_ERROR,{message:"Error playing file:"+b})}function l(){return this.levels}function m(){return this.currentQuality}function n(a){this.setState(d.LOADING);var c=a.sources[0].file,e=a.sources[0].drm||{},f=a.starttime,h=new g.util.EWMABandwidthEstimator,i=b.partial(j,e),k=new g.player.DashVideoSource(c,i,h),l=F.load(k);l.then(o.bind(this)),f&&l.then(function(){this.seek(f)}.bind(this))}function o(){G.trigger(e.JWPLAYER_MEDIA_BUFFER_FULL);var a=F.getVideoTracks();a.length>1&&(this.currentQuality=0,this.levels=b.map(a,function(a){return{label:h(a.bandwidth),level_id:a.id}}),this.levels.unshift({label:"Auto",level_id:"auto"}),G.trigger(e.JWPLAYER_MEDIA_LEVELS,{levels:this.levels,currentQuality:this.currentQuality}))}function p(){G.trigger(e.JWPLAYER_MEDIA_META,{duration:E.duration,height:E.videoHeight,width:E.videoWidth})}function q(a){E.muted=a}function r(){E.pause(),this.setState(d.PAUSED)}function s(){E.play(),this.setState(d.BUFFERING),this.setVisibility(!0)}function t(a){E.currentTime=a,this.trigger(e.JWPLAYER_MEDIA_SEEK,{position:E.currentTime,offset:a})}function u(a){a=!!a,a?c.style(C,{visibility:"visible",opacity:1}):c.style(C,{visibility:"",opacity:0})}function v(){F.unload(),C===E.parentNode&&C.removeChild(E)}function w(){E.pause(),this.setState(d.IDLE)}function x(a){C=a,C.appendChild(E)}function y(){return C}function z(a){if(a=parseInt(a,10),!(this.currentQuality===a||0>a||a>=this.levels.length)){if(0===a)F.enableAdaptation(!0);else{var b=this.levels[a].level_id;F.enableAdaptation(!1),this.setState(d.LOADING),F.selectVideoTrack(b)}this.currentQuality=a,this.trigger(e.JWPLAYER_MEDIA_LEVEL_CHANGED,{currentQuality:this.currentQuality,levels:this.levels})}}function A(){var a=E.currentTime;Math.abs(a-this.position)<1&&G.state===d.BUFFERING&&G.setState(d.PLAYING),this.position=a,G.trigger(e.JWPLAYER_MEDIA_TIME,{position:a,duration:E.duration,quality:1}),a>E.duration-.1&&E.duration>1&&(G.setState(d.IDLE),G.trigger(e.JWPLAYER_MEDIA_COMPLETE))}function B(a){E.volume=a/100}var C,D=document.getElementById(a),E=D?D.querySelector("video"):void 0;E=E||document.createElement("video");var F=new g.player.Player(E),G=this;this.position=0,this.levels=[],this.currentQuality=-1,b.extend(this,f,{load:n,mute:q,pause:r,getQualityLevels:l.bind(this),getCurrentQuality:m.bind(this),play:s,seek:t,remove:v,setContainer:x,getContainer:y,setCurrentQuality:z.bind(this),setVisibility:u,stop:w,volume:B,supportsFullscreen:b.constant(!0),getName:b.constant({name:"shaka"})}),F.addEventListener("error",k),E.addEventListener("loadedmetadata",p.bind(this)),E.addEventListener("timeupdate",A.bind(this)),E.addEventListener("playing",i.bind(this))}return g.polyfill.installAll(),i.supports=function(b,c){var d=a(c);if(b.drm&&!d("drm"))return!1;var e=!1;return b.file?"dash"===b.type||"mpd"===b.type?e=!0:(b.file.indexOf(".mpd")>-1||b.file.indexOf("mpd-time-csf")>-1)&&(e=!0):e=!0,window.MediaSource||(e=!1),e&&d("dash")},i.getName=b.constant({name:"shaka"}),{register:function(a){a.api.registerProvider(i)}}}.apply(b,d),!(void 0!==e&&(a.exports=e))},108:function(a,b,c){var d;!function(){var e={};(function(a){function b(a,b){var c=a.split("."),d=Bd;c[0]in d||!d.execScript||d.execScript("var "+c[0]);for(var e;c.length&&(e=c.shift());)c.length||void 0===b?d=d[e]?d[e]:d[e]={}:d[e]=b}function c(a,b){function c(){}c.prototype=b.prototype,a.cf=b.prototype,a.prototype=new c,a.prototype.constructor=a,a.$e=function(a,c,d){return b.prototype[c].apply(a,Array.prototype.slice.call(arguments,2))}}function d(a){var b=console[a];b?b.bind||(console[a]=function(){b.apply(console,arguments)}):console[a]=function(){}}function e(a){Dd[a]={Bb:Cd(),end:NaN}}function f(a){(a=Dd[a])&&(a.end=Cd())}function g(a){return(a=Dd[a])&&a.end?a.end-a.Bb:NaN}function h(a,b,c){this.id=a,this.bandwidth=b||0,this.lang=c||"unknown",this.active=!1}function i(a,b){this.id=a,this.lang=b||"unknown",this.enabled=this.active=!1}function j(a,b,c,d){this.id=a,this.bandwidth=b||0,this.width=c||0,this.height=d||0,this.active=!1}function k(a,b){var c=a.width*a.height,d=b.width*b.height;return d>c?-1:c>d?1:a.bandwidth<b.bandwidth?-1:a.bandwidth>b.bandwidth?1:0}function l(){var b="CustomEvent"in a;if(b)try{new CustomEvent("")}catch(c){b=!1}b||(a.CustomEvent=m)}function m(a,b){var c=document.createEvent("CustomEvent"),d=b||{bubbles:!1,cancelable:!1,detail:null};return c.initCustomEvent(a,!!d.bubbles,!!d.cancelable,d.detail),c}function n(){var a=Element.prototype;a.requestFullscreen=a.requestFullscreen||a.mozRequestFullScreen||a.msRequestFullscreen||a.webkitRequestFullscreen,a=Document.prototype,a.exitFullscreen=a.exitFullscreen||a.mozCancelFullScreen||a.msExitFullscreen||a.webkitExitFullscreen,"fullscreenElement"in document||Object.defineProperty(document,"fullscreenElement",{get:function(){return document.mozFullScreenElement||document.msFullscreenElement||document.webkitFullscreenElement}}),document.addEventListener("webkitfullscreenchange",o),document.addEventListener("webkitfullscreenerror",o),document.addEventListener("mozfullscreenchange",o),document.addEventListener("mozfullscreenerror",o),document.addEventListener("MSFullscreenChange",o),document.addEventListener("MSFullscreenError",o)}function o(a){var b=a.type.replace(/^(webkit|moz|MS)/,"").toLowerCase(),b=new Event(b,a);a.target.dispatchEvent(b)}function p(){return Promise.reject(Error("The key system specified is not supported."))}function q(a){return null==a?Promise.resolve():Promise.reject(Error("MediaKeys not supported."))}function r(){throw new TypeError("Illegal constructor.")}function s(){throw new TypeError("Illegal constructor.")}function t(){var a=HTMLVideoElement.prototype;a.getVideoPlaybackQuality||(a.getVideoPlaybackQuality=function(){return"webkitDroppedFrameCount"in this?{corruptedVideoFrames:0,droppedVideoFrames:this.webkitDroppedFrameCount,totalVideoFrames:this.webkitDecodedFrameCount,creationTime:null,totalFrameDelay:null}:null})}function u(a,b){for(var c={},d=0;d<a.length;++d){var e=b?b(a[d]):a[d].toString();c[e]=a[d]}var f,d=[];for(f in c)d.push(c[f]);return d}function v(){return Date.now()+Ed}function w(a,b){this.j=a,this.Lc=b==Fd,this.i=0}function x(a){var b=a.j.getUint8(a.i);return a.i+=1,b}function y(a){var b=a.j.getUint32(a.i,a.Lc);return a.i+=4,b}function z(a){var b,c;if(a.Lc?(b=a.j.getUint32(a.i,!0),c=a.j.getUint32(a.i+4,!0)):(c=a.j.getUint32(a.i,!1),b=a.j.getUint32(a.i+4,!1)),c>2097151)throw new RangeError("DataViewReader: Overflow reading 64-bit value.");return a.i+=8,c*Math.pow(2,32)+b}function A(a){if(a.i+16>a.j.byteLength)throw new RangeError("DataViewReader: Read past end of DataView.");var b=new Uint8Array(a.j.buffer,a.i,16);return a.i+=16,b}function B(a,b){if(a.i+b>a.j.byteLength)throw new RangeError("DataViewReader: Skip past end of DataView.");a.i+=b}function C(a){this.j=a,this.Ya=new w(a,0),Gd||(Gd=[new Uint8Array([255]),new Uint8Array([127,255]),new Uint8Array([63,255,255]),new Uint8Array([31,255,255,255]),new Uint8Array([15,255,255,255,255]),new Uint8Array([7,255,255,255,255,255]),new Uint8Array([3,255,255,255,255,255,255]),new Uint8Array([1,255,255,255,255,255,255,255])])}function D(a){var b;if(b=E(a),7<b.length)throw new RangeError("EbmlParser: EBML ID must be at most 7 bytes.");for(var c=0,d=0;d<b.length;d++)c=256*c+b[d];b=c,c=E(a);a:{for(d=0;d<Gd.length;d++)if(fa(c,Gd[d])){d=!0;break a}d=!1}if(d)throw new RangeError("EbmlParser: Element cannot contain dynamically sized data.");if(8==c.length&&224&c[1])throw new RangeError("EbmlParser: Variable sized integer value must be at most 53 bits.");for(var d=c[0]&(1<<8-c.length)-1,e=1;e<c.length;e++)d=256*d+c[e];return c=d,c=a.Ya.i+c<=a.j.byteLength?c:a.j.byteLength-a.Ya.i,d=new DataView(a.j.buffer,a.j.byteOffset+a.Ya.i,c),B(a.Ya,c),new F(b,d)}function E(a){var b,c=x(a.Ya);for(b=1;8>=b&&!(c&1<<8-b);b++);if(b>8)throw new RangeError("EbmlParser: Variable sized integer must fit within 8 bytes.");var d=new Uint8Array(b);for(d[0]=c,c=1;b>c;c++)d[c]=x(a.Ya);return d}function F(a,b){this.id=a,this.j=b}function G(a){if(8<a.j.byteLength)throw new RangeError("EbmlElement: Unsigned integer has too many bytes.");if(8==a.j.byteLength&&224&a.j.getUint8(0))throw new RangeError("EbmlParser: Unsigned integer must be at most 53 bits.");for(var b=0,c=0;c<a.j.byteLength;c++)var d=a.j.getUint8(c),b=256*b+d;return b}function H(a){this.ad=Math.exp(Math.log(.5)/a),this.vc=this.Cc=0}function I(a){return a.Cc/(1-Math.pow(a.ad,a.vc))}function J(a){var b,c=new CustomEvent(a.type,{detail:a.detail,bubbles:!!a.bubbles});for(b in a)b in c||(c[b]=a[b]);return c}function K(a){return new CustomEvent("error",{detail:a,bubbles:!0})}function L(a,b,c){return M(b),M(c),c==b||a>=Hd&&c==b.split("-")[0]||a>=Id&&c.split("-")[0]==b.split("-")[0]?!0:!1}function M(a){a=a.toLowerCase().split("-");var b=Jd[a[0]];return b&&(a[0]=b),a.join("-")}function N(a){return Object.keys(a).map(function(b){return a[b]})}function O(){this.da={}}function P(a){var b,c=[];for(b in a.da)c.push.apply(c,a.da[b]);return c}function Q(){this.fb=new O}function R(a,b,c,d){b=new T(b,c,d),a.fb.push(c,b)}function S(a){for(var b=P(a.fb),c=0;c<b.length;++c)b[c].Yb();a.fb.clear()}function T(a,b,c){this.target=a,this.type=b,this.rd=c,this.target.addEventListener(b,c,!1)}function U(a){this.Kc=new O,this.parent=a}function V(a,b){b.currentTarget=a;for(var c=a.Kc.get(b.type)||[],d=0;d<c.length;++d){var e=c[d];try{e.handleEvent?e.handleEvent(b):e.call(a,b)}catch(f){}}return a.parent&&b.bubbles&&V(a.parent,b),b.defaultPrevented}function W(){U.call(this,null),this.Dc=new H(3),this.Gd=new H(10),this.fe=50,this.Pd=5e5,this.ge=.5,this.ee=65536,this.od=0}function X(){var a,b,c=new Promise(function(c,d){a=c,b=d});return c.resolve=a,c.reject=b,c}function Y(){this.tc=new X,this.ga=!1,this.za=null,this.La=[],this.Ma=null}function Z(a,b){var c,d=a.La[0](b);d?(c=d[0],a.Ma=d[1]):(c=Promise.resolve(),a.Ma=null),c.then(_(a,function(a){this.za?(this.La=[],this.Ma=null,$(this)):(this.La.shift(),this.La.length?Z(this,a):(this.tc.resolve(a),this.Ma=null))}))["catch"](_(a,function(a){this.La=[],this.Ma=null,this.za?$(this):this.tc.reject(a)}))}function $(b){var c=Error("Task aborted.");c.type="aborted",b.tc.reject(c),a.setTimeout(function(){this.za.resolve(),this.za=null}.bind(b),5)}function _(a,b){return b.bind(a)}function aa(a){return String.fromCharCode.apply(null,a)}function ba(a){for(var b=new Uint8Array(a.length),c=0;c<a.length;++c)b[c]=a.charCodeAt(c);return b}function ca(b,c){var d=void 0==c?!0:c,e=a.btoa(aa(b)).replace(/\+/g,"-").replace(/\//g,"_");return d?e:e.replace(/=*$/,"")}function da(b){return ba(a.atob(b.replace(/-/g,"+").replace(/_/g,"/")))}function ea(a){for(var b="",c=0;c<a.length;++c){var d=a[c].toString(16);1==d.length&&(d="0"+d),b+=d}return b}function fa(a,b){if(!a&&!b)return!0;if(!a||!b||a.length!=b.length)return!1;for(var c=0;c<a.length;++c)if(a[c]!=b[c])return!1;return!0}function ga(a,b,c,d,e,f,g,h,i,j,k){this.keySystem=a,this.ce=b,this.withCredentials=c,this.Sa=[],this.pd=e||null,this.qd=f||null,this.Qd=g==Kd,this.Fe=h==Ld,this.Md=i||"",this.Ye=j||"",this.Dd=k||null,d&&this.Sa.push(d)}function ha(a){this.body=a,this.headers={}}function ia(){this.minBandwidth=this.maxBandwidth=this.maxWidth=this.maxHeight=null}function ja(){return new ga("","",!1,null)}function ka(a,b){a.Sa=u(a.Sa.concat(b.Sa),function(a){return Array.prototype.join.apply(a.initData)})}function la(a,b){try{var c=new na(a,b);return Promise.resolve(c)}catch(d){return Promise.reject(d)}}function ma(a){var b=this.mediaKeys;return b&&b!=a&&pa(b,null),delete this.mediaKeys,(this.mediaKeys=a)&&pa(a,this),Promise.resolve()}function na(a,b){this.jb=this.keySystem=a;var c=!0;"org.w3.clearkey"==a&&(this.jb="webkit-org.w3.clearkey",c=!1);var d,e=!1;d=document.getElementsByTagName("video"),d=d.length?d[0]:document.createElement("video");for(var f=0;f<b.length;++f){var g=b[f],h={audioCapabilities:[],videoCapabilities:[],persistentState:"optional",distinctiveIdentifier:"optional",initDataTypes:g.initDataTypes},i=!1;if(g.audioCapabilities)for(var j=0;j<g.audioCapabilities.length;++j){var k=g.audioCapabilities[j];k.contentType&&(i=!0,d.canPlayType(k.contentType.split(";")[0],this.jb)&&(h.audioCapabilities.push(k),e=!0))}if(g.videoCapabilities)for(j=0;j<g.videoCapabilities.length;++j)k=g.videoCapabilities[j],k.contentType&&(i=!0,d.canPlayType(k.contentType,this.jb)&&(h.videoCapabilities.push(k),e=!0));if(i||(e=d.canPlayType("video/mp4",this.jb)||d.canPlayType("video/webm",this.jb)),"required"==g.persistentState&&(c?h.persistentState="required":e=!1),e)return void(this.Od=h)}throw Error("None of the requested configurations were supported.")}function oa(a){this.Ta=a,this.Ia=null,this.l=new Q,this.vd=[],this.Ed={}}function pa(a,b){a.Ia=b,S(a.l),b&&(R(a.l,b,"webkitneedkey",a.Ee.bind(a)),R(a.l,b,"webkitkeymessage",a.De.bind(a)),R(a.l,b,"webkitkeyadded",a.Be.bind(a)),R(a.l,b,"webkitkeyerror",a.Ce.bind(a)))}function qa(a,b){var c=a.Ed[b];return c?c:(c=a.vd.shift())?(c.sessionId=b,a.Ed[b]=c):null}function ra(a,b,c){U.call(this,null),this.Ia=a,this.nd=!1,this.S=this.ba=null,this.Ta=b,this.wc=c,this.sessionId="",this.expiration=NaN,this.closed=new X,this.keyStatuses=new va}function sa(a,b,c){if(a.nd)return Promise.reject(Error("The session is already initialized."));a.nd=!0;try{var d;if("persistent-license"==a.wc)if(c)d=ba("LOAD_SESSION|"+c);else{var e=new Uint8Array(b);d=ba("PERSISTENT|"+aa(e))}else d=new Uint8Array(b)}catch(f){return Promise.reject(f)}a.ba=new X;try{a.Ia.webkitGenerateKeyRequest(a.Ta,d)}catch(g){if("InvalidStateError"!=g.name)return a.ba=null,Promise.reject(g);setTimeout(function(){try{this.Ia.webkitGenerateKeyRequest(this.Ta,d)}catch(a){this.ba.reject(a),this.ba=null}}.bind(a),10)}return a.ba}function ta(a,b){var c=a.keyStatuses;c.size=void 0==b?0:1,c.wa=b,c=J({type:"keystatuseschange"}),a.dispatchEvent(c)}function ua(a){this.Kd=a,this.md=0}function va(){this.size=0,this.wa=void 0}function wa(){Navigator.prototype.requestMediaKeySystemAccess&&MediaKeySystemAccess.prototype.getConfiguration||(HTMLMediaElement.prototype.webkitGenerateKeyRequest?(Md=ba("FAKE_KEY_ID"),Navigator.prototype.requestMediaKeySystemAccess=la,delete HTMLMediaElement.prototype.mediaKeys,HTMLMediaElement.prototype.mediaKeys=null,HTMLMediaElement.prototype.setMediaKeys=ma,a.MediaKeys=oa,a.MediaKeySystemAccess=na):(Navigator.prototype.requestMediaKeySystemAccess=p,delete HTMLMediaElement.prototype.mediaKeys,HTMLMediaElement.prototype.mediaKeys=null,HTMLMediaElement.prototype.setMediaKeys=q,a.MediaKeys=r,a.MediaKeySystemAccess=s))}function xa(a){this.systemIds=[],this.cencKeyIds=[],a=new w(new DataView(a.buffer),0);try{for(;a.Qa();){var b=a.i,c=y(a),d=y(a);if(1==c?c=z(a):0==c&&(c=a.j.byteLength-b),1886614376!=d)B(a,c-(a.i-b));else{var e=x(a);if(e>1)B(a,c-(a.i-b));else{B(a,3);var f=ea(A(a)),g=[];if(e>0)for(var h=y(a),i=0;h>i;++i){var j=ea(A(a));g.push(j)}var k=y(a);B(a,k),this.cencKeyIds.push.apply(this.cencKeyIds,g),this.systemIds.push(f),a.i!=b+c&&B(a,c-(a.i-b))}}}}catch(l){}}function ya(a){var b;a instanceof ya?(za(this,a.Ja),this.cb=a.cb,this.ra=a.ra,Aa(this,a.ub),this.ja=a.ja,Ba(this,a.Xa.clone()),this.Pa=a.Pa):a&&(b=String(a).match(Nd))?(za(this,b[1]||"",!0),this.cb=Ca(b[2]||""),this.ra=Ca(b[3]||"",!0),Aa(this,b[4]),this.ja=Ca(b[5]||"",!0),Ba(this,b[6]||"",!0),this.Pa=Ca(b[7]||"")):this.Xa=new Fa(null)}function za(a,b,c){a.Ja=c?Ca(b,!0):b,a.Ja&&(a.Ja=a.Ja.replace(/:$/,""))}function Aa(a,b){if(b){if(b=Number(b),isNaN(b)||0>b)throw Error("Bad port number "+b);a.ub=b}else a.ub=null}function Ba(a,b,c){b instanceof Fa?a.Xa=b:(c||(b=Da(b,Rd)),a.Xa=new Fa(b))}function Ca(a,b){return a?b?decodeURI(a):decodeURIComponent(a):""}function Da(a,b,c){return"string"==typeof a?(a=encodeURI(a).replace(b,Ea),c&&(a=a.replace(/%25([0-9a-fA-F]{2})/g,"%$1")),a):null}function Ea(a){return a=a.charCodeAt(0),"%"+(a>>4&15).toString(16)+(15&a).toString(16)}function Fa(a){this.Ea=a||null}function Ga(){this.id=this.url=null,this.type="static",this.ua=this.Yc=this.e=null,this.F=5,this.bb=this.pa=this.Mc=null,this.Vc=1,this.v=[]}function Ha(){this.p=this.A=this.H=this.e=this.duration=this.start=this.id=null,this.Aa=[]}function Ia(){this.aa=this.n=this.height=this.width=this.contentType=this.lang=this.id=null,this.Kb=!1,this.p=this.A=this.H=this.e=null,this.qa=[],this.Z=[]}function Ja(){this.value=null}function Ka(){this.contentType=this.lang=this.id=null}function La(){this.p=this.A=this.H=this.e=this.aa=this.n=this.height=this.width=this.bandwidth=this.lang=this.id=null,this.qa=[]}function Ma(){this.value=this.schemeIdUri=null,this.children=[],this.pssh=null}function Na(){this.parsedPssh=this.psshBox=null}function Oa(){this.url=null}function Pa(){this.url=null}function Qa(){this.e=null,this.o=1,this.M=this.Za=this.Ga=this.w=null}function Ra(){this.Y=this.url=null}function Sa(){this.Y=this.url=null}function Ta(){this.e=null,this.o=1,this.q=this.w=null,this.fa=1,this.M=null,this.Ka=[]}function Ua(){this.ob=this.Ob=null}function Va(){this.o=1,this.q=this.w=null,this.fa=1,this.xa=this.ib=this.Ha=this.pb=null}function Wa(){this.uc=[]}function Xa(){this.repeat=this.duration=this.startTime=null}function Ya(a,b){this.Bb=a,this.end=b}function Za(a,b){var c=b?new ya(b):null;return a?c?a.resolve(c):a:c}function $a(a,b,c){var d=db(c);return(b=ab(b,c.constructor.TAG_NAME))&&d.parse(a,b),d}function _a(a,b,c){var d=null;return(b=ab(b,c.TAG_NAME))&&(d=new c,d.parse(a,b)),d}function ab(a,b){for(var c=null,d=0;d<a.childNodes.length;d++)if(a.childNodes[d].tagName==b){if(c)return null;c=a.childNodes[d]}return c}function bb(a,b,c){for(var d=[],e=0;e<b.childNodes.length;e++)if(b.childNodes[e].tagName==c.TAG_NAME){var f=new c;f.parse.call(f,a,b.childNodes[e]),d.push(f)}return d}function cb(a){return a=a.firstChild,a.nodeType!=Node.TEXT_NODE?null:a.nodeValue}function db(a){return a?a.clone():null}function eb(a,b,c,d){return a=c(a.getAttribute(b)),null!=a?a:void 0!==d?d:null}function fb(a){return a?(a=Date.parse(a),isNaN(a)?null:Math.floor(a/1e3)):null}function gb(b){if(!b)return null;var c=/^P(?:([0-9]*)Y)?(?:([0-9]*)M)?(?:([0-9]*)D)?(?:T(?:([0-9]*)H)?(?:([0-9]*)M)?(?:([0-9.]*)S)?)?$/.exec(b);if(!c)return null;b=0;var d=jb(c[1]);return d&&(b+=31536e3*d),(d=jb(c[2]))&&(b+=2592e3*d),(d=jb(c[3]))&&(b+=86400*d),(d=jb(c[4]))&&(b+=3600*d),(d=jb(c[5]))&&(b+=60*d),c=a.parseFloat(c[6]),(c=isNaN(c)?null:c)&&(b+=c),b}function hb(a){var b=/([0-9]+)-([0-9]+)/.exec(a);return b?(a=jb(b[1]),null==a?null:(b=jb(b[2]),null==b?null:new Ya(a,b))):null}function ib(b){return b=a.parseInt(b,10),b>0?b:null}function jb(b){return b=a.parseInt(b,10),b>=0?b:null}function kb(a){return a}function lb(a,b,c){for(var d=a.p,e=[],f=0;c>f;++f){var g=f+b,h=(g-1)*d.q,i=h/d.o,h=(h+d.q)/d.o,g=mb(a,g-1+d.fa,(g-1)*d.q);if(!g)return null;e.push(new ob(i,h,0,null,new ya(g)))}return e}function mb(a,b,c){if(!a.p)return null;var d=a.p.pb;return d?(b=nb(d,a.id,b,a.bandwidth,c))?a.e?a.e.resolve(b):b:null:a.e?new ya(a.e):null}function nb(b,c,d,e,f){var g={RepresentationID:c,Number:d,Bandwidth:e,Time:f};b=b.replace(/\$(RepresentationID|Number|Bandwidth|Time)?(?:%0([0-9]+)d)?\$/g,function(b,c,d){if("$$"==b)return"$";var e=g[c];return null==e?b:("RepresentationID"==c&&d&&(d=void 0),b=e.toString(),d=a.parseInt(d,10)||1,d=Math.max(0,d-b.length),Array(d+1).join("0")+b)});try{return new ya(b)}catch(h){if(h instanceof URIError)return null;throw h}}function ob(a,b,c,d,e){this.startTime=a,this.endTime=b,this.ab=c,this.Fb=d,this.url=e}function pb(a,b,c){return new ob(b,c,a.ab,a.Fb,a.url)}function qb(a,b){for(var c=a.length-1;c>=0;--c){var d=a[c];if(b>=d.startTime&&(null==d.endTime||b<d.endTime))return c}return-1}function rb(a,b){return a.map(function(a){return pb(a,a.startTime+b,null!=a.endTime?a.endTime+b:null)})}function sb(){}function tb(a){this.g=a,this.na=0}function ub(a){if(0==a.g.length)throw new RangeError("SegmentIndex: There is no last SegmentReference.");return a.g[a.g.length-1]}function vb(a,b){if(a.na!=b.na&&(b=new tb(rb(b.g,a.na-b.na))),0==a.length())a.g=b.g.slice(0);else if(0!=b.length()&&null!=ub(a).endTime&&!(null!=ub(b).endTime&&ub(b).endTime<ub(a).endTime)){if(ub(a).endTime<=b.first().startTime)var c=pb(ub(a),ub(a).startTime,b.first().startTime),c=a.g.slice(0,-1).concat([c]);else{var d;for(d=0;d<a.g.length&&!(a.g[d].endTime>=b.first().startTime);++d);a.g[d].startTime<b.first().startTime?(c=pb(a.g[d],a.g[d].startTime,b.first().startTime),c=a.g.slice(0,d).concat([c])):(a.first().startTime>b.first().startTime||b.first(),c=a.g.slice(0,d))}a.g=c.concat(b.g)}}function wb(a,b,c,d){tb.call(this,a),this.Ua=b,this.Ad=c,this.Lb=d,this.ka=this.tb=this.R=null,xb(this)}function xb(a){if(0!=a.length()){a.length();var b=null!=ub(a).endTime?ub(a).endTime:ub(a).startTime;if(a.Ua.pa>a.Lb)a.R=b;else{var c=a.Lb-(a.Ua.pa+a.Ad.start);0>c?a.R=b:c<Math.max(ub(a).startTime,ub(a).endTime||0)?(ub(a),a.R=b):a.R=c}a.tb=ub(a).startTime,a.ka=a.first().startTime}}function yb(a,b){if(zb(a,b),null==a.R||null==a.tb||null==a.ka)return{start:0,end:0};var c=b-a.Lb,d=a.R+c;return null!=a.Ua.bb&&(d=d-a.ka-a.Ua.bb,d>0&&(a.ka+=d)),c=a.tb+c,c=0<a.length()?null!=ub(a).endTime?Math.min(c,ub(a).endTime):c:a.ka,c=Math.max(c,a.ka),{start:a.ka,end:c}}function zb(a,b){if(null!=a.Ua.bb)if(null==a.R)a.length();else{for(var c=a.R+(b-a.Lb),d=0,e=0;e<a.g.length;++e){var f=null;if(e<a.g.length-1?f=a.g[e+1].endTime:(f=a.g[e],f=null!=f.endTime?f.endTime+(f.endTime-f.startTime):null),!(null!=f&&f<c-a.Ua.bb))break;++d}d>0&&a.g.splice(0,d)}}function Ab(a,b,c,d){var e,f=1,g=0;if(a.pa>d)e=null;else{var h=a.Vc||0,i=a.bb||0;e=c.p,e=e.q/e.o;var j=d-(a.pa+b.start);0>j?e=null:(i=j-2*e-i,0>i&&(i=0),i=Math.ceil(i/e)*e,j-=e,0>j?e=null:(h=Math.floor(j/e)*e-h,0>h&&(h=0),h=Math.floor(h/e)*e,e={fd:i/e+1,current:(h>=i?h:i)/e+1}))}if(e&&(f=e.fd,g=e.current-e.fd+1),f=lb(c,f,g),null==f)throw a=Error("Failed to generate SegmentReferences."),a.type="stream",a;wb.call(this,f,a,b,d),this.G=c,this.sb=this.V=0<this.length()?a.pa+b.start+ub(this).endTime:null,this.rb=e?e.current+1:null}function Bb(a,b){if(null!=a.V&&null!=a.sb&&null!=a.rb){var c=a.G.p,c=c.q/c.o,d=Math.floor((a.sb+(b-a.Lb)-a.V)/c);if(0!=d){var e=lb(a.G,a.rb,d);Array.prototype.push.apply(a.g,rb(e,a.na)),a.V+=d*c,a.rb+=d}}}function Cb(a,b,c,d){this.Q=a,this.X=b,this.G=c,this.Mb=d,this.b=null}function Db(a,b,c){this.Q=a,this.X=b,this.G=c,this.b=null}function Eb(a,b,c,d){this.Q=a,this.X=b,this.G=c,this.Mb=d,this.b=null}function Fb(a){this.Ue=a,this.b=null}function Gb(){}function Hb(a){this.url=a,this.h=new Ib,this.Sb=this.Hd=this.Ac=0,this.m=null,this.B=new X,this.L=null}function Ib(){this.body=null,this.Nb=1,this.cd=1e3,this.Ie=2,this.Je=.5,this.kc=0,this.method="GET",this.responseType="arraybuffer",this.jc={},this.Id=this.withCredentials=!1}function Jb(a){Kb(a),a.h.body=null,a.B=null,a.L=null}function Kb(a){a.m&&(a.m.onload=null,a.m.onreadystatechange=null,a.m.onerror=null,a.m.ontimeout=null),a.m=null}function Lb(a){var b=a.url.split("/"),c=parseInt(b[2],10),d=parseInt(b[3],10),e=new fd(null,null);return gd(e).then(function(){return nd(e,c,d)}).then(_(a,function(a){var b=JSON.parse(JSON.stringify(new XMLHttpRequest));return b.response=a,a=this.B,a.resolve(b),hd(e),Jb(this),a}))["catch"](_(a,function(a){return hd(e),Jb(this),Promise.reject(a)}))}function Mb(a,b,c){return b=Error(b),b.type=c,b.status=a.m.status,b.url=a.url,b.method=a.h.method,b.body=a.h.body,b.Ze=a.m,b}function Nb(b){Kb(b),a.setTimeout(b.pc.bind(b),b.Sb*(1+(2*Math.random()-1)*b.h.Je)),b.Sb*=b.h.Ie}function Ob(a){Hb.call(this,a),this.h.responseType="text",this.h.Nb=3,this.h.kc=Td,this.h.Id=!0}function Pb(a,b,c,d){Hb.call(this,a),this.h.body=b,this.h.method="POST",this.h.Nb=3,this.h.withCredentials=c,this.h.kc=Ud,a=d||{};for(var e in a)this.h.jc[e]=a[e]}function Qb(a,b,c,d,e){Hb.call(this,a),(b||c)&&(this.h.jc.Range="bytes="+(b+"-"+(null!=c?c:""))),d&&(this.h.Nb=d),e&&(this.h.cd=e),this.h.kc=Vd}function Rb(a,b,c){this.url=a,this.ab=b,this.Fb=c,this.vb=this.$a=null}function Sb(a){a.$a&&(a.$a.abort(),a.$a=null,a.vb=null)}function Tb(a,b,c,d,e,f){this.Q=a,this.X=b,this.cc=c,this.hb=d,this.gc=e,this.Mb=f,this.b=this.B=null}function Ub(a){this.Pb=a}function Vb(a,b,c){this.sd=a,this.la=b,this.P=c,this.l=new Q,this.ca=[],this.ya=0,this.Va=this.C=null,R(this.l,this.la,"updateend",this.te.bind(this))}function Wb(a,b){for(var c=a.la.buffered,d=0;d<c.length;++d){var e=c.start(d)-Wd,f=c.end(d)+Wd;if(b>=e&&f>=b)return c.end(d)-b}return 0}function Xb(a){return a.C.start(),a.C.tc.then(_(a,function(){this.C=null}))["catch"](_(a,function(a){return this.C=null,Promise.reject(a)}))}function Yb(a,b){try{a.la.appendBuffer(b)}catch(c){return Promise.reject(c)}return a.Va=new X,a.Va}function Zb(){this.K=Xd++,this.id=this.I=this.N=null,this.timestampOffset=0,this.height=this.width=this.bandwidth=null,this.aa=this.n="",this.enabled=!0}function $b(a){var b=a.n||"";return a.aa&&(b+='; codecs="'+a.aa+'"'),b}function _b(){this.K=Yd++,this.id=null,this.contentType=this.lang="",this.Kb=!1,this.f=[],this.Na=[]}function ac(){this.id=null,this.start=0,this.duration=null,this.u=[]}function bc(){this.ia=!1,this.Zb=this.zb=null,this.F=0,this.t=[]}function cc(){this.id=0,this.T=null,this.Fa=this.contentType=""}function dc(a){this.kb=a}function ec(a){if(a.v.length){null==a.v[0].start&&(a.v[0].start=0);var b=function(a){return 0==a||!!a};"dynamic"==a.type&&(a.ua=null),b(a.ua)&&1==a.v.length&&!b(a.v[0].duration)&&(a.v[0].duration=a.ua);for(var c=0,d=!0,e=0;e<a.v.length;++e){var f=a.v[e-1],g=a.v[e],h=a.v[e+1]||{start:a.ua};!b(g.start)&&f&&b(f.start)&&b(f.duration)&&(g.start=f.start+f.duration),!b(g.duration)&&b(h.start)&&(g.duration=h.start-g.start),null!=g.start&&null!=g.duration?c+=g.duration:d=!1}b(a.ua)||(e=a.v[a.v.length-1],d?a.ua=c:b(e.start)&&b(e.duration)?a.ua=e.start+e.duration:"dynamic"!=a.type&&(a.ua=c))}}function fc(a,b,c){return(b=nb(b,a.id,null,a.bandwidth,null))?(c=new c,c.url=a.e&&b?a.e.resolve(b):b,c):null}function gc(a){var b=new ya(a.url),c=0,d=null;return a.Y&&(c=a.Y.Bb,d=a.Y.end),new Rb(b,c,d)}function hc(a,b,c){U.call(this,a),this.a=b,this.c=c,this.D=this.nb=null,this.l=new Q,this.Qb={},this.xb=[],this.wd=0,this.eb=new X,this.$c=null}function ic(a){for(var b=new O,c=a.c.gd(),d=0;d<c.length;++d){var e=c[d];e.T.keySystem||e.Fa&&!Ac(e.Fa)||b.push(e.contentType,e)}for(var d={},e=!1,f=0;f<c.length;++f){var g=c[f];if(g.T.keySystem&&!b.has(g.contentType)){var h=g.T.keySystem,i=d[h];if(i||(i=g.T,i={audioCapabilities:void 0,videoCapabilities:void 0,initDataTypes:void 0,distinctiveIdentifier:i.Qd?"required":"optional",persistentState:i.Fe||a.c.Ib()?"required":"optional"},d[h]=i),g.Fa&&(h=g.contentType+"Capabilities",h in i)){e=!0,i[h]||(i[h]=[]);var j;"audio"==g.contentType?j=g.T.Md:"video"==g.contentType&&(j=g.T.Ye),i[h].push({contentType:g.Fa,robustness:j})}}}if(!e){if(!c.length)throw a=Error("No DRM scheme info provided!"),a.type="drm",a;a.D=c[0].T}return 0==Object.keys(d).length?(a.c.Tc(b),a.eb.resolve(),Promise.resolve()):(j=new X,d=kc(a,d,j),d=d.then(a.Nd.bind(a,c,b)),d=d.then(a.Se.bind(a)),j.reject(null),d)}function jc(b,c){return null==b.$c&&(b.$c=a.setTimeout(function(){var a=Error("Timeout waiting for sessions.");a.type="storage",this.eb.reject(a)}.bind(b),c)),b.eb}function kc(a,b,c){for(var d in b){var e=b[d];c=c["catch"](function(){return navigator.requestMediaKeySystemAccess(d,[e])})}return a.c.Ib()&&(c=c["catch"](function(){throw Error("Either none of the requested key systems are supported or none of the requested key systems support persistent state.")})),c}function lc(a){for(var b=a.c.fc(),c=0;c<b.length;++c){var d=mc(a),e=d.load(b[c]);a.xb.push(d),e["catch"](_(a,function(a){a=K(a),this.dispatchEvent(a)}))}}function mc(a){var b=null;if(a.c.Ib())try{b=a.nb.createSession("persistent-license")}catch(c){throw Error("Persistent licenses are not supported by this key system or platform.")}else b=a.nb.createSession();return R(a.l,b,"message",a.re.bind(a)),R(a.l,b,"keystatuseschange",a.ke.bind(a)),b}function nc(a,b,c,d){d=new ha(d),c.qd&&c.qd(d),new Pb(c.ce,d.body,c.withCredentials,d.headers).send().then(_(a,function(a){return c.pd&&(a=c.pd(a)),b.update(a)})).then(_(a,function(){var a=J({type:"sessionReady",detail:b});this.dispatchEvent(a),this.wd++,this.wd>=this.xb.length&&this.eb.resolve()}))["catch"](_(a,function(a){a.af=b,a=K(a),this.dispatchEvent(a)}))}function oc(a,b,c,d,e){U.call(this,a),this.a=b,this.$=new Vb(c,d,e),this.P=e,this.Ic=this.b=this.O=null,this.td=0,this.Wc=!1,this.ha=null,this.Rb=!1,this.ya=null,this.Nc=this.ec=this.ga=!1}function pc(a,b){a.O&&!a.Rb&&(a.Rb=!0,a.bc(),a.$.abort().then(_(a,function(){var a=this.a.currentTime;return!b&&0<Wb(this.$,a)&&0<=qb(this.$.ca,a)?Promise.resolve():(this.Nc=!0,this.$.clear())})).then(_(a,function(){this.Rb=!1,this.ea(0)}))["catch"](_(a,function(a){this.Rb=!1,a=K(a),this.dispatchEvent(a)})))}function qc(a,b,c){a=a.$;var d=a.ca.length;return a=d>0?a.ca[d-1]:null,null!=a?null!=a.endTime?c.find(a.endTime):null:c.find(b)}function rc(a){if(!a.ga&&null!=a.ya){a.ga=!0,a.$.Ba(a.ya);var b=J({type:"started",bubbles:!1,na:a.ya});a.dispatchEvent(b)}}function sc(a,b){var c=b.n.split("/")[0],c=J({type:"adaptation",bubbles:!0,contentType:c,size:"video"!=c?null:{width:b.width,height:b.height},bandwidth:b.bandwidth});a.dispatchEvent(c)}function tc(a){var b=J({type:"ended"});a.dispatchEvent(b)}function uc(a,b){U.call(this,a),this.a=b,this.Eb=!0,this.oa=this.b=this.O=null}function vc(){this.streamStats=null,this.droppedFrames=this.decodedFrames=NaN,this.bufferingTime=this.playTime=this.estimatedBandwidth=0,this.playbackLatency=NaN,this.bufferingHistory=[],this.bandwidthHistory=[],this.streamHistory=[]}function wc(a,b){var c=new xc(b);a.streamHistory.push(new yc(c)),(c.videoHeight||!a.streamStats)&&(a.streamStats=c)}function xc(a){this.videoWidth=a.width,this.videoHeight=a.height,this.videoMimeType=a.n,this.videoBandwidth=a.bandwidth}function yc(a){this.timestamp=v()/1e3,this.value=a}function zc(a){U.call(this,null),this.a=a,this.c=null,this.l=new Q,this.Oa=null,this.ta="en",this.xc=this.Tb=null,this.gb=!1,this.J=new vc,this.zc=!0,this.ic=1,this.lc=new ia}function Ac(b){return"text/vtt"==b?!!a.VTTCue:MediaSource.isTypeSupported(b)}function Bc(b){b.Tb&&(a.clearTimeout(b.Tb),b.Tb=null)}function Cc(b){Dc(b),b.xc=a.setTimeout(b.Ae.bind(b),100)}function Dc(b){b.xc&&(a.clearTimeout(b.xc),b.xc=null)}function Ec(a){f("buffering");var b=a.J;b.bufferingTime+=g("buffering")/1e3,a.gb=!1,a.dispatchEvent(J({type:"bufferingEnd"}))}function Fc(){}function Gc(a,b){var c=a.bandwidth||Number.MAX_VALUE,d=b.bandwidth||Number.MAX_VALUE;return d>c?-1:c>d?1:0}function Hc(a){this.va=a}function Ic(a){function b(a,b){return a.concat(b)}var c=a.t.map(function(a){return a.u}).reduce(b,[]).map(function(a){return a.f}).reduce(b,[]);return a=c.map(function(a){return a.N.create()}),Promise.all(a).then(function(a){for(var b={},d=0;d<c.length;++d)b[c[d].K]=a[d];return Promise.resolve(b)})}function Jc(a,b,c,d,e){var f=new O;a.t.forEach(function(a,b){f.push(a.id||""+b,a)});var g=new O;for(b.t.forEach(function(a,b){g.push(a.id||""+b,a)}),a=f.keys(),b=0;b<a.length;++b){var h=a[b],i=f.get(h);1<i.length||(h=g.get(h))&&0!=h.length&&1==h.length&&(Kc(i[0],h[0],c,d,e),i[0].duration=h[0].duration)}}function Kc(a,b,c,d,e){var f=new O;a.u.forEach(function(a,b){f.push(a.id||""+b,a)});var g=new O; | for(b.u.forEach(function(a,b){g.push(a.id||""+b,a)}),a=f.keys(),b=0;b<a.length;++b){var h=a[b],i=f.get(h);1<i.length||(h=g.get(h))&&0!=h.length&&1==h.length&&Lc(i[0],h[0],c,d,e)}}function Lc(a,b,c,d,e){var f=new O;a.f.forEach(function(a,b){f.push(a.id||""+b,a)});var g=new O;b.f.forEach(function(a,b){g.push(a.id||""+b,a)}),b={};for(var h=f.keys(),i=0;i<h.length;++i){var j=h[i];b[j]=j;var k=f.get(j);1<k.length||((j=g.get(j))&&0!=j.length?1==j.length&&(Mc(k[0],j[0],c,d),k[0].I=j[0].I,j[0].I=null,k[0].timestampOffset=j[0].timestampOffset):(e.push(k[0]),a.f.splice(a.f.indexOf(k[0]),1)))}for(h=g.keys(),i=0;i<h.length;++i)j=h[i],b[j]||(b[j]=j,j=g.get(j),a.f.push(j[0]))}function Mc(a,b,c,d){a=c[a.K],b=d[b.K],a.length(),a.Jc(b)&&a.length()}function Nc(){this.c=this.P=null,this.l=new Q,this.qb=Number.POSITIVE_INFINITY,this.Eb=!0}function Oc(a){var b=a.c.getVideoTracks();if(0==b.length)return null;b.sort(k);var c;a:{c=a.c.getAudioTracks();for(var d=0;d<c.length;++d)if(c[d].active){c=c[d];break a}c=null}c=c?c.bandwidth:0,a=a.P.getBandwidth();for(var d=b[0],e=0;e<b.length;++e){var f=b[e],g=e+1<b.length?b[e+1]:{bandwidth:Number.POSITIVE_INFINITY};if(f.bandwidth&&(g=(g.bandwidth+c)/.85,a>=(f.bandwidth+c)/.95&&g>=a&&(d=f,d.active)))break}return d}function Pc(a,b,c){U.call(this,null),this.de=a,this.Jd=b,this.D=c?c:ja(),this.ma=null}function Qc(a,b,c){U.call(this,null),this.d=a,this.L=b,this.sa=new Q,this.W=new MediaSource,this.video=null,this.r=new O,this.Ab=c,this.lb=!1,this.ta="",this.sc=!1,this.J=null,this.$b=new X,this.Ra=this.ac=null,this.zd=1,this.s={},this.ud=Number.POSITIVE_INFINITY,this.mb=Number.NEGATIVE_INFINITY,this.Jb=0,this.dd=!1,this.Da={},this.Ub=this.ha=null}function Rc(a,b){var c=b.n.split("/")[0],d=a.s[c];if(d&&d.O==b){var e=a.r.get(b.n.split("/")[0]),f=e.map(function(a){return a.f}).reduce(function(a,b){return a.concat(b)},[]).filter(function(a){return a.enabled});if(0==f.length)return void e.push(b);a.Da[c].Uc==b&&delete a.Da[c],d.Xb(f[0],a.d.F,!0),b.destroy()}b.destroy()}function Sc(a,b,c,d){if(!a.r.has(b)||!a.s[b])return!1;for(var e=a.r.get(b),f=0;f<e.length;++f)for(var g=e[f],h=0;h<g.f.length;++h){var i=g.f[h];if(i.K==c)return"text"==b||a.dd?(wc(a.J,i),a.s[b].Xb(i,a.d.F,d),!0):(c=a.Da[b],a.Da[b]={Uc:i,ed:null!=c&&c.ed||d},!0)}return!1}function Tc(a,b){for(var c=0;2>=c;++c)for(var d=0;d<b.length;++d){var e=b[d];if(L(c,a.ta,e.lang))return b.splice(d,1),void b.splice(0,0,e)}for(d=0;d<b.length;++d)if(e=b[d],e.Kb){b.splice(d,1),b.splice(0,0,e);break}}function Uc(a){for(var b=[],c=["audio","video","text"],d=0;d<c.length;++d){var e=c[d];a.r.has(e)&&b.push(a.r.get(e)[0])}a.Ab.start(a.L,a);var f=Vc(a,b),b=N(f).map(function(a){return a.N.create()});return Promise.all(b).then(_(a,function(a){return a.every(function(a){return a.length()})&&(a=cd(this,a))?Wc(this,f)?(Yc(this,f,a),Promise.resolve()):(a=Error("Failed to create Stream objects."),a.type="stream",Promise.reject(a)):(a=Error("Some streams are not available."),a.type="stream",Promise.reject(a))}))["catch"](_(a,function(a){return"aborted"!=a.type?(Object.keys(this.s),this.d.ia?(this.ea(0),Promise.resolve()):Promise.reject(a)):void 0}))}function Vc(a,b){for(var c={},d=0;d<b.length;++d){var e=b[d],f=e.f[0];if("video"==e.contentType)for(var g=a.Ab.getInitialVideoTrackId(),h=0;h<e.f.length&&(f=e.f[h],f.K!=g);++h);else"audio"==e.contentType&&(f=e.f[Math.floor(e.f.length/2)]);c[e.contentType]=f}return c}function Wc(a,b){var c,d={};for(c in b){var e=b[c],e="text"==c?new uc(a,a.video):Xc(a,e);if(!e)return N(d).forEach(function(a){a.destroy()}),!1;d[c]=e}return a.s=d,!0}function Xc(a,b){var c;try{c=a.W.addSourceBuffer($b(b))}catch(d){return null}try{c.timestampOffset=b.timestampOffset}catch(e){return null}return new oc(a,a.video,a.W,c,a.L)}function Yc(a,b,c){a.zd=a.video.playbackRate,a.video.playbackRate=0;var d;a.d.ia?(a.W.duration=c.end+2592e3,d=c.end):(a.W.duration=c.end-c.start,d=c.start),R(a.sa,a.video,"seeking",a.qe.bind(a)),a.video.currentTime!=d&&(a.video.currentTime=d,a.Ra=d),_c(a,c.start,c.end);for(var e in a.s)c=a.s[e],R(a.sa,c,"started",a.ve.bind(a)),R(a.sa,c,"ended",a.ue.bind(a)),d=b[e],wc(a.J,d),c.Xb(d,a.d.F,!1);a.Db(a.sc)}function Zc(a){for(var b=bd(a),c=0;c<b.length;++c)b[c].Ba(a.mb);$c(a,b),b=P(a.r).map(function(a){return a.f}).reduce(function(a,b){return a.concat(b)},[]).map(function(a){var b=[a.N.create()];return a.I&&b.push(a.I.create()),Promise.all(b)}),Promise.all(b).then(_(a,function(a){for(var b=0;b<a.length;++b)a[b][0].Ba(this.mb);this.dd=!0;for(var c in this.Da)a=this.Da[c],b=this.s[c],wc(this.J,a.Uc),b.Xb(a.Uc,this.d.F,a.ed);this.Da={}}))["catch"](_(a,function(a){"aborted"!=a.type&&(a=K(a),this.dispatchEvent(a))}))}function $c(a,b){var c=cd(a,b);c&&_c(a,c.start,c.end);var d;0!=a.mb?(d=a.video.currentTime+a.mb,a.video.currentTime=d,a.Ra=d):d=a.video.currentTime,a.d.ia&&c&&(a.Jb=c.end-d,a.Jb=Math.max(a.Jb,0)),a.video.playbackRate=a.zd,null!=a.d.zb&&a.ea(a.Jb),dd(a)}function _c(a,b,c){b=J({type:"seekrangechanged",bubbles:!0,start:b,end:c}),a.dispatchEvent(b)}function ad(a,b,c,d){return b>=c-.01?!1:(a.video.currentTime=Math.min(c+(a.d.ia?a.d.F:0),d),!0)}function bd(a){return N(a.s).map(function(a){return a.b}).filter(function(a){return null!=a})}function cd(a,b){for(var c=0,d=Number.POSITIVE_INFINITY,e=0;e<b.length;++e){var f=b[e].Hc(),c=Math.max(c,f.start);null!=f.end&&(d=Math.min(d,f.end))}if(d==Number.POSITIVE_INFINITY){if(d=a.d.t[0],!d.duration)return null;d=(d.start||0)+d.duration}return a.d.ia&&(d=Math.max(d-(a.d.F+a.Jb),c)),c>d?null:{start:c,end:d}}function dd(b){b.d.ia&&(b.Ub=a.setTimeout(b.ye.bind(b),1e3))}function ed(a,b,c,d){c||(c=new W),d||(d=new Nc),Qc.call(this,null,c,d),this.he=a,this.kb=b}function fd(a,b){U.call(this,b),this.Ca=null,this.P=a}function gd(b){if(!a.indexedDB)return Promise.reject(Error("Offline storage requires IndexedDB support."));var c=new X,d=a.indexedDB.open("content_database",1);return d.onupgradeneeded=_(b,function(a){this.Ca=a.target.result,id(this,"group_store",{keyPath:"group_id"}),id(this,"stream_index_store",{keyPath:"stream_id"}),a=id(this,"content_store",{autoIncrement:"true"}),a.createIndex("segment",["stream_id","segment_id"],{unique:!0}),a.createIndex("stream","stream_id",{unique:!1})}),d.onsuccess=_(b,function(a){this.Ca=a.target.result,c.resolve()}),d.onerror=function(){c.reject(d.error)},c}function hd(a){a.Ca&&a.Ca.close()}function id(a,b,c){return a.Ca.objectStoreNames.contains(b)&&a.Ca.deleteObjectStore(b),a.Ca.createObjectStore(b,c)}function jd(a,b,c,d,e){for(var f=[],g=[],h=0,i=0,j=[],k=b.map(function(a){return a.N.create()}),k=Promise.all(k),l=b.map(function(a){return a.I.create()}),l=Promise.all(l),k=Promise.all([k,l]).then(function(a){f=a[0],g=a[1],h=f.reduce(function(a,b){return a+b.length()},0)}),l=0;l<b.length;++l)k=k.then(function(a){return kd(this,b[a],f[a],g[a],h,i)}.bind(a,l)),k=k.then(function(a,b){i+=f[a].length(),j.push(b)}.bind(a,l));return k.then(_(a,function(){return ld(vd(this))})).then(_(a,function(a){var b=new X,f={group_id:a,stream_ids:j,session_ids:c,duration:d,key_system:e.keySystem},g=vd(this).put(f);return g.onsuccess=function(){b.resolve(a)},g.onerror=function(){b.reject(g.error)},b}))}function kd(a,b,c,d,e,f){var g=[ld(ud(a)),ld(td(a).index("stream"))],g=Promise.all(g).then(_(a,function(a){return{Wb:Math.max(a[0],a[1]),wb:new ArrayBuffer(0),Sc:0,g:[],Ec:null,We:e,Bd:f}})),g=g.then(a.$d.bind(a,c));return g=g.then(a.ae.bind(a,b,d))}function ld(a){var b=new X,c=a.openCursor(null,"prev");return c.onsuccess=function(a){a.target.result?b.resolve(a.target.result.key+1):b.resolve(0)},c.onerror=function(){b.reject(c.error)},b}function md(a,b){var c=new Uint8Array(a.byteLength+b.byteLength);return c.set(new Uint8Array(a),0),c.set(new Uint8Array(b),a.byteLength),c.buffer}function nd(a,b,c){return rd(td(a).index("segment"),[b,c]).then(function(a){return Promise.resolve(a.content)})}function od(a,b){return rd(ud(a),b)}function pd(a,b){return rd(vd(a),b).then(function(a){return a.hasOwnProperty("duration")||a.hasOwnProperty("key_system")?Promise.resolve(a):od(this,a.bf[0]).then(function(b){return a.duration=b.duration,a.be=b.be,Promise.resolve(a)})}.bind(a))}function qd(a){var b=new X,c=[],d=vd(a).openCursor();return d.onerror=function(){b.reject(d.error)},d.onsuccess=function(a){(a=a.target.result)?(c.push(a.key),a["continue"]()):b.resolve(c)},b}function rd(a,b){var c=new X,d=a.get(b);return d.onerror=function(){c.reject(d.error)},d.onsuccess=function(){if(d.result)c.resolve(d.result);else{var a=Error("Item not found.");a.type="storage",c.reject(a)}},c}function sd(a,b){var c=new X,d=ud(a)["delete"](b);d.onerror=function(){c.reject(d.error)};var e=td(a);return e.index("stream").openKeyCursor(IDBKeyRange.only(b)).onsuccess=function(a){(a=a.target.result)&&(e["delete"](a.primaryKey),a["continue"]())},e.transaction.oncomplete=function(){c.resolve()},c}function td(a){return wd(a,"content_store")}function ud(a){return wd(a,"stream_index_store")}function vd(a){return wd(a,"group_store")}function wd(a,b){return a.Ca.transaction([b],"readwrite").objectStore(b)}function xd(a){this.mc=a,this.b=null}function yd(a,b,c){b||(b=new W),c||(c=new Nc),Qc.call(this,null,b,c),this.jd=a,this.qc=[],this.timeoutMs=3e4}function zd(a,b,c,d){var e=new fd(a.L,a),f=gd(e);return f=f.then(_(a,function(){return jd(e,b,this.qc,d,c)})).then(function(a){return hd(e),Promise.resolve(a)})["catch"](function(a){return hd(e),Promise.reject(a)})}var Ad,Bd=this;d("error"),d("warn"),d("info"),d("log"),d("debug");var Cd=a.performance&&a.performance.now?a.performance.now.bind(a.performance):Date.now,Dd={};b("shaka.player.AudioTrack.compare",function(a,b){return a.lang<b.lang?-1:a.lang>b.lang?1:a.bandwidth<b.bandwidth?-1:a.bandwidth>b.bandwidth?1:0}),b("shaka.player.TextTrack.compare",function(a,b){return a.lang<b.lang?-1:a.lang>b.lang?1:0}),b("shaka.player.VideoTrack.compare",k),b("shaka.polyfill.CustomEvent.install",l),b("shaka.polyfill.Fullscreen.install",n),r.prototype.createSession=function(){},r.prototype.setServerCertificate=function(){},s.prototype.getConfiguration=function(){},s.prototype.createMediaKeys=function(){},b("shaka.polyfill.VideoPlaybackQuality.install",t);var Ed=0,Fd=1;w.prototype.Qa=function(){return this.i<this.j.byteLength};var Gd;C.prototype.Qa=function(){return this.Ya.Qa()},H.prototype.sample=function(a,b){var c=Math.pow(this.ad,a);this.Cc=b*(1-c)+c*this.Cc,this.vc+=a};var Hd=1,Id=2,Jd={aar:"aa",abk:"ab",afr:"af",aka:"ak",alb:"sq",amh:"am",ara:"ar",arg:"an",arm:"hy",asm:"as",ava:"av",ave:"ae",aym:"ay",aze:"az",bak:"ba",bam:"bm",baq:"eu",bel:"be",ben:"bn",bih:"bh",bis:"bi",bod:"bo",bos:"bs",bre:"br",bul:"bg",bur:"my",cat:"ca",ces:"cs",cha:"ch",che:"ce",chi:"zh",chu:"cu",chv:"cv",cor:"kw",cos:"co",cre:"cr",cym:"cy",cze:"cs",dan:"da",deu:"de",div:"dv",dut:"nl",dzo:"dz",ell:"el",eng:"en",epo:"eo",est:"et",eus:"eu",ewe:"ee",fao:"fo",fas:"fa",fij:"fj",fin:"fi",fra:"fr",fre:"fr",fry:"fy",ful:"ff",geo:"ka",ger:"de",gla:"gd",gle:"ga",glg:"gl",glv:"gv",gre:"el",grn:"gn",guj:"gu",hat:"ht",hau:"ha",heb:"he",her:"hz",hin:"hi",hmo:"ho",hrv:"hr",hun:"hu",hye:"hy",ibo:"ig",ice:"is",ido:"io",iii:"ii",iku:"iu",ile:"ie",ina:"ia",ind:"id",ipk:"ik",isl:"is",ita:"it",jav:"jv",jpn:"ja",kal:"kl",kan:"kn",kas:"ks",kat:"ka",kau:"kr",kaz:"kk",khm:"km",kik:"ki",kin:"rw",kir:"ky",kom:"kv",kon:"kg",kor:"ko",kua:"kj",kur:"ku",lao:"lo",lat:"la",lav:"lv",lim:"li",lin:"ln",lit:"lt",ltz:"lb",lub:"lu",lug:"lg",mac:"mk",mah:"mh",mal:"ml",mao:"mi",mar:"mr",may:"ms",mkd:"mk",mlg:"mg",mlt:"mt",mon:"mn",mri:"mi",msa:"ms",mya:"my",nau:"na",nav:"nv",nbl:"nr",nde:"nd",ndo:"ng",nep:"ne",nld:"nl",nno:"nn",nob:"nb",nor:"no",nya:"ny",oci:"oc",oji:"oj",ori:"or",orm:"om",oss:"os",pan:"pa",per:"fa",pli:"pi",pol:"pl",por:"pt",pus:"ps",que:"qu",roh:"rm",ron:"ro",rum:"ro",run:"rn",rus:"ru",sag:"sg",san:"sa",sin:"si",slk:"sk",slo:"sk",slv:"sl",sme:"se",smo:"sm",sna:"sn",snd:"sd",som:"so",sot:"st",spa:"es",sqi:"sq",srd:"sc",srp:"sr",ssw:"ss",sun:"su",swa:"sw",swe:"sv",tah:"ty",tam:"ta",tat:"tt",tel:"te",tgk:"tg",tgl:"tl",tha:"th",tib:"bo",tir:"ti",ton:"to",tsn:"tn",tso:"ts",tuk:"tk",tur:"tr",twi:"tw",uig:"ug",ukr:"uk",urd:"ur",uzb:"uz",ven:"ve",vie:"vi",vol:"vo",wel:"cy",wln:"wa",wol:"wo",xho:"xh",yid:"yi",yor:"yo",zha:"za",zho:"zh",zul:"zu"};Ad=O.prototype,Ad.push=function(a,b){this.da.hasOwnProperty(a)?this.da[a].push(b):this.da[a]=[b]},Ad.set=function(a,b){this.da[a]=b},Ad.has=function(a){return this.da.hasOwnProperty(a)},Ad.get=function(a){return(a=this.da[a])?a.slice():null},Ad.remove=function(a,b){var c=this.da[a];if(c)for(var d=0;d<c.length;++d)c[d]==b&&(c.splice(d,1),--d)},Ad.keys=function(){var a,b=[];for(a in this.da)b.push(a);return b},Ad.clear=function(){this.da={}},Q.prototype.destroy=function(){S(this),this.fb=null},Q.prototype.Yb=function(a,b){for(var c=this.fb.get(b)||[],d=0;d<c.length;++d){var e=c[d];e.target==a&&(e.Yb(),this.fb.remove(b,e))}},T.prototype.Yb=function(){this.target&&(this.target.removeEventListener(this.type,this.rd,!1),this.rd=this.target=null)},b("shaka.util.FakeEventTarget",U),U.prototype.addEventListener=function(a,b,c){c||this.Kc.push(a,b)},U.prototype.removeEventListener=function(a,b,c){c||this.Kc.remove(a,b)},U.prototype.dispatchEvent=function(a){return delete a.srcElement,delete a.target,delete a.currentTarget,Object.defineProperties(a,{srcElement:{value:null,writable:!0},target:{value:this,writable:!0},currentTarget:{value:null,writable:!0}}),V(this,a)},c(W,U),b("shaka.util.EWMABandwidthEstimator",W),W.prototype.sample=function(a,b){if(!(b<this.ee)){a=Math.max(a,this.fe);var c=8e3*b/a,d=a/1e3;this.Dc.sample(d,c),this.Gd.sample(d,c),this.dispatchEvent(J({type:"bandwidth"})),this.od=Date.now()}},W.prototype.getBandwidth=function(){return this.Dc.vc<this.ge?this.Pd:Math.min(I(this.Dc),I(this.Gd))},W.prototype.getDataAge=function(){return(Date.now()-this.od)/1e3},W.prototype.supportsCaching=function(){return!1},Y.prototype.append=function(a){if(this.ga)throw Error("Cannot append to a running task!");this.La.push(a)},Y.prototype.start=function(){if(this.ga)throw Error("Task already started!");this.ga=!0,this.La.unshift(function(){}),Z(this,void 0)},Y.prototype.abort=function(){return this.za?this.za:this.ga?(this.Ma&&this.Ma(),this.za=new X):(this.ga=!0,Promise.resolve())},Y.prototype.end=function(){this.La.splice(1)},b("shaka.util.Uint8ArrayUtils.toString",aa),b("shaka.util.Uint8ArrayUtils.fromString",ba),b("shaka.util.Uint8ArrayUtils.toBase64",ca),b("shaka.util.Uint8ArrayUtils.fromBase64",da),b("shaka.util.Uint8ArrayUtils.fromHex",function(b){for(var c=new Uint8Array(b.length/2),d=0;d<b.length;d+=2)c[d/2]=a.parseInt(b.substr(d,2),16);return c}),b("shaka.util.Uint8ArrayUtils.toHex",ea),b("shaka.player.DrmSchemeInfo",ga);var Kd=1;ga.DistinctiveIdentifier={OPTIONAL:0,REQUIRED:Kd};var Ld=1;ga.PersistentState={OPTIONAL:0,REQUIRED:Ld},ia.prototype.clone=function(){var a=new ia;return a.maxHeight=this.maxHeight,a.maxWidth=this.maxWidth,a.maxBandwidth=this.maxBandwidth,a.minBandwidth=this.minBandwidth,a},ga.createUnencrypted=ja,ga.combine=ka,ga.prototype.key=function(){return JSON.stringify(this)},na.prototype.createMediaKeys=function(){var a=new oa(this.jb);return Promise.resolve(a)},na.prototype.getConfiguration=function(){return this.Od},Ad=oa.prototype,Ad.createSession=function(a){var b=a||"temporary";if("temporary"!=b&&"persistent-license"!=b)throw new TypeError("Session type "+a+" is unsupported on this platform.");return a=this.Ia||document.createElement("video"),a.src||(a.src="about:blank"),b=new ra(a,this.Ta,b),this.vd.push(b),b},Ad.setServerCertificate=function(){return Promise.reject(Error("setServerCertificate not supported on this platform."))},Ad.Ee=function(a){a=J({type:"encrypted",initDataType:"webm",initData:a.initData}),this.Ia.dispatchEvent(a)},Ad.De=function(a){var b=qa(this,a.sessionId);b&&(a=J({type:"message",messageType:void 0==b.keyStatuses.wa?"licenserequest":"licenserenewal",message:a.message}),b.ba&&(b.ba.resolve(),b.ba=null),b.dispatchEvent(a))},Ad.Be=function(a){(a=qa(this,a.sessionId))&&a.ready()},Ad.Ce=function(a){var b=qa(this,a.sessionId);b&&b.handleError(a)},c(ra,U),Ad=ra.prototype,Ad.ready=function(){ta(this,"usable"),this.S&&this.S.resolve(),this.S=null},Ad.handleError=function(a){var b=Error("EME v0.1b key error");b.errorCode=a.errorCode,b.errorCode.systemCode=a.systemCode,!a.sessionId&&this.ba?(b.method="generateRequest",45==a.systemCode&&(b.message="Unsupported session type."),this.ba.reject(b),this.ba=null):a.sessionId&&this.S?(b.method="update",this.S.reject(b),this.S=null):(b=a.systemCode,a.errorCode.code==MediaKeyError.MEDIA_KEYERR_OUTPUT?ta(this,"output-not-allowed"):1==b?ta(this,"expired"):ta(this,"internal-error"))},Ad.Zc=function(a,b){if(this.S)this.S.then(this.Zc.bind(this,a,b))["catch"](this.Zc.bind(this,a,b));else{this.S=a;var c,d;"webkit-org.w3.clearkey"==this.Ta?(c=aa(new Uint8Array(b)),d=JSON.parse(c),c=d.keys[0].kty,("A128KW"!=d.keys[0].alg||"oct"!=c)&&(this.S.reject(Error("Response is not a valid JSON Web Key Set.")),this.S=null),c=da(d.keys[0].k),d=da(d.keys[0].kid)):(c=new Uint8Array(b),d=null);try{this.Ia.webkitAddKey(this.Ta,c,d,this.sessionId)}catch(e){this.S.reject(e),this.S=null}}},Ad.generateRequest=function(a,b){return sa(this,b,null)},Ad.load=function(a){return"persistent-license"==this.wc?sa(this,null,a):Promise.reject(Error("Not a persistent session."))},Ad.update=function(a){var b=new X;return this.Zc(b,a),b},Ad.close=function(){if("persistent-license"!=this.wc){if(!this.sessionId)return this.closed.reject(Error("The session is not callable.")),this.closed;this.Ia.webkitCancelKeyRequest(this.Ta,this.sessionId)}return this.closed.resolve(),this.closed},Ad.remove=function(){return"persistent-license"!=this.wc?Promise.reject(Error("Not a persistent session.")):this.close()},ua.prototype.next=function(){return this.md>=this.Kd.length?{value:void 0,done:!0}:{value:this.Kd[this.md++],done:!1}};var Md;Ad=va.prototype,Ad.forEach=function(a){this.wa&&a(this.wa)},Ad.get=function(a){return this.has(a)?this.wa:void 0},Ad.has=function(a){var b=Md;return this.wa&&fa(new Uint8Array(a),b)?!0:!1},Ad.keys=function(){var a=Md,b=[];return this.wa&&b.push(a),new ua(b)},Ad.values=function(){var a=[];return this.wa&&a.push(this.wa),new ua(a)},b("shaka.polyfill.MediaKeys.install",wa),b("shaka.polyfill.installAll",function(){l(),n(),wa(),t()});var Nd=/^(?:([^:/?#.]+):)?(?:\/\/(?:([^/?#]*)@)?([^/#?]*?)(?::([0-9]+))?(?=[/#?]|$))?([^?#]+)?(?:\?([^#]*))?(?:#(.*))?$/;Ad=ya.prototype,Ad.Ja="",Ad.cb="",Ad.ra="",Ad.ub=null,Ad.ja="",Ad.Pa="",Ad.toString=function(){var a=[],b=this.Ja;if(b&&a.push(Da(b,Od,!0),":"),b=this.ra){a.push("//");var c=this.cb;c&&a.push(Da(c,Od,!0),"@"),a.push(encodeURIComponent(b).replace(/%25([0-9a-fA-F]{2})/g,"%$1")),b=this.ub,null!=b&&a.push(":",String(b))}return(b=this.ja)&&(this.ra&&"/"!=b.charAt(0)&&a.push("/"),a.push(Da(b,"/"==b.charAt(0)?Qd:Pd,!0))),(b=this.Xa.toString())&&a.push("?",b),(b=this.Pa)&&a.push("#",Da(b,Sd)),a.join("")},Ad.resolve=function(a){var b=this.clone(),c=!!a.Ja;c?za(b,a.Ja):c=!!a.cb,c?b.cb=a.cb:c=!!a.ra,c?b.ra=a.ra:c=null!=a.ub;var d=a.ja;if(c)Aa(b,a.ub);else if(c=!!a.ja){if("/"!=d.charAt(0))if(this.ra&&!this.ja)d="/"+d;else{var e=b.ja.lastIndexOf("/");-1!=e&&(d=b.ja.substr(0,e+1)+d)}if(".."==d||"."==d)d="";else if(-1!=d.indexOf("./")||-1!=d.indexOf("/.")){for(var e=0==d.lastIndexOf("/",0),d=d.split("/"),f=[],g=0;g<d.length;){var h=d[g++];"."==h?e&&g==d.length&&f.push(""):".."==h?((1<f.length||1==f.length&&""!=f[0])&&f.pop(),e&&g==d.length&&f.push("")):(f.push(h),e=!0)}d=f.join("/")}}return c?b.ja=d:c=""!==a.Xa.toString(),c?Ba(b,a.Xa.clone()):c=!!a.Pa,c&&(b.Pa=a.Pa),b},Ad.clone=function(){return new ya(this)};var Od=/[#\/\?@]/g,Pd=/[\#\?:]/g,Qd=/[\#\?]/g,Rd=/[\#\?@]/g,Sd=/#/g;Ad=Fa.prototype,Ad.U=null,Ad.dc=null,Ad.add=function(a,b){if(!this.U&&(this.U={},this.dc=0,this.Ea))for(var c=this.Ea.split("&"),d=0;d<c.length;d++){var e=c[d].indexOf("="),f=null,g=null;e>=0?(f=c[d].substring(0,e),g=c[d].substring(e+1)):f=c[d],f=decodeURIComponent(f.replace(/\+/g," ")),g=g||"",this.add(f,decodeURIComponent(g.replace(/\+/g," ")))}return this.Ea=null,(c=this.U.hasOwnProperty(a)&&this.U[a])||(this.U[a]=c=[]),c.push(b),this.dc++,this},Ad.toString=function(){if(this.Ea)return this.Ea;if(!this.U)return"";var a,b=[];for(a in this.U)for(var c=encodeURIComponent(a),d=this.U[a],e=0;e<d.length;e++){var f=c;""!==d[e]&&(f+="="+encodeURIComponent(d[e])),b.push(f)}return this.Ea=b.join("&")},Ad.clone=function(){var a=new Fa;if(a.Ea=this.Ea,this.U){var b,c={};for(b in this.U)c[b]=this.U[b].concat();a.U=c,a.dc=this.dc}return a},Qa.prototype.clone=function(){var a=new Qa;return a.e=this.e?new ya(this.e):null,a.o=this.o,a.w=this.w,a.Ga=db(this.Ga),a.Za=db(this.Za),a.M=db(this.M),a},Ra.prototype.clone=function(){var a=new Ra;return a.url=this.url?new ya(this.url):null,a.Y=db(this.Y),a},Sa.prototype.clone=function(){var a=new Sa;return a.url=this.url?new ya(this.url):null,a.Y=db(this.Y),a},Ta.prototype.clone=function(){var a=new Ta;return a.e=this.e?new ya(this.e):null,a.o=this.o,a.w=this.w,a.q=this.q,a.fa=this.fa,a.M=db(this.M),a.Ka=this.Ka.map(function(a){return a.clone()}),a},Ua.prototype.clone=function(){var a=new Ua;return a.Ob=this.Ob?new ya(this.Ob):null,a.ob=db(this.ob),a},Va.prototype.clone=function(){var a=new Va;return a.o=this.o,a.w=this.w,a.q=this.q,a.fa=this.fa,a.pb=this.pb,a.Ha=this.Ha,a.ib=this.ib,a.xa=db(this.xa),a},Wa.prototype.clone=function(){var a=new Wa;return a.uc=this.uc.map(function(a){return a.clone()}),a},Xa.prototype.clone=function(){var a=new Xa;return a.startTime=this.startTime,a.duration=this.duration,a.repeat=this.repeat,a},Ya.prototype.clone=function(){return new Ya(this.Bb,this.end)},Ga.TAG_NAME="MPD",Ha.TAG_NAME="Period",Ia.TAG_NAME="AdaptationSet",Ja.TAG_NAME="Role",Ka.TAG_NAME="ContentComponent",La.TAG_NAME="Representation",Ma.TAG_NAME="ContentProtection",Na.TAG_NAME="cenc:pssh",Oa.TAG_NAME="BaseURL",Pa.TAG_NAME="Location",Qa.TAG_NAME="SegmentBase",Ra.TAG_NAME="RepresentationIndex",Sa.TAG_NAME="Initialization",Ta.TAG_NAME="SegmentList",Ua.TAG_NAME="SegmentURL",Va.TAG_NAME="SegmentTemplate",Wa.TAG_NAME="SegmentTimeline",Xa.TAG_NAME="S",Ga.prototype.parse=function(a,b){this.url=new ya(a.e),this.id=eb(b,"id",kb),this.type=eb(b,"type",kb)||"static",this.ua=eb(b,"mediaPresentationDuration",gb),this.F=eb(b,"minBufferTime",gb,this.F),this.Mc=eb(b,"minimumUpdatePeriod",gb,this.Mc),this.pa=eb(b,"availabilityStartTime",fb,this.pa),this.bb=eb(b,"timeShiftBufferDepth",gb,this.bb),this.Vc=eb(b,"suggestedPresentationDelay",gb,this.Vc);var c=_a(this,b,Oa);this.e=Za(a.e,c?c.url:null),(c=_a(this,b,Pa))&&(this.Yc=Za(a.e,c.url)),this.v=bb(this,b,Ha)},Ha.prototype.parse=function(a,b){this.id=eb(b,"id",kb),this.start=eb(b,"start",gb),this.duration=eb(b,"duration",gb);var c=_a(this,b,Oa);this.e=Za(a.e,c?c.url:null),this.H=_a(this,b,Qa),this.A=_a(this,b,Ta),this.p=_a(this,b,Va),this.Aa=bb(this,b,Ia)},Ia.prototype.parse=function(a,b){var c=_a(this,b,Ka)||{},d=_a(this,b,Ja);this.id=eb(b,"id",kb),this.lang=eb(b,"lang",kb,c.lang),this.contentType=eb(b,"contentType",kb,c.contentType),this.width=eb(b,"width",ib),this.height=eb(b,"height",ib),this.n=eb(b,"mimeType",kb),this.aa=eb(b,"codecs",kb),this.Kb=d&&"main"==d.value,this.lang&&(this.lang=M(this.lang)),c=_a(this,b,Oa),this.e=Za(a.e,c?c.url:null),this.qa=bb(this,b,Ma),!this.contentType&&this.n&&(this.contentType=this.n.split("/")[0]),this.H=a.H?$a(this,b,a.H):_a(this,b,Qa),this.A=a.A?$a(this,b,a.A):_a(this,b,Ta),this.p=a.p?$a(this,b,a.p):_a(this,b,Va),this.Z=bb(this,b,La),!this.n&&this.Z.length&&(this.n=this.Z[0].n,!this.contentType&&this.n&&(this.contentType=this.n.split("/")[0]))},Ja.prototype.parse=function(a,b){this.value=eb(b,"value",kb)},Ka.prototype.parse=function(a,b){this.id=eb(b,"id",kb),this.lang=eb(b,"lang",kb),this.contentType=eb(b,"contentType",kb),this.lang&&(this.lang=M(this.lang))},La.prototype.parse=function(a,b){this.id=eb(b,"id",kb),this.bandwidth=eb(b,"bandwidth",ib),this.width=eb(b,"width",ib,a.width),this.height=eb(b,"height",ib,a.height),this.n=eb(b,"mimeType",kb,a.n),this.aa=eb(b,"codecs",kb,a.aa),this.lang=a.lang;var c=_a(this,b,Oa);this.e=Za(a.e,c?c.url:null),this.qa=bb(this,b,Ma),this.H=a.H?$a(this,b,a.H):_a(this,b,Qa),this.A=a.A?$a(this,b,a.A):_a(this,b,Ta),this.p=a.p?$a(this,b,a.p):_a(this,b,Va),0==this.qa.length&&(this.qa=a.qa)},Ma.prototype.parse=function(a,b){this.schemeIdUri=eb(b,"schemeIdUri",kb),this.value=eb(b,"value",kb),this.pssh=_a(this,b,Na),this.children=Array.prototype.slice.call(b.childNodes)},Na.prototype.parse=function(a,b){var c=cb(b);if(c){this.psshBox=da(c);try{this.parsedPssh=new xa(this.psshBox)}catch(d){if(!(d instanceof RangeError))throw d}}},Oa.prototype.parse=function(a,b){this.url=cb(b)},Pa.prototype.parse=function(a,b){this.url=cb(b)},Qa.prototype.parse=function(a,b){this.e=a.e||this.e,this.o=eb(b,"timescale",ib,this.o),this.w=eb(b,"presentationTimeOffset",jb,this.w),this.Ga=eb(b,"indexRange",hb,this.Ga),this.Za=_a(this,b,Ra)||this.Za,this.M=_a(this,b,Sa)||this.M},Ra.prototype.parse=function(a,b){var c=eb(b,"sourceURL",kb);this.url=Za(a.e,c),this.Y=eb(b,"range",hb,db(a.Ga))},Sa.prototype.parse=function(a,b){var c=eb(b,"sourceURL",kb);this.url=Za(a.e,c),this.Y=eb(b,"range",hb)},Ta.prototype.parse=function(a,b){this.e=a.e||this.e,this.o=eb(b,"timescale",ib,this.o),this.w=eb(b,"presentationTimeOffset",jb,this.w),this.q=eb(b,"duration",ib,this.q),this.fa=eb(b,"startNumber",jb,this.fa),this.M=_a(this,b,Sa)||this.M,this.Ka=bb(this,b,Ua)||this.Ka},Ua.prototype.parse=function(a,b){var c=eb(b,"media",kb);this.Ob=Za(a.e,c),this.ob=eb(b,"mediaRange",hb)},Va.prototype.parse=function(a,b){this.o=eb(b,"timescale",ib,this.o),this.w=eb(b,"presentationTimeOffset",jb,this.w),this.q=eb(b,"duration",ib,this.q),this.fa=eb(b,"startNumber",jb,this.fa),this.pb=eb(b,"media",kb,this.pb),this.Ha=eb(b,"index",kb,this.Ha),this.ib=eb(b,"initialization",kb,this.ib),this.xa=_a(this,b,Wa)||this.xa},Wa.prototype.parse=function(a,b){this.uc=bb(this,b,Xa)},Xa.prototype.parse=function(a,b){this.startTime=eb(b,"t",jb),this.duration=eb(b,"d",jb),this.repeat=eb(b,"r",jb)},sb.prototype.parse=function(a,b,c){var d=null;try{d=this.Rc(a,b,c)}catch(e){if(!(e instanceof RangeError))throw e}return d},sb.prototype.Rc=function(a,b,c){var d=[];a=new w(a,0);var e=y(a);if(1936286840!=y(a))return null;1==e&&(e=z(a));var f=x(a);B(a,3),B(a,4);var g=y(a);if(0==g)return null;var h,i;for(0==f?(h=y(a),i=y(a)):(h=z(a),i=z(a)),B(a,2),f=a.j.getUint16(a.i,a.Lc),a.i+=2,b=b+e+i,e=0;f>e;e++){var j=y(a);i=(2147483648&j)>>>31;var j=2147483647&j,k=y(a);if(y(a),1==i)return null;d.push(new ob(h/g,(h+k)/g,b,b+j-1,c)),h+=k,b+=j}return d},tb.prototype.destroy=function(){this.g=null},Ad=tb.prototype,Ad.length=function(){return this.g.length},Ad.first=function(){if(0==this.g.length)throw new RangeError("SegmentIndex: There is no first SegmentReference.");return this.g[0]},Ad.get=function(a){if(0>a||a>=this.g.length)throw new RangeError("SegmentIndex: The specified index is out of range.");return this.g[a]},Ad.find=function(a){return a=qb(this.g,a),a>=0?this.g[a]:null},Ad.Jc=function(a){return vb(this,a),!0},Ad.Ba=function(a){var b=a-this.na;return 0==b?0:(this.g=rb(this.g,b),this.na=a,b)},Ad.Hc=function(){return 0<this.length()?{start:this.first().startTime,end:ub(this).endTime}:{start:0,end:0}},c(wb,tb),wb.prototype.destroy=function(){this.Ad=this.Ua=null,tb.prototype.destroy.call(this)},wb.prototype.find=function(a){return zb(this,v()/1e3),tb.prototype.find.call(this,a)},wb.prototype.Jc=function(a){return vb(this,a),null==this.R&&xb(this),!0},wb.prototype.Ba=function(a){return a=tb.prototype.Ba.call(this,a),null!=this.R&&(this.tb+=a,this.ka+=a,this.tb>this.R&&(this.R+=a)),a},wb.prototype.Hc=function(){return yb(this,v()/1e3)},c(Ab,wb),Ab.prototype.destroy=function(){this.G=null,wb.prototype.destroy.call(this)},Ab.prototype.find=function(a){var b=v()/1e3;return Bb(this,b),zb(this,b),tb.prototype.find.call(this,a)},Ab.prototype.Jc=function(a){return null==this.V&&a instanceof Ab&&null!=a.V?(this.V=a.V,this.sb=a.sb,this.rb=a.rb,vb(this,a),Bb(this,v()/1e3),xb(this),!0):!1},Ab.prototype.Ba=function(a){return a=wb.prototype.Ba.call(this,a),null!=this.V&&(this.V+=a,this.sb+=a),a},Ab.prototype.Hc=function(){var a=v()/1e3;return Bb(this,a),yb(this,a)},Cb.prototype.destroy=function(){this.G=this.X=this.Q=null,this.b&&(this.b.destroy(),this.b=null)},Cb.prototype.create=function(){if(this.b)return Promise.resolve(this.b);if("dynamic"==this.Q.type)try{this.b=new Ab(this.Q,this.X,this.G,this.Mb)}catch(a){return Promise.reject(a)}else{var b=this.G.p,b=lb(this.G,1,Math.ceil(this.X.duration/(b.q/b.o)));if(!b)return b=Error("Failed to generate SegmentReferences"),b.type="stream",Promise.reject(b);this.b=new tb(b)}return Promise.resolve(this.b)},Db.prototype.destroy=function(){this.G=this.X=this.Q=null,this.b&&(this.b.destroy(),this.b=null)},Db.prototype.create=function(){if(this.b)return Promise.resolve(this.b);for(var a=this.G.A,b=0,c=[],d=0;d<a.Ka.length;++d){var e=a.Ka[d],b=0==d?0:b,f=null,g=null,h=b/a.o;a.q?(f=b+a.q,g=f/a.o):(g=h+this.X.duration,f=g*a.o);var b=f,f=0,i=null;e.ob&&(f=e.ob.Bb,i=e.ob.end),c.push(new ob(h,g,f,i,new ya(e.Ob)))}return this.b=new tb(c),Promise.resolve(this.b)},Eb.prototype.destroy=function(){this.G=this.X=this.Q=null,this.b&&(this.b.destroy(),this.b=null)},Eb.prototype.create=function(){if(this.b)return Promise.resolve(this.b);for(var a=this.G.p,b=a.xa.uc,c=0,d=[],e=0;e<b.length&&b[e].duration;++e)for(var f=null!=b[e].startTime?b[e].startTime:c,g=b[e].repeat||0,h=0;g>=h;++h){var i=f+b[e].duration;0<d.length&&f!=c&&(d[d.length-1].end=f),d.push({start:f,end:i}),c=f=i}for(b=[],c=0;c<d.length;++c){if(g=d[c].start,e=g/a.o,f=d[c].end/a.o,g=mb(this.G,c+a.fa,g),!g)return a=Error("Failed to generate media URL."),a.type="dash",Promise.reject(a);b.push(new ob(e,f,0,null,new ya(g)))}return this.b="dynamic"==this.Q.type?new wb(b,this.Q,this.X,this.Mb):new tb(b),Promise.resolve(this.b)},Fb.prototype.destroy=function(){this.b&&(this.b.destroy(),this.b=null)},Fb.prototype.create=function(){if(this.b)return Promise.resolve(this.b);var a=new ob(0,null,0,null,this.Ue);return this.b=new tb([a]),Promise.resolve(this.b)},Gb.prototype.parse=function(a,b,c){var d=null;try{d=this.Rc(a,b,c)}catch(e){if(!(e instanceof RangeError))throw e}return d},Gb.prototype.Rc=function(a,b,c){if(b=new C(b),440786851!=D(b).id)b=null;else{var d=D(b);if(408125543!=d.id)b=null;else{b=d.j.byteOffset;for(var d=new C(d.j),e=null;d.Qa();){var f=D(d);if(357149030==f.id){e=f;break}}if(e){for(d=new C(e.j),e=1e6;d.Qa();)if(f=D(d),2807729==f.id){e=G(f);break}d=e/1e9}else d=null;b=d?{Ke:b,Ve:d}:null}}if(!b)return null;if(e=D(new C(a)),475249515!=e.id)return null;a=b.Ke,b=b.Ve;for(var d=[],e=new C(e.j),g=f=-1;e.Qa();){var h=D(e);if(187==h.id){var i;if(i=new C(h.j),h=D(i),179!=h.id)i=null;else if(h=G(h),i=D(i),183!=i.id)i=null;else{i=new C(i.j);for(var j=0;i.Qa();){var k=D(i);if(241==k.id){j=G(k);break}}i={Xe:h,Ge:j}}i&&(h=b*i.Xe,i=a+i.Ge,f>=0&&d.push(new ob(f,h,g,i-1,c)),f=h,g=i)}}return f>=0&&d.push(new ob(f,null,g,null,c)),d},Ad=Hb.prototype,Ad.pc=function(){if(this.m)return this.B;if(0==this.url.lastIndexOf("data:",0)){var b=this.url.split(":")[1].split(";").pop().split(","),c=b.pop(),c="base64"==b.pop()?a.atob(c.replace(/-/g,"+").replace(/_/g,"/")):a.decodeURIComponent(c);return"arraybuffer"==this.h.responseType&&(c=ba(c).buffer),b=JSON.parse(JSON.stringify(new XMLHttpRequest)),b.response=c,b.responseText=c.toString(),c=this.B,c.resolve(b),Jb(this),c}if(0==this.url.lastIndexOf("idb:",0))return Lb(this);this.Ac++,this.Hd=Date.now(),this.Sb||(this.Sb=this.h.cd),this.m=new XMLHttpRequest,b=this.url,this.L&&!this.L.supportsCaching()&&(b=new ya(b),b.Xa.add("_",Date.now()),b=b.toString()),this.m.open(this.h.method,b,!0),this.m.responseType=this.h.responseType,this.m.timeout=this.h.kc,this.m.withCredentials=this.h.withCredentials,this.m.onload=this.le.bind(this),this.h.Id&&(this.m.onreadystatechange=this.pe.bind(this)),this.m.onerror=this.Pc.bind(this),
this.m.ontimeout=this.we.bind(this);for(c in this.h.jc)this.m.setRequestHeader(c,this.h.jc[c]);return this.m.send(this.h.body),this.B},Ad.abort=function(){if(this.m&&this.m.readyState!=XMLHttpRequest.DONE){this.m.abort();var a=Mb(this,"Request aborted.","aborted");this.B.reject(a),Jb(this)}},Ad.le=function(a){this.L&&this.L.sample(Date.now()-this.Hd,a.loaded),200<=this.m.status&&299>=this.m.status?(this.B.resolve(this.m),Jb(this)):this.Ac<this.h.Nb?Nb(this):(a=Mb(this,"HTTP error.","net"),this.B.reject(a),Jb(this))},Ad.pe=function(){if(this.m.readyState==XMLHttpRequest.HEADERS_RECEIVED){var a=Date.parse(this.m.getResponseHeader("Date"));a&&(Ed=a-Date.now())}},Ad.Pc=function(){var a=Mb(this,"Network failure.","net");this.B.reject(a),Jb(this)},Ad.we=function(){if(this.Ac<this.h.Nb)Nb(this);else{var a=Mb(this,"Request timed out.","net");this.B.reject(a),Jb(this)}},c(Ob,Hb);var Td=0;Ob.prototype.send=function(){var a=this.url;return this.pc().then(function(b){if(b=b.responseText,b=(new DOMParser).parseFromString(b,"text/xml")){var c={e:new ya(a)};b=_a(c,b,Ga)}else b=null;return b?Promise.resolve(b):(b=Error("MPD parse failure."),b.type="mpd",Promise.reject(b))})},c(Pb,Hb);var Ud=0;Pb.prototype.send=function(){return this.pc().then(function(a){return Promise.resolve(new Uint8Array(a.response))})},c(Qb,Hb);var Vd=0;Qb.prototype.send=function(){return this.pc().then(_(this,function(a){return Promise.resolve(a.response)}))},Rb.prototype.Gb=function(){if(this.vb)return this.vb;this.$a=new Qb(this.url.toString(),this.ab,this.Fb);var a=this.$a.send().then(_(this,function(a){return this.$a=null,Promise.resolve(a)}));return this.vb=a=a["catch"](_(this,function(a){return this.vb=this.$a=null,Promise.reject(a)}))},Tb.prototype.destroy=function(){this.X=this.Q=null,Sb(this.hb),this.hb=null,this.gc&&(Sb(this.gc),this.gc=null),this.b&&(this.b.destroy(),this.b=null),this.B=null},Tb.prototype.create=function(){if(this.B)return this.B;var a=[this.hb.Gb()];return"webm"==this.cc&&a.push(this.gc.Gb()),this.B=Promise.all(a).then(_(this,function(a){var b=a[0];a=a[1]||null;var c=null;return"mp4"==this.cc?(c=new sb,c=c.parse(new DataView(b),this.hb.ab,this.hb.url)):"webm"==this.cc&&(c=new Gb,c=c.parse(new DataView(b),new DataView(a),this.hb.url)),c?(b="dynamic"==this.Q.type?new wb(c,this.Q,this.X,this.Mb):new tb(c),Promise.resolve(b)):(b=Error("Failed to parse segment references from",this.cc,"container."),b.type="stream",Promise.reject(b))}))},Ub.prototype.destroy=function(){this.Pb&&(Sb(this.Pb),this.Pb=null)},Ub.prototype.create=function(){return this.Pb?this.Pb.Gb():Promise.resolve(null)};var Wd=1/60;Vb.prototype.destroy=function(){this.abort(),this.ca=this.C=this.Va=null,this.l.destroy(),this.sd=this.la=this.l=null},Ad=Vb.prototype,Ad.Gb=function(a,b){if(this.C){var c=Error("Cannot fetch: previous operation not complete.");return c.type="stream",Promise.reject(c)}this.C=new Y,b&&this.C.append(function(){return[Yb(this,b),this.yc.bind(this)]}.bind(this)),this.C.append(function(){var b=new Qb(a.url.toString(),a.ab,a.Fb,3,1e3*(a.endTime?a.endTime-a.startTime:1));return b.L=this.P,[b.send(),b.abort.bind(b)]}.bind(this)),this.C.append(function(a){return this.P.getBandwidth(),[Yb(this,a),this.yc.bind(this)]}.bind(this));var d=0==this.la.buffered.length&&0==this.ca.length,e=null;return this.C.append(function(){if(d){var b=a.startTime;e=this.la.buffered.start(0)-b}b=qb(this.ca,a.startTime),b>=0?this.ca.splice(b+1,0,a):this.ca.push(a)}.bind(this)),Xb(this).then(function(){return Promise.resolve(e)}.bind(this))},Ad.clear=function(){if(this.C){var a=Error("Cannot clear: previous operation not complete.");return a.type="stream",Promise.reject(a)}return this.C=new Y,this.C.append(function(){var a;a:if(0==this.la.buffered.length)a=Promise.resolve();else{try{this.la.remove(0,Number.POSITIVE_INFINITY)}catch(b){a=Promise.reject(b);break a}this.ca=[],a=this.Va=new X}return[a,this.yc.bind(this)]}.bind(this)),Xb(this)},Ad.abort=function(){return this.C?this.C.abort():Promise.resolve()},Ad.Ba=function(a){var b=a-this.ya;0!=b&&(this.ca=rb(this.ca,b),this.ya=a)},Ad.yc=function(){"open"==this.sd.readyState&&this.la.abort()},Ad.te=function(){this.Va.resolve(),this.Va=null};var Xd=0;Zb.prototype.destroy=function(){this.N&&(this.N.destroy(),this.N=null),this.I&&(this.I.destroy(),this.I=null)};var Yd=0;_b.prototype.destroy=function(){for(var a=0;a<this.f.length;++a)this.f[a].destroy();this.Na=this.f=null},_b.prototype.Fc=function(){for(var a=[],b=0;b<this.Na.length;++b){var c=new cc;c.id=this.K,c.T=this.Na[b],c.contentType=this.contentType,c.Fa=this.f.length?$b(this.f[0]):"",a.push(c)}return a},ac.prototype.Fc=function(){for(var a=[],b=0;b<this.u.length;++b)a.push.apply(a,this.u[b].Fc());return a},ac.prototype.destroy=function(){for(var a=0;a<this.u.length;++a)this.u[a].destroy();this.u=null},bc.prototype.destroy=function(){for(var a=0;a<this.t.length;++a)this.t[a].destroy();this.t=null},dc.prototype.Wa=function(a){for(var b=v()/1e3,c=0;c<a.v.length;++c)for(var d=a.v[c],e=0;e<d.Aa.length;++e){var f=d.Aa[e];if("text"!=f.contentType)for(var g=0;g<f.Z.length;++g){var h=f.Z[g],i=0,i=i+(h.H?1:0),i=i+(h.A?1:0),i=i+(h.p?1:0);0==i?(f.Z.splice(g,1),--g):1!=i&&(h.H?(h.A=null,h.p=null):h.A&&(h.p=null))}}for(ec(a),c=0;c<a.v.length;++c)for(d=a.v[c],e=0;e<d.Aa.length;++e){for(g=f=d.Aa[e],h=null,i=0;i<g.Z.length;++i){var j=g.Z[i].n||"";h?j!=h&&(g.Z.splice(i,1),--i):h=j}0==f.Z.length&&(d.Aa.splice(e,1),--e)}for("dynamic"==a.type&&null==a.pa&&(a.pa=b),c=new bc,"dynamic"==a.type&&(c.ia=!0,c.zb=a.Mc,c.Zb=new ya(a.Yc?a.Yc:a.url)),c.F=a.F||5,d=0;d<a.v.length&&(e=a.v[d],null!=e.start);++d){for(f=new ac,f.id=e.id,f.start=e.start||0,f.duration=e.duration,g=0;g<e.Aa.length;++g){for(h=e.Aa[g],i=new _b,i.id=h.id,i.lang=h.lang||"",i.contentType=h.contentType||"",i.Kb=h.Kb,j=0;j<h.Z.length;++j){var k=h.Z[j],l=i.Na.slice(0),m=k,n=l,o=[];if(0==m.qa.length)o.push(ja());else if(this.kb)for(var p=0;p<m.qa.length;++p){var q=this.kb(m.qa[p]);q&&o.push(q)}if(0==n.length)Array.prototype.push.apply(n,o);else for(m=0;m<n.length;++m){for(p=!1,q=0;q<o.length;++q)if(n[m].key()==o[q].key()){p=!0;break}p||(n.splice(m,1),--m)}if(!(0==l.length&&0<i.Na.length)){if(o=a,n=e,m=b,k.e){var p=null,q=1,r=0;if(k.H)p=k.n.split("/")[1],"mp4"!=p&&"webm"!=p?p=null:(q=k.H,("webm"!=p||q.M)&&(q.Ga||q.Za&&q.Za.Y)?(r=q.Za,r||(r=new Ra,r.url=new ya(k.e),r.Y=q.Ga?q.Ga.clone():null),r=gc(r),q=q.M?gc(q.M):null,n=new Tb(o,n,p,r,q,m),o=new Ub(q),m=new Zb,m.N=n,m.I=o,p=m):p=null),q=k.H.o,r=k.H.w;else if(k.A)m=k.A,!m.q&&1<m.Ka.length?p=null:m.q||n.duration||1!=m.Ka.length?(m=m.M?gc(m.M):null,n=new Db(o,n,k),o=new Ub(m),m=new Zb,m.N=n,m.I=o,p=m):p=null,q=k.A.o,r=k.A.w;else if(k.p){a:if(p=k.p,q=p,r=0,r+=q.Ha?1:0,r+=q.xa?1:0,r+=q.q?1:0,0==r?q=!1:(1!=r&&(q.Ha?(q.xa=null,q.q=null):q.xa&&(q.q=null)),q=!0),q){if(q=null,p.ib&&(q=(p=k.p.ib)?fc(k,p,Sa):null,!q)){p=null;break a}p=q?gc(q):null;var s=n,q=k,r=m,n=q.p;if(n.Ha)if(n=o,o=s,m=q,q=r,r=p,s=m.n.split("/")[1],"mp4"!=s&&"webm"!=s||"webm"==s&&!r||!mb(m,1,0))n=null;else{var t=void 0;(t=(t=m.p.Ha)?fc(m,t,Ra):null)?(m=gc(t),n=new Tb(n,o,s,m,r,q)):n=null}else n=n.pb?n.xa?new Eb(o,s,q,r):n.q?"dynamic"!=o.type&&null==s.duration?null:new Cb(o,s,q,r):void 0:null;n?(o=new Ub(p),m=new Zb,m.N=n,m.I=o,p=m):p=null}else p=null;q=k.p.o,r=k.p.w}else"text"==k.n.split("/")[0]&&(p=new Zb,p.N=new Fb(new ya(k.e)));p?(p.id=k.id,r&&(p.timestampOffset=-1*r/q),p.bandwidth=k.bandwidth,p.width=k.width,p.height=k.height,p.n=k.n||"",p.aa=k.aa||"",k=p):k=null}else k=null;k&&(i.f.push(k),i.Na=l)}}f.u.push(i)}c.t.push(f)}return c},c(hc,U),hc.prototype.destroy=function(){this.parent=null;for(var a=0;a<this.xb.length;++a)this.xb[a].close()["catch"](function(){});this.xb=[],this.Qb=this.D=this.nb=null,this.l.destroy(),this.a=this.c=this.l=null},Ad=hc.prototype,Ad.Nd=function(a,b,c){for(var d=c.keySystem,e=c.getConfiguration(),f=["audio","video"],g=0;g<f.length;++g){var h=f[g];if(!b.has(h)){var i=e[h+"Capabilities"];if(i&&i.length){for(var i=i[0],j=[],k={},l=0;l<a.length;++l){var m=a[l];m.T.keySystem!=d||m.Fa!=i.contentType||m.id in k||(j.push(m),k[m.id]=!0,this.D?ka(this.D,m.T):this.D=m.T)}b.set(h,j)}}}return this.c.Tc(b),c.createMediaKeys()},Ad.Se=function(a){return this.nb=a,this.a.setMediaKeys(this.nb).then(_(this,function(){return this.D.Dd?this.nb.setServerCertificate(this.D.Dd):Promise.resolve()})).then(_(this,function(){if(0<this.c.fc().length)lc(this);else{for(var a=0;a<this.D.Sa.length;++a){var b=this.D.Sa[a];this.xd({type:"encrypted",initDataType:b.initDataType,initData:b.initData})}0==this.D.Sa.length&&R(this.l,this.a,"encrypted",this.xd.bind(this))}}))},Ad.xd=function(a){var b=new Uint8Array(a.initData),c=Array.prototype.join.apply(b);if(!this.Qb[c]){try{var d=mc(this)}catch(e){return d=K(e),this.dispatchEvent(d),void this.eb.reject(e)}a=d.generateRequest(a.initDataType,a.initData),this.Qb[c]=!0,a["catch"](_(this,function(a){if(this.Qb){this.Qb[c]=!1;var b=K(a);this.dispatchEvent(b),this.eb.reject(a)}})),this.xb.push(d)}},Ad.re=function(a){nc(this,a.target,this.D,a.message)},Ad.ke=function(a){var b=a.target.keyStatuses.values();for(a=b.next();!a.done;a=b.next()){var c=Zd[a.value];c&&(c=Error(c),c.type=a.value,a=K(c),this.dispatchEvent(a))}};var Zd={"output-not-allowed":"The required output protection is not available.",expired:"A required key has expired and the content cannot be decrypted.","internal-error":"An unknown error has occurred in the CDM."};c(oc,U);var $d=15;oc.prototype.destroy=function(){this.bc(),this.P=this.O=null,this.$.destroy(),this.parent=this.a=this.$=null},Ad=oc.prototype,Ad.ld=function(){return this.ga},Ad.kd=function(){return this.ec},Ad.Xb=function(a,b,c){if(a!=this.O){var d=[a.N.create(),a.I.create()];Promise.all(d).then(_(this,function(d){var e=this.O;this.O=a,this.b=d[0],this.Ic=d[1],this.td=b,this.Wc=!0,this.Rb||(e?c&&pc(this,!0):this.ea(0))}))["catch"](_(this,function(a){"aborted"!=a.type&&(a=K(a),this.dispatchEvent(a))}))}},Ad.Cd=function(){return pc(this,!1)},Ad.rc=function(){},Ad.Gc=function(){return!0},Ad.ze=function(){var a;if(a=1<this.$.la.buffered.length?!0:!1,a&&this.ga)pc(this,!0);else{this.ha=null,a=this.O;var b=this.b,c=this.a.currentTime,d=Wb(this.$,c),e=this.ga?$d:Math.min(this.td,$d)+(this.ya||0);d>=e?(rc(this),this.ea(1e3/(Math.abs(this.a.playbackRate)||1))):(b=qc(this,c,b))?(b=this.$.Gb(b,this.Ic),this.Ic=null,this.Wc&&(this.Wc=!1,sc(this,a)),b.then(_(this,function(a){this.ec=!1,null==this.ya&&(this.ya=a),this.Nc&&0<Wb(this.$,c)&&(this.Nc=!1,this.a.currentTime+=.001),this.ea(0)}))["catch"](_(this,function(a){if("aborted"!=a.type){var b=K(a);this.dispatchEvent(b),b=[0,404,410],"net"==a.type&&-1!=b.indexOf(a.Ze.status)&&this.O&&this.ea(5e3)}}))):(rc(this),this.ec||(this.ec=!0,tc(this)),this.ea(1e3))}},Ad.ea=function(b){this.ha=a.setTimeout(this.ze.bind(this),b)},Ad.bc=function(){null!=this.ha&&(a.clearTimeout(this.ha),this.ha=null)},c(uc,U),uc.prototype.destroy=function(){this.oa&&this.a.removeChild(this.oa),this.parent=this.a=this.O=this.b=this.oa=null},Ad=uc.prototype,Ad.ld=function(){return!0},Ad.kd=function(){return!0},Ad.Xb=function(a){a.N.create().then(_(this,function(b){if(this.a){if(this.O=a,this.b=b,0==b.length())return Promise.reject(Error("No subtitles URL available."));b=b.first().url.toString();var c=this.Gc();this.oa&&(this.rc(!1),this.a.removeChild(this.oa)),this.oa=document.createElement("track"),this.a.appendChild(this.oa),this.oa.src=b,this.rc(c)}}))},Ad.Cd=function(){},Ad.rc=function(a){this.Eb=a,this.oa&&(this.oa.track.mode=a?"showing":"disabled")},Ad.Gc=function(){return this.Eb},c(zc,U),b("shaka.player.Player",zc),zc.version="v1.4.1",zc.isBrowserSupported=function(){return!!(a.MediaSource&&a.MediaKeys&&a.navigator&&a.navigator.requestMediaKeySystemAccess&&a.MediaKeySystemAccess&&a.MediaKeySystemAccess.prototype.getConfiguration&&a.Promise&&HTMLVideoElement.prototype.getVideoPlaybackQuality&&Element.prototype.requestFullscreen&&document.exitFullscreen&&"fullscreenElement"in document&&a.Uint8Array)},zc.isTypeSupported=Ac,zc.prototype.destroy=function(){return this.Xc().then(_(this,function(){this.l.destroy(),this.a=this.l=null}))["catch"](function(){})},zc.prototype.destroy=zc.prototype.destroy,zc.prototype.Xc=function(){if(!this.c)return Promise.resolve();this.gb&&Ec(this),this.a.pause(),S(this.l),Dc(this),Bc(this),this.Oa.destroy(),this.Oa=null,this.a.src="";var a=this.a.setMediaKeys(null);return this.c&&(this.c.destroy(),this.c=null),this.gb=!1,this.J=new vc,a},zc.prototype.unload=zc.prototype.Xc,zc.prototype.load=function(a){var b=this.Xc();return this.a.autoplay&&(e("load"),R(this.l,this.a,"timeupdate",this.je.bind(this))),a.Cb(this.zc),b.then(_(this,function(){return a.load(this.ta)})).then(_(this,function(){return this.c=a,this.c.yb(this.lc),this.Oa=new hc(this,this.a,this.c),ic(this.Oa)})).then(_(this,function(){return R(this.l,this.a,"error",this.Pc.bind(this)),R(this.l,this.a,"playing",this.oe.bind(this)),R(this.l,this.a,"pause",this.ne.bind(this)),this.c.bd(this,this.a)})).then(_(this,function(){Cc(this)}))["catch"](_(this,function(b){a.destroy(),this.c=null,this.Oa&&(this.Oa.destroy(),this.Oa=null);var c=K(b);return this.dispatchEvent(c),Promise.reject(b)}))},zc.prototype.load=zc.prototype.load,Ad=zc.prototype,Ad.je=function(){f("load"),this.J.playbackLatency=g("load")/1e3,this.l.Yb(this.a,"timeupdate")},Ad.Pc=function(a){this.a.error&&(a=this.a.error.code,a!=MediaError.MEDIA_ERR_ABORTED&&(a=Error(_d[a]||"Unknown playback error."),a.type="playback",a=K(a),this.dispatchEvent(a)))},Ad.oe=function(){e("playing"),!this.Tb&&0>this.ic&&(this.a.playbackRate=0,this.Qc(this.a.currentTime,Date.now(),this.ic)),this.gb&&Ec(this)},Ad.ne=function(){f("playing");var a=g("playing");if(!isNaN(a)){var b=this.J;b.playTime+=a/1e3}Bc(this)},Ad.getStats=function(){if(!this.a.paused){f("playing");var a=g("playing");if(!isNaN(a)){var b=this.J;b.playTime+=a/1e3,e("playing")}}return a=this.J,(b=this.a.getVideoPlaybackQuality())&&(a.decodedFrames=b.totalVideoFrames,a.droppedFrames=b.droppedVideoFrames),this.J},zc.prototype.getStats=zc.prototype.getStats,zc.prototype.Sd=function(){var a=this.a.videoWidth,b=this.a.videoHeight;return a&&b?{width:a,height:b}:null},zc.prototype.getCurrentResolution=zc.prototype.Sd,zc.prototype.getVideoTracks=function(){return this.c?this.c.getVideoTracks():[]},zc.prototype.getVideoTracks=zc.prototype.getVideoTracks,zc.prototype.getAudioTracks=function(){return this.c?this.c.getAudioTracks():[]},zc.prototype.getAudioTracks=zc.prototype.getAudioTracks,zc.prototype.Hb=function(){return this.c?this.c.Hb():[]},zc.prototype.getTextTracks=zc.prototype.Hb,zc.prototype.Vb=function(a,b){return this.c?this.c.Vb(a,void 0==b?!0:b):!1},zc.prototype.selectVideoTrack=zc.prototype.Vb,zc.prototype.nc=function(a,b){return this.c?this.c.nc(a,void 0==b?!0:b):!1},zc.prototype.selectAudioTrack=zc.prototype.nc,zc.prototype.oc=function(a){return this.c?this.c.oc(a,!1):!1},zc.prototype.selectTextTrack=zc.prototype.oc,zc.prototype.Db=function(a){this.c&&this.c.Db(a)},zc.prototype.enableTextTrack=zc.prototype.Db,zc.prototype.Cb=function(a){this.zc=a,this.c&&this.c.Cb(a)},zc.prototype.enableAdaptation=zc.prototype.Cb,zc.prototype.Rd=function(){return this.zc},zc.prototype.getAdaptationEnabled=zc.prototype.Rd,zc.prototype.Td=function(){return this.a.currentTime},zc.prototype.getCurrentTime=zc.prototype.Td,zc.prototype.Ud=function(){return this.a.duration},zc.prototype.getDuration=zc.prototype.Ud,zc.prototype.Vd=function(){return this.a.muted},zc.prototype.getMuted=zc.prototype.Vd,zc.prototype.Zd=function(){return this.a.volume},zc.prototype.getVolume=zc.prototype.Zd,zc.prototype.play=function(){this.Fd(1),this.a.play()},zc.prototype.play=zc.prototype.play,zc.prototype.pause=function(){this.a.pause()},zc.prototype.pause=zc.prototype.pause,zc.prototype.requestFullscreen=function(){this.a.requestFullscreen()},zc.prototype.requestFullscreen=zc.prototype.requestFullscreen,zc.prototype.seek=function(a){this.a.currentTime=a},zc.prototype.seek=zc.prototype.seek,zc.prototype.Qe=function(a){$d=0>a?0:a},zc.prototype.setStreamBufferSize=zc.prototype.Qe,zc.prototype.Yd=function(){return $d},zc.prototype.getStreamBufferSize=zc.prototype.Yd,zc.prototype.Le=function(a){Ud=a},zc.prototype.setLicenseRequestTimeout=zc.prototype.Le,zc.prototype.Me=function(a){Td=a},zc.prototype.setMpdRequestTimeout=zc.prototype.Me,zc.prototype.Pe=function(a){Vd=a},zc.prototype.setRangeRequestTimeout=zc.prototype.Pe,zc.prototype.Ne=function(a){this.a.muted=a},zc.prototype.setMuted=zc.prototype.Ne,zc.prototype.Re=function(a){this.a.volume=a},zc.prototype.setVolume=zc.prototype.Re,zc.prototype.Oe=function(a){this.ta=M(a)},zc.prototype.setPreferredLanguage=zc.prototype.Oe,zc.prototype.Fd=function(a){Bc(this),a>=0?this.a.playbackRate=a:this.a.paused||(this.a.playbackRate=0,this.Qc(this.a.currentTime,Date.now(),a)),this.ic=a},zc.prototype.setPlaybackRate=zc.prototype.Fd,zc.prototype.Wd=function(){return this.ic},zc.prototype.getPlaybackRate=zc.prototype.Wd,zc.prototype.yb=function(a){if(!(a instanceof ia))throw new TypeError("Argument must be a Restrictions instance.");this.lc=a.clone(),this.c&&this.c.yb(this.lc)},zc.prototype.setRestrictions=zc.prototype.yb,zc.prototype.Xd=function(){return this.lc.clone()},zc.prototype.getRestrictions=zc.prototype.Xd,zc.prototype.hc=function(){return this.c?this.c.hc():!1},zc.prototype.isLive=zc.prototype.hc,zc.prototype.Qc=function(b,c,d){var e=.1*Math.abs(d);this.a.buffered.length&&this.a.buffered.start(0)+e<this.a.currentTime?(this.a.currentTime=b+(Date.now()-c)/1e3*d,this.Tb=a.setTimeout(this.Qc.bind(this,b,c,d),100)):this.a.pause()},zc.prototype.Ae=function(){if(Cc(this),!this.a.ended&&!this.a.seeking){var a=this.a.buffered,b=a.length?a.end(a.length-1):0,a=b-this.a.currentTime,b=b+.05;this.gb?a>this.c.hd()&&(Ec(this),this.a.play()):b<this.a.duration&&.1>a&&!this.a.paused&&(this.gb=!0,this.a.pause(),this.J.bufferingHistory.push(v()/1e3),e("buffering"),this.dispatchEvent(J({type:"bufferingStart"})))}};var _d={2:"A network failure occured while loading media content.",3:"The browser failed to decode the media content.",4:"The browser does not support the media content."};Fc.prototype.Wa=function(a){for(var b=0;b<a.length;++b)for(var c=a[b],d=0;d<c.u.length;++d){for(var e=c.u[d],f=e,g=0;g<f.f.length;++g)Ac($b(f.f[g]))||(f.f.splice(g,1),--g);0==e.f.length&&(c.u.splice(d,1),--d)}for(b=0;b<a.length;++b)for(c=a[b],d=0;d<c.u.length;++d)c.u[d].f.sort(Gc)},Hc.prototype.destroy=function(){this.va.destroy(),this.va=null},Hc.prototype.update=function(a){var b=Ic(a),c=Ic(this.va);return Promise.all([b,c]).then(function(b){var c=b[0];b=b[1];var d=new Fc;d.Wa(this.va.t),a.zb=this.va.zb,a.Zb=this.va.Zb?new ya(this.va.Zb):null,a.F=this.va.F;var e=[];return Jc(a,this.va,c,b,e),d.Wa(a.t),Promise.resolve(e)}.bind(this))},b("shaka.media.SimpleAbrManager",Nc),Nc.prototype.destroy=function(){this.l.destroy(),this.c=this.P=this.l=null},Nc.prototype.start=function(a,b){this.P&&this.c||(this.P=a,this.c=b,this.qb=Date.now()+4e3,R(this.l,this.P,"bandwidth",this.Oc.bind(this)),R(this.l,this.c,"adaptation",this.ie.bind(this)))},Nc.prototype.enable=function(a){this.Eb=a},Nc.prototype.getInitialVideoTrackId=function(){if(!this.c||!this.P)return null;var a=Oc(this);return a?a.id:null},Nc.prototype.Oc=function(){if(this.Eb&&!(Date.now()<this.qb)){var a=Oc(this);if(a){if(a.active)return void(this.qb=Date.now()+3e3);this.c.Vb(a.id,!1)}this.qb=Number.POSITIVE_INFINITY}},Nc.prototype.ie=function(){this.qb==Number.POSITIVE_INFINITY&&(this.qb=Date.now()+3e4)},c(Pc,U),b("shaka.player.HttpVideoSource",Pc),Pc.prototype.destroy=function(){this.ma&&(this.ma.parentElement.removeChild(this.ma),this.ma=null),this.parent=this.D=null},Ad=Pc.prototype,Ad.bd=function(a,b){this.parent=a;var c=b.mediaKeys;return b.src=this.de,c=b.setMediaKeys(c),this.Jd&&(this.ma=document.createElement("track"),this.ma.src=this.Jd,b.appendChild(this.ma),this.ma.track.mode="showing"),c},Ad.load=function(){return Promise.resolve()},Ad.getVideoTracks=function(){return[]},Ad.getAudioTracks=function(){return[]},Ad.Hb=function(){return[]},Ad.hd=function(){return 5},Ad.gd=function(){var a=new cc;return a.T=this.D,[a]},Ad.Tc=function(){},Ad.Vb=function(){return!1},Ad.nc=function(){return!1},Ad.oc=function(){return!1},Ad.Db=function(a){this.ma&&(this.ma.track.mode=a?"showing":"disabled")},Ad.Cb=function(){},Ad.yb=function(){},Ad.fc=function(){return[]},Ad.Ib=function(){return!1},Ad.hc=function(){return!1},c(Qc,U),b("shaka.player.StreamVideoSource",Qc),Qc.prototype.destroy=function(){this.Ub&&(a.clearTimeout(this.Ub),this.Ub=null),this.bc(),this.Da=null,this.sa.destroy(),this.sa=null,N(this.s).forEach(function(a){a.destroy()}),this.r=this.s=null,this.d&&(this.d.destroy(),this.d=null),this.Ab.destroy(),this.parent=this.ac=this.$b=this.J=this.video=this.W=this.L=this.Ab=null},Ad=Qc.prototype,Ad.bd=function(b,c){if(!this.lb){var d=Error("Cannot call attach() right now.");return d.type="stream",Promise.reject(d)}return this.parent=b,this.video=c,this.J=b.getStats(),R(this.sa,this.W,"sourceopen",this.me.bind(this)),R(this.sa,this.L,"bandwidth",this.Oc.bind(this)),d=this.video.mediaKeys,this.video.src=a.URL.createObjectURL(this.W),d=this.video.setMediaKeys(d),Promise.all([this.$b,d])},Ad.load=function(a){return this.lb?(a=Error("Cannot call load() right now."),a.type="stream",Promise.reject(a)):this.d&&0!=this.d.t.length?(this.ta=a,(new Fc).Wa(this.d.t),0==this.d.t.length||0==this.d.t[0].u.length?(a=Error("The manifest specifies content that cannot be displayed on this browser/platform."),a.type="stream",Promise.reject(a)):(this.lb=!0,Promise.resolve())):(a=Error("The manifest does not specify any content."),a.type="stream",Promise.reject(a))},Ad.xe=function(){var a=Date.now(),b=this.ha=null;this.yd(this.d.Zb).then(_(this,function(a){return b=new Hc(a),b.update(this.d)})).then(_(this,function(c){b.destroy(),b=null;for(var d=0;d<c.length;++d)Rc(this,c[d]);this.ac&&this.yb(this.ac),0==Object.keys(this.s).length?Uc(this):this.ea((Date.now()-a)/1e3)}))["catch"](_(this,function(a){b&&(b.destroy(),b=null),"aborted"!=a.type&&(a=K(a),this.dispatchEvent(a),this.d&&this.ea(0))}))},Ad.yd=function(){return Promise.reject("Cannot update manifest with this VideoSource implementation.")},Ad.getVideoTracks=function(){if(!this.r.has("video"))return[];for(var a=this.s.video,a=(a=a?a.O:null)?a.K:0,b=[],c=this.r.get("video"),d=0;d<c.length;++d)for(var e=c[d],f=0;f<e.f.length;++f){var g=e.f[f];if(g.enabled){var h=g.K,g=new j(h,g.bandwidth,g.width,g.height);h==a&&(g.active=!0),b.push(g)}}return b},Qc.prototype.getVideoTracks=Qc.prototype.getVideoTracks,Qc.prototype.getAudioTracks=function(){if(!this.r.has("audio"))return[];for(var a=this.s.audio,a=(a=a?a.O:null)?a.K:0,b=[],c=this.r.get("audio"),d=0;d<c.length;++d)for(var e=c[d],f=e.lang,g=0;g<e.f.length;++g){var i=e.f[g],j=i.K,i=new h(j,i.bandwidth,f);j==a&&(i.active=!0),b.push(i)}return b},Qc.prototype.getAudioTracks=Qc.prototype.getAudioTracks,Qc.prototype.Hb=function(){if(!this.r.has("text"))return[];for(var a=this.s.text,b=a?a.O:null,b=b?b.K:0,c=[],d=this.r.get("text"),e=0;e<d.length;++e)for(var f=d[e],g=f.lang,h=0;h<f.f.length;++h){var j=f.f[h].K,k=new i(j,g);j==b&&(k.active=!0,k.enabled=a.Gc()),c.push(k)}return c},Qc.prototype.getTextTracks=Qc.prototype.Hb,Ad=Qc.prototype,Ad.hd=function(){return this.d&&this.d.F||0},Ad.gd=function(){return this.lb?this.d.t[0].Fc():[]},Ad.Tc=function(a){if(this.lb){for(var b={},c=this.d.t[0],d=0;d<c.u.length;++d){var e=c.u[d];b[e.K]=e}for(this.r.clear(),c=a.keys(),d=0;d<c.length;++d){var e=c[d],f=a.get(e);if("video"==e){var g=f[0].id;this.r.push(e,b[g])}else if("audio"==e)for(var g=f[0].Fa.split(";")[0],h=0;h<f.length;++h){var i=f[h];i.Fa.split(";")[0]==g&&this.r.push(e,b[i.id])}else for(h=0;h<f.length;++h)g=f[h].id,this.r.push(e,b[g])}this.sc=!0,(a=this.r.get("audio"))&&(Tc(this,a),this.r.set("audio",a),a=a[0].lang||this.ta,L(2,this.ta,a)&&(this.sc=!1)),(a=this.r.get("text"))&&(Tc(this,a),this.r.set("text",a),a=a[0].lang||this.ta,L(2,this.ta,a)||(this.sc=!1))}},Ad.Vb=function(a,b){return Sc(this,"video",a,b)},Ad.nc=function(a,b){return Sc(this,"audio",a,b)},Ad.oc=function(a,b){return Sc(this,"text",a,b)},Ad.Db=function(a){var b=this.s.text;b&&b.rc(a)},Ad.Cb=function(a){this.Ab.enable(a)},Ad.yb=function(a){if(this.lb){this.ac=a;for(var b=0;b<this.d.t.length;++b)for(var c=this.d.t[b],d=0;d<c.u.length;++d)for(var e=c.u[d],f=0;f<e.f.length;++f){var g=e.f[f];g.enabled=!0,a.maxWidth&&g.width>a.maxWidth&&(g.enabled=!1),a.maxHeight&&g.height>a.maxHeight&&(g.enabled=!1),a.maxBandwidth&&g.bandwidth>a.maxBandwidth&&(g.enabled=!1),a.minBandwidth&&g.bandwidth<a.minBandwidth&&(g.enabled=!1)}}},Ad.fc=function(){return[]},Ad.Ib=function(){return!1},Ad.hc=function(){return this.d?this.d.ia:!1},Ad.me=function(){this.sa.Yb(this.W,"sourceopen"),Uc(this).then(_(this,function(){this.$b.resolve()}))["catch"](_(this,function(a){this.$b.reject(a)}))},Ad.ve=function(a){this.ud=Math.min(this.ud,a.na),this.mb=Math.max(this.mb,a.na);for(var b in this.s)if(!this.s[b].ld())return;Zc(this)},Ad.ye=function(){this.Ub=null,dd(this);var a=cd(this,bd(this));a&&(_c(this,a.start,a.end),this.video.paused||ad(this,this.video.currentTime,a.start,a.end))},Ad.qe=function(){var a=this.video.currentTime;if(null!=this.Ra){if(a>=this.Ra-.01&&a<=this.Ra+.01)return void(this.Ra=null);this.Ra=null}var b=cd(this,bd(this));if(b){var c=b.end;if((b=ad(this,a,b.start,c))||(c+.01>=a?b=!1:(this.video.currentTime=c,b=!0)),!b)for(var d in this.s)this.s[d].Cd()}},Ad.ue=function(){if(!this.d.ia){for(var a in this.s)if(!this.s[a].kd())return;"open"==this.W.readyState&&this.W.endOfStream()}},Ad.Oc=function(){var a=this.J,b=this.L.getBandwidth();a.estimatedBandwidth=b,a.bandwidthHistory.push(new yc(b))},Ad.ea=function(b){if(null!=this.d.zb){var c=Math.max(this.d.zb,3);b=Math.max(c-b,0),this.ha=a.setTimeout(this.xe.bind(this),1e3*b)}},Ad.bc=function(){this.ha&&(a.clearTimeout(this.ha),this.ha=null)},c(ed,Qc),b("shaka.player.DashVideoSource",ed),ed.prototype.destroy=function(){this.kb=null,Qc.prototype.destroy.call(this)},ed.prototype.load=function(a){return new Ob(this.he).send().then(_(this,function(b){return this.d=new dc(this.kb).Wa(b),Qc.prototype.load.call(this,a)}))},ed.prototype.yd=function(a){return new Ob(a.toString()).send().then(_(this,function(a){return a=new dc(this.kb).Wa(a),Promise.resolve(a)}))},c(fd,U),Ad=fd.prototype,Ad.ae=function(a,b,c){var d=new X;a={stream_id:c.Wb,mime_type:a.n,codecs:a.aa,init_segment:b,references:c.g};var e=ud(this).put(a);return e.onsuccess=function(){d.resolve(c.Wb)},e.onerror=function(){d.reject(e.error)},d},Ad.$d=function(a,b){for(var c=Promise.resolve(),d=0;d<a.length();++d)var e=a.get(d),f=this.Ld.bind(this,e,b),c=c.then(this.He.bind(this,e)),c=c.then(f);return c.then(function(){return Promise.resolve(b)})["catch"](_(this,function(a){return sd(this,b.Wb),Promise.reject(a)}))},Ad.Ld=function(a,b,c){var d=new X;0==b.wb.byteLength&&(b.Ec=a),b.wb=md(b.wb,c),b.Bd++;var e=J({type:"progress",detail:b.Bd/b.We*100,bubbles:!0});if(1048576<=b.wb.byteLength||null==a.endTime){c={stream_id:b.Wb,segment_id:b.Sc,content:b.wb};var f=td(this).put(c);b.g.push({start_time:b.Ec.startTime,start_byte:b.Ec.ab,end_time:a.endTime,url:"idb://"+b.Wb+"/"+b.Sc}),b.Sc++,b.wb=new ArrayBuffer(0),f.onerror=function(){d.reject(f.error)},f.onsuccess=_(this,function(){this.dispatchEvent(e),d.resolve()})}else this.dispatchEvent(e),d.resolve();return d},Ad.He=function(a){return a=new Qb(a.url.toString(),a.ab,a.Fb),a.L=this.P,a.send()},Ad.Bc=function(a){return pd(this,a).then(_(this,function(b){var c,d=[];for(c in b.stream_ids)d.push(sd(this,b.stream_ids[c]));return b=vd(this),d.push(b["delete"](a)),Promise.all(d)}))},xd.prototype.destroy=function(){this.mc=null,this.b&&(this.b.destroy(),this.b=null)},xd.prototype.create=function(){if(this.b)return Promise.resolve(this.b);for(var a=[],b=0;b<this.mc.length;++b){var c=this.mc[b];a.push(new ob(c.start_time,c.end_time,c.start_byte,null,new ya(c.url)))}return this.mc=null,this.b=new tb(a),Promise.resolve(this.b)},c(yd,Qc),b("shaka.player.OfflineVideoSource",yd),yd.retrieveGroupIds=function(){var a=new fd(null,null),b=gd(a).then(function(){return qd(a)});return b.then(function(){hd(a)})["catch"](function(){hd(a)}),b},yd.prototype.Te=function(b,c,d,e){var f,g={},h=[];b=new Ob(b);var i=M(c);return b.send().then(_(this,function(a){return this.d=new dc(d).Wa(a),this.d.ia?Promise.reject(Error("Unable to store live streams offline.")):Qc.prototype.load.call(this,i)})).then(_(this,function(){var b=document.createElement("video");return b.src=a.URL.createObjectURL(this.W),f=new hc(null,b,this),R(this.sa,f,"sessionReady",this.se.bind(this)),ic(f)})).then(_(this,function(){for(var a=P(this.r),b=0;b<a.length;++b)for(var c=a[b],d=0;d<c.f.length;++d){var f=c.f[d];g[f.K]=f}return e()})).then(_(this,function(a){for(var b=0;b<a.length;++b){var c=a[b],d=g[c];if(!d)return Promise.reject(Error("Invalid stream ID chosen: "+c));h.push(d)}var e=["audio","video"];return h=h.filter(function(a){return 0>e.indexOf(a.n.split("/")[0])?!1:!0}),a=h.map(function(a){return a.I.create()}),Promise.all(a)})).then(_(this,function(a){for(var b=h,c=[],d=0;d<b.length;++d)try{c[d]=this.W.addSourceBuffer($b(b[d]))}catch(e){}if(b.length!=c.length)a=Error("Error initializing streams."),a.type="storage",a=Promise.reject(a);else{for(d=0;d<a.length;++d)(b=a[d])&&c[d].appendBuffer(b);a=Promise.resolve()}return a})).then(_(this,function(){return jc(f,this.timeoutMs)})).then(_(this,function(){return zd(this,h,f.D,this.d.t[0].duration)}))},yd.prototype.store=yd.prototype.Te,yd.prototype.se=function(a){this.qc.push(a.detail.sessionId)},yd.prototype.load=function(a){var b,c,d=new fd(null,null);return gd(d).then(_(this,function(){return pd(d,this.jd)})).then(_(this,function(a){var e=[];this.qc=a.session_ids,b=a.duration,c=a.key_system;for(var f=0;f<a.stream_ids.length;++f)e.push(od(d,a.stream_ids[f]));return Promise.all(e)})).then(_(this,function(d){var e=b,f=c,g=new bc;g.F=5;for(var h=new ac,i=0;i<d.length;++i){var j=d[i],k=new Zb,l=new xd(j.references),m=new Uint8Array(j.init_segment),m=new ya("data:application/octet-stream;base64,"+ca(m)),m=new Ub(new Rb(m,0,null));k.N=l,k.I=m,k.n=j.mime_type,k.aa=j.codecs,j=new ga(f,"",!1,null),l=new _b,l.f.push(k),l.Na.push(j),l.contentType=k.n.split("/")[0],h.u.push(l),h.duration=e}return g.t.push(h),this.d=g,Qc.prototype.load.call(this,a)})).then(function(){return hd(d),Promise.resolve()})["catch"](function(a){return hd(d),Promise.reject(a)})},yd.prototype.Bc=function(){var a=new fd(null,null);return gd(a).then(a.Bc.bind(a,this.jd)).then(function(){return hd(a),Promise.resolve()})["catch"](function(b){return hd(a),Promise.reject(b)})},yd.prototype.deleteGroup=yd.prototype.Bc,yd.prototype.fc=function(){return this.qc},yd.prototype.Ib=function(){return!0}}).bind(e,this)(),"undefined"!=typeof a&&a.exports?a.exports=e.shaka:(d=function(){return e.shaka}.call(b,c,b,a),!(void 0!==d&&(a.exports=d)))}()}}); |
|
simple_jump_npe.go | /*
package main
import "fmt"
func main() {
fmt.Println(foo())
}
*/
package empty
func foo(i int) int {
var p *int = nil
if i == 7 {
goto label
}
return 42
label:
return *p // errror
}
func fom(i int) int {
var p *int = nil
label:
if i == 7 |
return 42
}
func foz() int {
var p *int = nil
for i := 0; i < 7; i++ {
continue
return *p // no error
}
return 42
}
func fok() int {
var p *int = nil
for i := 0; i < 7; i++ {
if i < 7 {
continue
}
return *p // no error
}
return 42
}
func bar() int {
var p *int = nil
for i := 0; i < 7; i++ {
if i < 1 {
continue
}
return *p // error
}
return 42
}
func baz() int {
var p *int = nil
for i := 0; i < 7; i++ {
break
return *p // no error
}
return 42
}
func bak() int {
var p *int = nil
for i := 0; i < 7; i++ {
if i > 7 {
break
}
return *p // error (should it be?)
}
return 42
}
func bam() int {
var p *int = nil
for i := 0; i < 7; i++ {
if i > 1 {
break
}
return *p // error
}
return 42
}
func moo() int {
var p *int = nil
outer:
for i := 0; i < 7; i++ {
for j := 0; j < 7; j++ {
break outer
}
return *p // no error
}
return 42
}
func noo() int {
var p *int = nil
outer:
for i := 0; i < 7; i++ {
for j := 0; j < 7; j++ {
continue outer
}
return *p // no error
}
return 42
}
func qoo(i int) int {
var p *int = nil
if i == 7 {
goto label
return *p // no errror
label:
}
return 42
}
| {
i = 42
goto label
return *p // no errror
} |
layer.py | from o3seespy.base_model import OpenSeesObject
class LayerBase(OpenSeesObject):
op_base_type = "layer"
class Straight(LayerBase):
"""
The Straight Layer Class
The layer command is used to generate a number of fibers along a line or a circular arc.
"""
op_type = 'straight'
def __init__(self, osi, mat, num_fiber, area_fiber, start, end):
"""
Initial method for Straight
Parameters
----------
mat: obj
Material tag associated with this fiber (uniaxialmaterial tag for a fibersection and ndmaterial tag for use
in an ndfibersection).
num_fiber: int
Number of fibers along line
area_fiber: float
Area of each fiber
start: list
Y & z-coordinates of first fiber in line (local coordinate system)
end: list
Y & z-coordinates of last fiber in line (local coordinate system)
"""
self.mat = mat
self.num_fiber = int(num_fiber)
self.area_fiber = float(area_fiber)
self.start = start
self.end = end
self._parameters = [self.op_type, self.mat.tag, self.num_fiber, self.area_fiber, *self.start, *self.end]
self.to_process(osi)
class | (LayerBase):
"""
The Circ Layer Class
This command is used to construct a line of fibers along a circular arc
"""
op_type = 'circ'
def __init__(self, osi, mat, num_fiber, area_fiber, center, radius, ang=None):
"""
Initial method for Circ
Parameters
----------
mat: obj
Material tag associated with this fiber (uniaxialmaterial tag for a fibersection and ndmaterial tag for use
in an ndfibersection).
num_fiber: int
Number of fibers along line
area_fiber: float
Area of each fiber
center: listf
Y & z-coordinates of center of circular arc
radius: float
Radius of circlular arc
ang: listf
Starting and ending angle (optional) [0.0, 360.0-360/num_fibres]
"""
self.mat = mat
self.num_fiber = int(num_fiber)
self.area_fiber = float(area_fiber)
self.center = center
self.radius = float(radius)
self.ang = ang
self._parameters = [self.op_type, self.mat.tag, self.num_fiber, self.area_fiber, *self.center, self.radius]
if self.ang is not None:
self._parameters += self.ang
self.to_process(osi)
| Circ |
uint_macros.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![macro_escape]
#![doc(hidden)]
#![allow(unsigned_negate)]
macro_rules! uint_module (($T:ty) => (
// String conversion functions and impl str -> num
/// Parse a byte slice as a number in the given base
///
/// Yields an `Option` because `buf` may or may not actually be parseable.
///
/// # Examples
///
/// ```
/// let num = std::uint::parse_bytes([49,50,51,52,53,54,55,56,57], 10);
/// assert!(num == Some(123456789));
/// ```
#[inline]
pub fn parse_bytes(buf: &[u8], radix: uint) -> Option<$T> {
strconv::from_str_bytes_common(buf, radix, false, false, false,
strconv::ExpNone, false, false)
}
impl FromStr for $T {
#[inline]
fn from_str(s: &str) -> Option<$T> {
strconv::from_str_common(s, 10u, false, false, false,
strconv::ExpNone, false, false)
}
}
impl FromStrRadix for $T {
#[inline]
fn from_str_radix(s: &str, radix: uint) -> Option<$T> {
strconv::from_str_common(s, radix, false, false, false,
strconv::ExpNone, false, false)
}
}
// String conversion functions and impl num -> str
/// Convert to a string as a byte slice in a given base.
///
/// Use in place of x.to_str() when you do not need to store the string permanently
///
/// # Examples
///
/// ```
/// std::uint::to_str_bytes(123, 10, |v| {
/// assert!(v == "123".as_bytes());
/// });
/// ```
#[inline]
pub fn to_str_bytes<U>(n: $T, radix: uint, f: |v: &[u8]| -> U) -> U {
use io::{Writer, Seek};
// The radix can be as low as 2, so we need at least 64 characters for a
// base 2 number, and then we need another for a possible '-' character.
let mut buf = [0u8, ..65];
let amt = {
let mut wr = ::io::BufWriter::new(buf);
(write!(&mut wr, "{}", ::fmt::radix(n, radix as u8))).unwrap();
wr.tell().unwrap() as uint
};
f(buf.slice(0, amt))
}
impl ToStrRadix for $T {
/// Convert to a string in a given base.
#[inline]
fn to_str_radix(&self, radix: uint) -> ~str {
format!("{}", ::fmt::radix(*self, radix as u8))
}
}
#[cfg(test)]
mod tests {
use prelude::*;
use super::*;
use num::ToStrRadix;
use str::StrSlice;
use u16; | #[test]
pub fn test_to_str() {
assert_eq!((0 as $T).to_str_radix(10u), "0".to_owned());
assert_eq!((1 as $T).to_str_radix(10u), "1".to_owned());
assert_eq!((2 as $T).to_str_radix(10u), "2".to_owned());
assert_eq!((11 as $T).to_str_radix(10u), "11".to_owned());
assert_eq!((11 as $T).to_str_radix(16u), "b".to_owned());
assert_eq!((255 as $T).to_str_radix(16u), "ff".to_owned());
assert_eq!((0xff as $T).to_str_radix(10u), "255".to_owned());
}
#[test]
pub fn test_from_str() {
assert_eq!(from_str::<$T>("0"), Some(0u as $T));
assert_eq!(from_str::<$T>("3"), Some(3u as $T));
assert_eq!(from_str::<$T>("10"), Some(10u as $T));
assert_eq!(from_str::<u32>("123456789"), Some(123456789 as u32));
assert_eq!(from_str::<$T>("00100"), Some(100u as $T));
assert!(from_str::<$T>("").is_none());
assert!(from_str::<$T>(" ").is_none());
assert!(from_str::<$T>("x").is_none());
}
#[test]
pub fn test_parse_bytes() {
use str::StrSlice;
assert_eq!(parse_bytes("123".as_bytes(), 10u), Some(123u as $T));
assert_eq!(parse_bytes("1001".as_bytes(), 2u), Some(9u as $T));
assert_eq!(parse_bytes("123".as_bytes(), 8u), Some(83u as $T));
assert_eq!(u16::parse_bytes("123".as_bytes(), 16u), Some(291u as u16));
assert_eq!(u16::parse_bytes("ffff".as_bytes(), 16u), Some(65535u as u16));
assert_eq!(parse_bytes("z".as_bytes(), 36u), Some(35u as $T));
assert!(parse_bytes("Z".as_bytes(), 10u).is_none());
assert!(parse_bytes("_".as_bytes(), 2u).is_none());
}
#[test]
fn test_uint_to_str_overflow() {
let mut u8_val: u8 = 255_u8;
assert_eq!(u8_val.to_str(), "255".to_owned());
u8_val += 1 as u8;
assert_eq!(u8_val.to_str(), "0".to_owned());
let mut u16_val: u16 = 65_535_u16;
assert_eq!(u16_val.to_str(), "65535".to_owned());
u16_val += 1 as u16;
assert_eq!(u16_val.to_str(), "0".to_owned());
let mut u32_val: u32 = 4_294_967_295_u32;
assert_eq!(u32_val.to_str(), "4294967295".to_owned());
u32_val += 1 as u32;
assert_eq!(u32_val.to_str(), "0".to_owned());
let mut u64_val: u64 = 18_446_744_073_709_551_615_u64;
assert_eq!(u64_val.to_str(), "18446744073709551615".to_owned());
u64_val += 1 as u64;
assert_eq!(u64_val.to_str(), "0".to_owned());
}
#[test]
fn test_uint_from_str_overflow() {
let mut u8_val: u8 = 255_u8;
assert_eq!(from_str::<u8>("255"), Some(u8_val));
assert!(from_str::<u8>("256").is_none());
u8_val += 1 as u8;
assert_eq!(from_str::<u8>("0"), Some(u8_val));
assert!(from_str::<u8>("-1").is_none());
let mut u16_val: u16 = 65_535_u16;
assert_eq!(from_str::<u16>("65535"), Some(u16_val));
assert!(from_str::<u16>("65536").is_none());
u16_val += 1 as u16;
assert_eq!(from_str::<u16>("0"), Some(u16_val));
assert!(from_str::<u16>("-1").is_none());
let mut u32_val: u32 = 4_294_967_295_u32;
assert_eq!(from_str::<u32>("4294967295"), Some(u32_val));
assert!(from_str::<u32>("4294967296").is_none());
u32_val += 1 as u32;
assert_eq!(from_str::<u32>("0"), Some(u32_val));
assert!(from_str::<u32>("-1").is_none());
let mut u64_val: u64 = 18_446_744_073_709_551_615_u64;
assert_eq!(from_str::<u64>("18446744073709551615"), Some(u64_val));
assert!(from_str::<u64>("18446744073709551616").is_none());
u64_val += 1 as u64;
assert_eq!(from_str::<u64>("0"), Some(u64_val));
assert!(from_str::<u64>("-1").is_none());
}
#[test]
#[should_fail]
pub fn to_str_radix1() {
100u.to_str_radix(1u);
}
#[test]
#[should_fail]
pub fn to_str_radix37() {
100u.to_str_radix(37u);
}
}
)) | |
binary_sensor.py | """Binary sensor platform for Pandora Car Alarm System."""
__all__ = ["ENTITY_TYPES", "async_setup_entry"]
import logging
from functools import partial
from typing import Any, Dict
import attr
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_CONNECTIVITY,
DEVICE_CLASS_DOOR,
DEVICE_CLASS_MOTION,
DOMAIN as PLATFORM_DOMAIN,
BinarySensorEntity,
ENTITY_ID_FORMAT,
)
from homeassistant.const import ATTR_NAME, ATTR_ICON, ATTR_DEVICE_CLASS
from . import PandoraCASBooleanEntity, async_platform_setup_entry
from .api import BitStatus
from .const import *
_LOGGER = logging.getLogger(__name__)
_car_door_icons = ("mdi:car-door-lock", "mdi:car-door")
_car_glass_icons = ("mdi:car-windshield", "mdi:car-windshield-outline")
ENTITY_TYPES = {
"connection_state": {
ATTR_NAME: "Connection state",
ATTR_DEVICE_CLASS: DEVICE_CLASS_CONNECTIVITY,
ATTR_ATTRIBUTE: "is_online",
ATTR_ATTRIBUTE_SOURCE: True,
},
"moving": {
ATTR_NAME: "Moving",
ATTR_DEVICE_CLASS: DEVICE_CLASS_MOTION,
ATTR_STATE_SENSITIVE: True,
ATTR_ATTRIBUTE: "is_moving",
},
# Status-related sensors
"left_front_door": {
ATTR_NAME: "Left Front Door",
ATTR_ICON: _car_door_icons,
ATTR_DEVICE_CLASS: DEVICE_CLASS_DOOR,
ATTR_ATTRIBUTE: "bit_state",
ATTR_FLAG: BitStatus.DOOR_FRONT_LEFT_OPEN,
ATTR_STATE_SENSITIVE: True,
},
"right_front_door": {
ATTR_NAME: "Right Front Door",
ATTR_ICON: _car_door_icons,
ATTR_DEVICE_CLASS: DEVICE_CLASS_DOOR,
ATTR_ATTRIBUTE: "bit_state",
ATTR_FLAG: BitStatus.DOOR_FRONT_RIGHT_OPEN,
ATTR_STATE_SENSITIVE: True,
},
"left_back_door": {
ATTR_NAME: "Left Back Door",
ATTR_ICON: _car_door_icons,
ATTR_DEVICE_CLASS: DEVICE_CLASS_DOOR,
ATTR_ATTRIBUTE: "bit_state",
ATTR_FLAG: BitStatus.DOOR_BACK_LEFT_OPEN,
ATTR_STATE_SENSITIVE: True,
},
"right_back_door": {
ATTR_NAME: "Right Back Door",
ATTR_ICON: _car_door_icons,
ATTR_DEVICE_CLASS: DEVICE_CLASS_DOOR,
ATTR_ATTRIBUTE: "bit_state",
ATTR_FLAG: BitStatus.DOOR_BACK_RIGHT_OPEN,
ATTR_STATE_SENSITIVE: True,
},
"left_front_glass": {
ATTR_NAME: "Left Front Glass",
ATTR_ICON: _car_glass_icons,
ATTR_DEVICE_CLASS: DEVICE_CLASS_DOOR,
ATTR_ATTRIBUTE: "can_glass_front_left",
ATTR_STATE_SENSITIVE: True,
ATTR_DISABLED_BY_DEFAULT: True,
},
"right_front_glass": {
ATTR_NAME: "Right Front Glass",
ATTR_ICON: _car_glass_icons,
ATTR_DEVICE_CLASS: DEVICE_CLASS_DOOR,
ATTR_ATTRIBUTE: "can_glass_front_right",
ATTR_STATE_SENSITIVE: True,
ATTR_DISABLED_BY_DEFAULT: True,
},
"left_back_glass": {
ATTR_NAME: "Left Back Glass",
ATTR_ICON: _car_glass_icons,
ATTR_DEVICE_CLASS: DEVICE_CLASS_DOOR,
ATTR_ATTRIBUTE: "can_glass_back_left",
ATTR_STATE_SENSITIVE: True,
ATTR_DISABLED_BY_DEFAULT: True,
},
"right_back_glass": {
ATTR_NAME: "Right Back Glass",
ATTR_ICON: _car_glass_icons,
ATTR_DEVICE_CLASS: DEVICE_CLASS_DOOR,
ATTR_ATTRIBUTE: "can_glass_back_right",
ATTR_STATE_SENSITIVE: True,
ATTR_DISABLED_BY_DEFAULT: True,
},
"trunk": {
ATTR_NAME: "Trunk",
ATTR_ICON: "mdi:car-back",
ATTR_DEVICE_CLASS: DEVICE_CLASS_DOOR,
ATTR_ATTRIBUTE: "bit_state",
ATTR_FLAG: BitStatus.TRUNK_OPEN,
ATTR_STATE_SENSITIVE: True,
},
"hood": {
ATTR_NAME: "Hood",
ATTR_ICON: "mdi:car",
ATTR_DEVICE_CLASS: DEVICE_CLASS_DOOR,
ATTR_ATTRIBUTE: "bit_state",
ATTR_FLAG: BitStatus.HOOD_OPEN,
ATTR_STATE_SENSITIVE: True, | ATTR_ICON: "mdi:car-brake-parking",
ATTR_ATTRIBUTE: "bit_state",
ATTR_FLAG: BitStatus.HANDBRAKE_ENGAGED,
ATTR_STATE_SENSITIVE: True,
},
"brakes": {
ATTR_NAME: "Brakes",
ATTR_ICON: "mdi:car-brake-hold",
ATTR_ATTRIBUTE: "bit_state",
ATTR_FLAG: BitStatus.BRAKES_ENGAGED,
ATTR_STATE_SENSITIVE: True,
},
"ignition": {
ATTR_NAME: "Ignition",
ATTR_ICON: "mdi:key-variant",
ATTR_ATTRIBUTE: "bit_state",
ATTR_FLAG: BitStatus.IGNITION,
},
"exterior_lights": {
ATTR_NAME: "Exterior Lights",
ATTR_ICON: "mdi:car-light-high",
ATTR_ATTRIBUTE: "bit_state",
ATTR_FLAG: BitStatus.EXTERIOR_LIGHTS_ACTIVE,
},
"ev_charging_connected": {
ATTR_NAME: "EV Charging Connected",
ATTR_ICON: "mdi:ev-station",
ATTR_ATTRIBUTE: "ev_charging_connected",
ATTR_STATE_SENSITIVE: True,
ATTR_DISABLED_BY_DEFAULT: True,
},
}
class PandoraCASBinarySensor(PandoraCASBooleanEntity, BinarySensorEntity):
ENTITY_TYPES = ENTITY_TYPES
ENTITY_ID_FORMAT = ENTITY_ID_FORMAT
@property
def is_on(self) -> bool:
"""Return current state of"""
return bool(self._state)
@property
def device_state_attributes(self) -> Dict[str, Any]:
existing_attributes = super().device_state_attributes
entity_type = self._entity_type
if entity_type == "connection_state":
state = self._device.state
if state is not None:
existing_attributes.update(attr.asdict(state, True))
elif entity_type == "ev_charging_connected":
if not self._device.is_online:
return existing_attributes
state = self._device.state
existing_attributes["slow_charging"] = state.ev_charging_slow
existing_attributes["fast_charging"] = state.ev_charging_fast
existing_attributes["ready_status"] = state.ev_status_ready
return existing_attributes
async_setup_entry = partial(
async_platform_setup_entry, PLATFORM_DOMAIN, PandoraCASBinarySensor, logger=_LOGGER
) | },
"parking": {
ATTR_NAME: "Parking Mode", |
adapter.go | package encryption
import (
"crypto/x509"
"encoding/base64"
)
type adapter struct {
builder Builder
}
func createAdapter(builder Builder) Adapter |
// FromBytes converts []byte to PrivateKey
func (app *adapter) FromBytes(bytes []byte) (PrivateKey, error) {
pk, err := x509.ParsePKCS1PrivateKey(bytes)
if err != nil {
return nil, err
}
return app.builder.WithPK(*pk).Now()
}
// FromEncoded converts an encoded string to PrivateKey
func (app *adapter) FromEncoded(encoded string) (PrivateKey, error) {
decoded, err := base64.StdEncoding.DecodeString(encoded)
if err != nil {
return nil, err
}
return app.FromBytes(decoded)
}
// ToBytes converts a PrivateKey to []byte
func (app *adapter) ToBytes(pk PrivateKey) []byte {
key := pk.Key()
return x509.MarshalPKCS1PrivateKey(&key)
}
// ToEncoded converts a PrivateKey to an encoded string
func (app *adapter) ToEncoded(pk PrivateKey) string {
bytes := app.ToBytes(pk)
return base64.StdEncoding.EncodeToString(bytes)
}
| {
out := adapter{
builder: builder,
}
return &out
} |
test_graph.py | from unittest import TestCase
from followthemoney import model
from followthemoney.types import registry
from followthemoney.graph import Graph, Node
ENTITY = {
"id": "ralph",
"schema": "Person",
"properties": {
"name": ["Ralph Tester"],
"birthDate": ["1972-05-01"],
"idNumber": ["9177171", "8e839023"],
"website": ["https://ralphtester.me"],
"phone": ["+12025557612"],
"email": ["[email protected]"],
"topics": ["role.spy"],
},
}
ENTITY2 = {
"id": "jodie",
"schema": "Person",
"properties": {"name": ["Jodie Tester"], "birthDate": ["1972-05-01"]},
}
REL = {
"id": "jodie2ralph",
"schema": "Family",
"properties": {"person": ["jodie"], "relative": ["ralph"]},
}
PASS = {
"id": "passpoat",
"schema": "Passport",
"properties": {"holder": ["jodie"], "passportNumber": ["HJSJHAS"]},
}
class GraphTestCase(TestCase):
def test_basic_graph(self):
proxy = model.get_proxy(ENTITY, cleaned=False)
graph = Graph(edge_types=registry.pivots)
graph.add(proxy)
assert len(graph.iternodes()) > 1, graph.to_dict()
assert len(graph.proxies) == 1, graph.proxies
assert len(graph.queued) == 0, graph.proxies
graph.add(None)
assert len(graph.proxies) == 1, graph.proxies
assert len(graph.queued) == 0, graph.proxies
def test_adjacent(self):
graph = Graph(edge_types=registry.pivots)
graph.add(model.get_proxy(ENTITY, cleaned=False))
graph.add(model.get_proxy(ENTITY2, cleaned=False))
graph.add(model.get_proxy(REL, cleaned=False))
graph.add(model.get_proxy(PASS, cleaned=False))
node = Node(registry.entity, "jodie")
adj = list(graph.get_adjacent(node))
assert len(adj) == 3, adj
node = Node(registry.entity, "ralph")
adj = list(graph.get_adjacent(node))
assert len(adj) == 7, adj
node = Node(registry.entity, "passpoat")
adj = list(graph.get_adjacent(node))
assert len(adj) == 2, adj
node = Node(registry.entity, "passpoat")
prop = model.get_qname("Identification:holder")
adj = list(graph.get_adjacent(node, prop))
assert len(adj) == 1, adj
assert adj[0].source_prop == prop, adj[0].source_prop
assert adj[0].target_prop == prop.reverse, adj[0].target_prop
node = Node(registry.entity, "jodie")
prop = model.get_qname("Person:familyPerson")
adj = list(graph.get_adjacent(node, prop))
assert len(adj) == 1, adj
assert adj[0].source_prop == prop, adj[0].source_prop
node = Node(registry.entity, "ralph")
prop = model.get_qname("Person:familyRelative")
adj2 = list(graph.get_adjacent(node, prop))
assert len(adj2) == 1, adj2
assert adj2[0].target_prop == prop, adj2[0].target_prop
assert adj[0] == adj2[0], (adj[0], adj2[0])
assert adj[0].id in repr(adj[0]), repr(adj[0])
def | (self):
proxy = model.get_proxy(ENTITY, cleaned=False)
graph = Graph(edge_types=registry.pivots)
graph.add(proxy)
data = graph.to_dict()
assert "nodes" in data, data
assert "edges" in data, data
def test_nodes(self):
node = Node(registry.phone, "+4917778271717")
assert "+49177" in repr(node), repr(node)
assert node == node, repr(node)
assert node.caption == str(node), str(node)
assert hash(node) == hash(node.id), repr(node)
| test_to_dict |
config.py | """ Module containing a class for encapsulating the settings of the tree search
"""
import os
import yaml
from aizynthfinder.utils.logging import logger
from aizynthfinder.utils.paths import data_path
from aizynthfinder.mcts.policy import Policy
from aizynthfinder.mcts.stock import Stock, MongoDbInchiKeyQuery
class Configuration:
"""
Encapsulating the settings of the tree search, including the policy,
the stock and various parameters.
All the parameters can be retrieved as attributes of the Configuration
object, e.g.
| config.iteration_limit # The maximum number of iterations
On instantiation it will read default parameters from a config.yml
file located in the `data` folder of the package.
"""
def __init__(self):
self._properties = {}
filename = os.path.join(data_path(), "config.yml")
with open(filename, "r") as fileobj:
_config = yaml.load(fileobj.read(), Loader=yaml.SafeLoader)
self._update_from_config(_config)
self.stock = Stock()
self.policy = Policy(self)
self._logger = logger()
def __eq__(self, other):
return self._properties == other._properties
@classmethod
def from_file(cls, filename):
"""
Loads a configuration from a yaml file.
The parameters not set in the yaml file are taken from the default values.
The policies and stocks specified in the yaml file are directly loaded.
:param filename: the path to a yaml file
:type filename: str
:return: a Configuration object with settings from the yaml file
:rtype: Configuration
"""
config_obj = Configuration()
with open(filename, "r") as fileobj:
_config = yaml.load(fileobj.read(), Loader=yaml.SafeLoader)
config_obj._update_from_config(_config)
for key, policy_spec in _config.get("policy", {}).get("files", {}).items():
modelfile, templatefile = policy_spec
config_obj.policy.load_policy(modelfile, templatefile, key)
for key, stockfile in _config.get("stock", {}).get("files", {}).items():
config_obj.stock.load_stock(stockfile, key)
if "mongodb" in _config.get("stock", {}):
query_obj = MongoDbInchiKeyQuery(**(_config["stock"]["mongodb"] or {}))
config_obj.stock.load_stock(query_obj, "mongodb_stock")
return config_obj
def update(self, **settings):
""" Update the configuration using dictionary of parameters
"""
for setting, value in settings.items():
setattr(self, setting, value)
self._logger.info(f"Setting {setting.replace('_', ' ')} to {value}")
def _update_from_config(self, config):
self._properties.update(config.get("finder", {}).get("properties", {}))
self._properties.update(config.get("policy", {}).get("properties", {}))
self._properties.update(config.get("properties", {}))
self.__dict__.update(self._properties) | .. code-block::
config.max_transforms # The maximum of transform |
jailkur.js | module.exports = {
komut: "jailkur",
async run(Client, msg, args) {
const starlix = require("../models/starlix")
if(!msg.guild.members.cache.get(msg.author.id).permissions.has("ADMINISTRATOR")) return msg.channel.send("Bu İşlemi Yapabilmek İçin `ADMINISTRATOR` Yetkisine Sahip Olmalısınız.")
var db = await starlix.findOne({guildID: msg.guild.id})
if(db["jailkurulum"]){
msg.channel.send("JAIL Zaten Kurulu")
}
else{
msg.guild.roles.create({data:{name:"👮♂️ POLICE 👮♂️"}}).then(a => {
msg.guild.channels.create("Hapishane",{type:"category",permissionOverwrites:[{type:"role",id:a.id,allow:["VIEW_CHANNEL","SEND_MESSAGES","READ_MESSAGE_HISTORY"]},{type:"role",id:msg.guild.id,deny:["VIEW_CHANNEL","READ_MESSAGE_HISTORY"]}]}).then(b => {
msg.guild.channels.create("hapis-log",{type: "text", parent:b.id ,permissionOverwrites:[{type:"role",id:a.id,allow:["VIEW_CHANNEL", "SEND_MESSAGES", "READ_MESSAGE_HISTORY"]},{type:"role",id:msg.guild.id,deny:["READ_MESSAGE_HISTORY","SEND_MESSAGES","VIEW_CHANNEL"]}]})
| })
})
starlix.updateOne({guildID:msg.guild.id},{jailkurulum:true}, () => {})
msg.channel.send("JAIL Kuruldu.")
}
}
} | |
backend_test.go | package appId
import (
"fmt"
"testing"
"github.com/hashicorp/vault/logical"
logicaltest "github.com/hashicorp/vault/logical/testing"
)
func TestBackend_basic(t *testing.T) {
logicaltest.Test(t, logicaltest.TestCase{
AcceptanceTest: true,
Factory: Factory,
Steps: []logicaltest.TestStep{
testAccStepMapAppId(t),
testAccStepMapUserId(t),
testAccLogin(t, ""),
testAccLoginAppIDInPath(t, ""),
testAccLoginInvalid(t),
testAccStepDeleteUserId(t),
testAccLoginDeleted(t),
},
})
}
func TestBackend_cidr(t *testing.T) {
logicaltest.Test(t, logicaltest.TestCase{
AcceptanceTest: true,
Factory: Factory, | testAccLoginCidr(t, "10.0.1.5", true),
testAccLoginCidr(t, "", true),
},
})
}
func TestBackend_displayName(t *testing.T) {
logicaltest.Test(t, logicaltest.TestCase{
AcceptanceTest: true,
Factory: Factory,
Steps: []logicaltest.TestStep{
testAccStepMapAppIdDisplayName(t),
testAccStepMapUserId(t),
testAccLogin(t, "tubbin"),
testAccLoginAppIDInPath(t, "tubbin"),
testAccLoginInvalid(t),
testAccStepDeleteUserId(t),
testAccLoginDeleted(t),
},
})
}
// Verify that we are able to update from non-salted (<0.2) to
// using a Salt for the paths
func TestBackend_upgradeToSalted(t *testing.T) {
inm := new(logical.InmemStorage)
// Create some fake keys
se, _ := logical.StorageEntryJSON("struct/map/app-id/foo",
map[string]string{"value": "test"})
inm.Put(se)
se, _ = logical.StorageEntryJSON("struct/map/user-id/bar",
map[string]string{"value": "foo"})
inm.Put(se)
// Initialize the backend, this should do the automatic upgrade
conf := &logical.BackendConfig{
StorageView: inm,
}
backend, err := Factory(conf)
if err != nil {
t.Fatalf("err: %v", err)
}
// Check the keys have been upgraded
out, err := inm.Get("struct/map/app-id/foo")
if err != nil {
t.Fatalf("err: %v", err)
}
if out != nil {
t.Fatalf("unexpected key")
}
out, err = inm.Get("struct/map/user-id/bar")
if err != nil {
t.Fatalf("err: %v", err)
}
if out != nil {
t.Fatalf("unexpected key")
}
// Backend should still be able to resolve
req := logical.TestRequest(t, logical.ReadOperation, "map/app-id/foo")
req.Storage = inm
resp, err := backend.HandleRequest(req)
if err != nil {
t.Fatalf("err: %v", err)
}
if resp.Data["value"] != "test" {
t.Fatalf("bad: %#v", resp)
}
req = logical.TestRequest(t, logical.ReadOperation, "map/user-id/bar")
req.Storage = inm
resp, err = backend.HandleRequest(req)
if err != nil {
t.Fatalf("err: %v", err)
}
if resp.Data["value"] != "foo" {
t.Fatalf("bad: %#v", resp)
}
}
func testAccStepMapAppId(t *testing.T) logicaltest.TestStep {
return logicaltest.TestStep{
Operation: logical.UpdateOperation,
Path: "map/app-id/foo",
Data: map[string]interface{}{
"value": "foo,bar",
},
}
}
func testAccStepMapAppIdDisplayName(t *testing.T) logicaltest.TestStep {
return logicaltest.TestStep{
Operation: logical.UpdateOperation,
Path: "map/app-id/foo",
Data: map[string]interface{}{
"display_name": "tubbin",
"value": "foo,bar",
},
}
}
func testAccStepMapUserId(t *testing.T) logicaltest.TestStep {
return logicaltest.TestStep{
Operation: logical.UpdateOperation,
Path: "map/user-id/42",
Data: map[string]interface{}{
"value": "foo",
},
}
}
func testAccStepDeleteUserId(t *testing.T) logicaltest.TestStep {
return logicaltest.TestStep{
Operation: logical.DeleteOperation,
Path: "map/user-id/42",
}
}
func testAccStepMapUserIdCidr(t *testing.T, cidr string) logicaltest.TestStep {
return logicaltest.TestStep{
Operation: logical.UpdateOperation,
Path: "map/user-id/42",
Data: map[string]interface{}{
"value": "foo",
"cidr_block": cidr,
},
}
}
func testAccLogin(t *testing.T, display string) logicaltest.TestStep {
checkTTL := func(resp *logical.Response) error {
if resp.Auth.LeaseOptions.TTL.String() != "720h0m0s" {
return fmt.Errorf("invalid TTL")
}
return nil
}
return logicaltest.TestStep{
Operation: logical.UpdateOperation,
Path: "login",
Data: map[string]interface{}{
"app_id": "foo",
"user_id": "42",
},
Unauthenticated: true,
Check: logicaltest.TestCheckMulti(
logicaltest.TestCheckAuth([]string{"bar", "default", "foo"}),
logicaltest.TestCheckAuthDisplayName(display),
checkTTL,
),
}
}
func testAccLoginAppIDInPath(t *testing.T, display string) logicaltest.TestStep {
checkTTL := func(resp *logical.Response) error {
if resp.Auth.LeaseOptions.TTL.String() != "720h0m0s" {
return fmt.Errorf("invalid TTL")
}
return nil
}
return logicaltest.TestStep{
Operation: logical.UpdateOperation,
Path: "login/foo",
Data: map[string]interface{}{
"user_id": "42",
},
Unauthenticated: true,
Check: logicaltest.TestCheckMulti(
logicaltest.TestCheckAuth([]string{"bar", "default", "foo"}),
logicaltest.TestCheckAuthDisplayName(display),
checkTTL,
),
}
}
func testAccLoginCidr(t *testing.T, ip string, err bool) logicaltest.TestStep {
check := logicaltest.TestCheckError()
if !err {
check = logicaltest.TestCheckAuth([]string{"bar", "default", "foo"})
}
return logicaltest.TestStep{
Operation: logical.UpdateOperation,
Path: "login",
Data: map[string]interface{}{
"app_id": "foo",
"user_id": "42",
},
ErrorOk: err,
Unauthenticated: true,
RemoteAddr: ip,
Check: check,
}
}
func testAccLoginInvalid(t *testing.T) logicaltest.TestStep {
return logicaltest.TestStep{
Operation: logical.UpdateOperation,
Path: "login",
Data: map[string]interface{}{
"app_id": "foo",
"user_id": "48",
},
ErrorOk: true,
Unauthenticated: true,
Check: logicaltest.TestCheckError(),
}
}
func testAccLoginDeleted(t *testing.T) logicaltest.TestStep {
return logicaltest.TestStep{
Operation: logical.UpdateOperation,
Path: "login",
Data: map[string]interface{}{
"app_id": "foo",
"user_id": "42",
},
ErrorOk: true,
Unauthenticated: true,
Check: logicaltest.TestCheckError(),
}
} | Steps: []logicaltest.TestStep{
testAccStepMapAppIdDisplayName(t),
testAccStepMapUserIdCidr(t, "192.168.1.0/16"),
testAccLoginCidr(t, "192.168.1.5", false), |
barchart.rs | use std::cmp::{max, min};
use unicode_width::UnicodeWidthStr;
use crate::buffer::Buffer;
use crate::layout::Rect;
use crate::style::Style;
use crate::symbols::bar;
use crate::widgets::{Block, Widget};
/// Display multiple bars in a single widgets
///
/// # Examples
///
/// ```
/// # use tui::widgets::{Block, Borders, BarChart};
/// # use tui::style::{Style, Color, Modifier};
/// # fn main() {
/// BarChart::default()
/// .block(Block::default().title("BarChart").borders(Borders::ALL))
/// .bar_width(3)
/// .bar_gap(1)
/// .style(Style::default().fg(Color::Yellow).bg(Color::Red))
/// .value_style(Style::default().fg(Color::Red).modifier(Modifier::Bold))
/// .label_style(Style::default().fg(Color::White))
/// .data(&[("B0", 0), ("B1", 2), ("B2", 4), ("B3", 3)])
/// .max(4);
/// # }
/// ```
pub struct BarChart<'a> {
/// Block to wrap the widget in
block: Option<Block<'a>>,
/// The width of each bar
bar_width: u16,
/// The gap between each bar
bar_gap: u16,
/// Style of the values printed at the bottom of each bar
value_style: Style,
/// Style of the labels printed under each bar
label_style: Style,
/// Style for the widget
style: Style,
/// Slice of (label, value) pair to plot on the chart
data: &'a [(&'a str, u64)],
/// Value necessary for a bar to reach the maximum height (if no value is specified,
/// the maximum value in the data is taken as reference)
max: Option<u64>,
/// Values to display on the bar (computed when the data is passed to the widget)
values: Vec<String>,
}
impl<'a> Default for BarChart<'a> {
fn default() -> BarChart<'a> {
BarChart {
block: None,
max: None,
data: &[],
values: Vec::new(),
bar_width: 1,
bar_gap: 1,
value_style: Default::default(),
label_style: Default::default(),
style: Default::default(),
}
}
}
impl<'a> BarChart<'a> {
pub fn data(mut self, data: &'a [(&'a str, u64)]) -> BarChart<'a> {
self.data = data;
self.values = Vec::with_capacity(self.data.len());
for &(_, v) in self.data {
self.values.push(format!("{}", v));
}
self
}
pub fn block(mut self, block: Block<'a>) -> BarChart<'a> {
self.block = Some(block);
self
}
pub fn max(mut self, max: u64) -> BarChart<'a> {
self.max = Some(max);
self
}
pub fn bar_width(mut self, width: u16) -> BarChart<'a> {
self.bar_width = width;
self
}
pub fn bar_gap(mut self, gap: u16) -> BarChart<'a> {
self.bar_gap = gap;
self
}
pub fn value_style(mut self, style: Style) -> BarChart<'a> {
self.value_style = style;
self
}
pub fn label_style(mut self, style: Style) -> BarChart<'a> {
self.label_style = style;
self
}
pub fn style(mut self, style: Style) -> BarChart<'a> {
self.style = style;
self
}
}
impl<'a> Widget for BarChart<'a> {
fn draw(&mut self, area: Rect, buf: &mut Buffer) {
let chart_area = match self.block {
Some(ref mut b) => {
b.draw(area, buf);
b.inner(area)
}
None => area,
};
if chart_area.height < 2 {
return;
}
self.background(chart_area, buf, self.style.bg);
let max = self
.max
.unwrap_or_else(|| self.data.iter().fold(0, |acc, &(_, v)| max(v, acc)));
let max_index = min(
(chart_area.width / (self.bar_width + self.bar_gap)) as usize,
self.data.len(),
);
let mut data = self
.data
.iter()
.take(max_index)
.map(|&(l, v)| (l, v * u64::from(chart_area.height) * 8 / max))
.collect::<Vec<(&str, u64)>>();
for j in (0..chart_area.height - 1).rev() {
for (i, d) in data.iter_mut().enumerate() {
let symbol = match d.1 {
0 => " ",
1 => bar::ONE_EIGHTH,
2 => bar::ONE_QUATER,
3 => bar::THREE_EIGHTHS,
4 => bar::HALF,
5 => bar::FIVE_EIGHTHS,
6 => bar::THREE_QUATERS,
7 => bar::SEVEN_EIGHTHS,
_ => bar::FULL,
};
for x in 0..self.bar_width {
buf.get_mut(
chart_area.left() + i as u16 * (self.bar_width + self.bar_gap) + x,
chart_area.top() + j,
)
.set_symbol(symbol)
.set_style(self.style);
}
if d.1 > 8 {
d.1 -= 8;
} else |
}
}
for (i, &(label, value)) in self.data.iter().take(max_index).enumerate() {
if value != 0 {
let value_label = &self.values[i];
let width = value_label.width() as u16;
if width < self.bar_width {
buf.set_string(
chart_area.left()
+ i as u16 * (self.bar_width + self.bar_gap)
+ (self.bar_width - width) / 2,
chart_area.bottom() - 2,
value_label,
self.value_style,
);
}
}
buf.set_stringn(
chart_area.left() + i as u16 * (self.bar_width + self.bar_gap),
chart_area.bottom() - 1,
label,
self.bar_width as usize,
self.label_style,
);
}
}
}
| {
d.1 = 0;
} |
index.js | 'use strict';
exports.__esModule = true;
var _classCallCheck2 = require('babel-runtime/helpers/classCallCheck');
var _classCallCheck3 = _interopRequireDefault(_classCallCheck2);
var _path = require('path');
var _fs = require('fs');
var _stripBom = require('strip-bom');
var _stripBom2 = _interopRequireDefault(_stripBom);
var _sourceMapSupport = require('source-map-support');
var _sourceMapSupport2 = _interopRequireDefault(_sourceMapSupport);
var _loadBabelLibs3 = require('./load-babel-libs');
var _loadBabelLibs4 = _interopRequireDefault(_loadBabelLibs3);
var _nodeVersion = require('../../utils/node-version');
var _nodeVersion2 = _interopRequireDefault(_nodeVersion);
var _globals = require('../../api/globals');
var _globals2 = _interopRequireDefault(_globals);
var _runtime = require('../../errors/runtime');
var _stackCleaningHook = require('../../errors/stack-cleaning-hook');
var _stackCleaningHook2 = _interopRequireDefault(_stackCleaningHook);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var COMMON_API_PATH = (0, _path.join)(__dirname, '../../api/common');
var CWD = process.cwd();
var FIXTURE_RE = /(^|;|\s+)fixture\s*(\.|\(|`)/;
var TEST_RE = /(^|;|\s+)test\s*(\.|\()/;
var BABEL_RUNTIME_RE = /^babel-runtime(\\|\/|$)/;
var Module = module.constructor;
var ESNextCompiler = function () {
function | () {
(0, _classCallCheck3.default)(this, ESNextCompiler);
this.sourceMaps = {};
this.cache = {};
this._setupSourceMapsSupport();
}
ESNextCompiler._getNodeModulesLookupPath = function _getNodeModulesLookupPath(filename) {
var dir = (0, _path.dirname)(filename);
return Module._nodeModulePaths(dir);
};
ESNextCompiler._getBabelOptions = function _getBabelOptions(filename) {
var _loadBabelLibs = (0, _loadBabelLibs4.default)(),
presetStage2 = _loadBabelLibs.presetStage2,
transformRuntime = _loadBabelLibs.transformRuntime,
presetES2015Loose = _loadBabelLibs.presetES2015Loose,
presetES2015Node4 = _loadBabelLibs.presetES2015Node4;
var presetES2015 = _nodeVersion2.default < 4 ? presetES2015Loose : presetES2015Node4;
// NOTE: passPrePreset and complex presets is a workaround for https://github.com/babel/babel/issues/2877
// Fixes https://github.com/DevExpress/testcafe/issues/969
return {
passPerPreset: true,
presets: [{ plugins: transformRuntime }, {
passPerPreset: false,
presets: [presetStage2, presetES2015]
}],
filename: filename,
sourceMaps: true,
retainLines: true,
ast: false,
babelrc: false,
highlightCode: false,
resolveModuleSource: function resolveModuleSource(source) {
if (source === 'testcafe') return COMMON_API_PATH;
if (BABEL_RUNTIME_RE.test(source)) {
try {
return require.resolve(source);
} catch (err) {
return source;
}
}
return source;
}
};
};
ESNextCompiler._isNodeModulesDep = function _isNodeModulesDep(filename) {
return (0, _path.relative)(CWD, filename).split(_path.sep).indexOf('node_modules') >= 0;
};
ESNextCompiler._execAsModule = function _execAsModule(code, filename) {
var mod = new Module(filename, module.parent);
mod.filename = filename;
mod.paths = ESNextCompiler._getNodeModulesLookupPath(filename);
mod._compile(code, filename);
};
ESNextCompiler.prototype._setupSourceMapsSupport = function _setupSourceMapsSupport() {
var _this = this;
_sourceMapSupport2.default.install({
handleUncaughtExceptions: false,
environment: 'node',
retrieveSourceMap: function retrieveSourceMap(filename) {
var map = _this.sourceMaps[filename];
return map ? { url: filename, map: map } : null;
}
});
};
ESNextCompiler.prototype._compileES = function _compileES(code, filename) {
var _loadBabelLibs2 = (0, _loadBabelLibs4.default)(),
babel = _loadBabelLibs2.babel;
if (this.cache[filename]) return this.cache[filename];
var opts = ESNextCompiler._getBabelOptions(filename);
var compiled = babel.transform(code, opts);
this.cache[filename] = compiled.code;
this.sourceMaps[filename] = compiled.map;
return compiled.code;
};
ESNextCompiler.prototype._setupRequireHook = function _setupRequireHook(globals) {
var _this2 = this;
var origRequireExtension = require.extensions['.js'];
require.extensions['.js'] = function (mod, filename) {
// NOTE: remove global API so that it will be unavailable for the dependencies
globals.remove();
if (ESNextCompiler._isNodeModulesDep(filename)) origRequireExtension(mod, filename);else {
var code = (0, _fs.readFileSync)(filename);
var compiledCode = _this2._compileES((0, _stripBom2.default)(code), filename);
mod.paths = ESNextCompiler._getNodeModulesLookupPath(filename);
mod._compile(compiledCode, filename);
}
globals.setup();
};
return origRequireExtension;
};
ESNextCompiler.prototype._compileESForTestFile = function _compileESForTestFile(code, filename) {
var compiledCode = null;
_stackCleaningHook2.default.enabled = true;
try {
compiledCode = this._compileES(code, filename);
} catch (err) {
throw new _runtime.TestCompilationError(err);
} finally {
_stackCleaningHook2.default.enabled = false;
}
return compiledCode;
};
ESNextCompiler.prototype.canCompile = function canCompile(code, filename) {
return (/\.js$/.test(filename) && FIXTURE_RE.test(code) && TEST_RE.test(code)
);
};
ESNextCompiler.prototype.compile = function compile(code, filename) {
var compiledCode = this._compileESForTestFile(code, filename);
var globals = new _globals2.default(filename);
globals.setup();
_stackCleaningHook2.default.enabled = true;
var origRequireExtension = this._setupRequireHook(globals);
try {
ESNextCompiler._execAsModule(compiledCode, filename);
} catch (err) {
// HACK: workaround for the `instanceof` problem
// (see: http://stackoverflow.com/questions/33870684/why-doesnt-instanceof-work-on-instances-of-error-subclasses-under-babel-node)
if (err.constructor !== _runtime.APIError) throw new _runtime.TestCompilationError(err);
throw err;
} finally {
require.extensions['.js'] = origRequireExtension;
_stackCleaningHook2.default.enabled = false;
globals.remove();
}
return globals.collectedTests;
};
ESNextCompiler.prototype.cleanUpCache = function cleanUpCache() {
this.cache = null;
};
return ESNextCompiler;
}();
exports.default = ESNextCompiler;
module.exports = exports['default']; | ESNextCompiler |
extensions.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Utility mixins that apply to all Readers and Writers
#[allow(missing_doc)];
// FIXME: Not sure how this should be structured
// FIXME: Iteration should probably be considered separately
use container::Container;
use iter::Iterator;
use option::{Option, Some, None};
use result::{Ok, Err};
use io;
use io::{IoError, IoResult, Reader};
use vec::{OwnedVector, ImmutableVector};
use ptr::RawPtr;
/// An iterator that reads a single byte on each iteration,
/// until `.read_byte()` returns `EndOfFile`.
///
/// # Notes about the Iteration Protocol
///
/// The `Bytes` may yield `None` and thus terminate
/// an iteration, but continue to yield elements if iteration
/// is attempted again.
///
/// # Error
///
/// Any error other than `EndOfFile` that is produced by the underlying Reader
/// is returned by the iterator and should be handled by the caller.
pub struct Bytes<'r, T> {
priv reader: &'r mut T,
}
impl<'r, R: Reader> Bytes<'r, R> {
pub fn new(r: &'r mut R) -> Bytes<'r, R> {
Bytes { reader: r }
}
}
impl<'r, R: Reader> Iterator<IoResult<u8>> for Bytes<'r, R> {
#[inline]
fn next(&mut self) -> Option<IoResult<u8>> {
match self.reader.read_byte() {
Ok(x) => Some(Ok(x)),
Err(IoError { kind: io::EndOfFile, .. }) => None,
Err(e) => Some(Err(e))
}
}
}
pub fn u64_to_le_bytes<T>(n: u64, size: uint, f: |v: &[u8]| -> T) -> T {
use mem::{to_le16, to_le32, to_le64};
use cast::transmute;
// LLVM fails to properly optimize this when using shifts instead of the to_le* intrinsics
assert!(size <= 8u);
match size {
1u => f(&[n as u8]),
2u => f(unsafe { transmute::<i16, [u8, ..2]>(to_le16(n as i16)) }),
4u => f(unsafe { transmute::<i32, [u8, ..4]>(to_le32(n as i32)) }),
8u => f(unsafe { transmute::<i64, [u8, ..8]>(to_le64(n as i64)) }),
_ => {
let mut bytes: ~[u8] = ~[];
let mut i = size;
let mut n = n;
while i > 0u {
bytes.push((n & 255_u64) as u8);
n >>= 8_u64;
i -= 1u;
}
f(bytes)
}
}
}
pub fn u64_to_be_bytes<T>(n: u64, size: uint, f: |v: &[u8]| -> T) -> T {
use mem::{to_be16, to_be32, to_be64};
use cast::transmute;
// LLVM fails to properly optimize this when using shifts instead of the to_be* intrinsics
assert!(size <= 8u);
match size {
1u => f(&[n as u8]),
2u => f(unsafe { transmute::<i16, [u8, ..2]>(to_be16(n as i16)) }),
4u => f(unsafe { transmute::<i32, [u8, ..4]>(to_be32(n as i32)) }),
8u => f(unsafe { transmute::<i64, [u8, ..8]>(to_be64(n as i64)) }),
_ => {
let mut bytes: ~[u8] = ~[];
let mut i = size;
while i > 0u {
let shift = ((i - 1u) * 8u) as u64;
bytes.push((n >> shift) as u8);
i -= 1u;
}
f(bytes)
}
}
}
pub fn u64_from_be_bytes(data: &[u8],
start: uint,
size: uint)
-> u64 {
use ptr::{copy_nonoverlapping_memory};
use mem::from_be64;
use vec::MutableVector;
assert!(size <= 8u);
if data.len() - start < size {
fail!("index out of bounds");
}
let mut buf = [0u8, ..8];
unsafe {
let ptr = data.as_ptr().offset(start as int);
let out = buf.as_mut_ptr();
copy_nonoverlapping_memory(out.offset((8 - size) as int), ptr, size);
from_be64(*(out as *i64)) as u64
}
}
#[cfg(test)]
mod test {
use prelude::*;
use io;
use io::{MemReader, MemWriter};
struct InitialZeroByteReader {
count: int,
}
impl Reader for InitialZeroByteReader {
fn read(&mut self, buf: &mut [u8]) -> io::IoResult<uint> {
if self.count == 0 {
self.count = 1;
Ok(0)
} else {
buf[0] = 10;
Ok(1)
}
}
}
struct EofReader;
impl Reader for EofReader {
fn read(&mut self, _: &mut [u8]) -> io::IoResult<uint> {
Err(io::standard_error(io::EndOfFile))
}
}
struct ErroringReader;
impl Reader for ErroringReader {
fn read(&mut self, _: &mut [u8]) -> io::IoResult<uint> {
Err(io::standard_error(io::InvalidInput))
}
}
struct PartialReader {
count: int,
}
impl Reader for PartialReader {
fn read(&mut self, buf: &mut [u8]) -> io::IoResult<uint> {
if self.count == 0 {
self.count = 1;
buf[0] = 10;
buf[1] = 11;
Ok(2)
} else {
buf[0] = 12;
buf[1] = 13;
Ok(2)
}
}
}
struct ErroringLaterReader {
count: int,
}
impl Reader for ErroringLaterReader {
fn read(&mut self, buf: &mut [u8]) -> io::IoResult<uint> {
if self.count == 0 {
self.count = 1;
buf[0] = 10;
Ok(1)
} else {
Err(io::standard_error(io::InvalidInput))
}
}
}
struct ThreeChunkReader {
count: int,
}
impl Reader for ThreeChunkReader {
fn read(&mut self, buf: &mut [u8]) -> io::IoResult<uint> {
if self.count == 0 {
self.count = 1;
buf[0] = 10;
buf[1] = 11;
Ok(2)
} else if self.count == 1 {
self.count = 2;
buf[0] = 12;
buf[1] = 13;
Ok(2)
} else {
Err(io::standard_error(io::EndOfFile))
}
}
}
#[test]
fn read_byte() {
let mut reader = MemReader::new(~[10]);
let byte = reader.read_byte();
assert!(byte == Ok(10));
}
#[test]
fn read_byte_0_bytes() {
let mut reader = InitialZeroByteReader {
count: 0,
};
let byte = reader.read_byte();
assert!(byte == Ok(10));
}
#[test]
fn | () {
let mut reader = EofReader;
let byte = reader.read_byte();
assert!(byte.is_err());
}
#[test]
fn read_byte_error() {
let mut reader = ErroringReader;
let byte = reader.read_byte();
assert!(byte.is_err());
}
#[test]
fn bytes_0_bytes() {
let mut reader = InitialZeroByteReader {
count: 0,
};
let byte = reader.bytes().next();
assert!(byte == Some(Ok(10)));
}
#[test]
fn bytes_eof() {
let mut reader = EofReader;
let byte = reader.bytes().next();
assert!(byte.is_none());
}
#[test]
fn bytes_error() {
let mut reader = ErroringReader;
let mut it = reader.bytes();
let byte = it.next();
assert!(byte.unwrap().is_err());
}
#[test]
fn read_bytes() {
let mut reader = MemReader::new(~[10, 11, 12, 13]);
let bytes = reader.read_bytes(4).unwrap();
assert!(bytes == ~[10, 11, 12, 13]);
}
#[test]
fn read_bytes_partial() {
let mut reader = PartialReader {
count: 0,
};
let bytes = reader.read_bytes(4).unwrap();
assert!(bytes == ~[10, 11, 12, 13]);
}
#[test]
fn read_bytes_eof() {
let mut reader = MemReader::new(~[10, 11]);
assert!(reader.read_bytes(4).is_err());
}
#[test]
fn push_bytes() {
let mut reader = MemReader::new(~[10, 11, 12, 13]);
let mut buf = ~[8, 9];
reader.push_bytes(&mut buf, 4).unwrap();
assert!(buf == ~[8, 9, 10, 11, 12, 13]);
}
#[test]
fn push_bytes_partial() {
let mut reader = PartialReader {
count: 0,
};
let mut buf = ~[8, 9];
reader.push_bytes(&mut buf, 4).unwrap();
assert!(buf == ~[8, 9, 10, 11, 12, 13]);
}
#[test]
fn push_bytes_eof() {
let mut reader = MemReader::new(~[10, 11]);
let mut buf = ~[8, 9];
assert!(reader.push_bytes(&mut buf, 4).is_err());
assert!(buf == ~[8, 9, 10, 11]);
}
#[test]
fn push_bytes_error() {
let mut reader = ErroringLaterReader {
count: 0,
};
let mut buf = ~[8, 9];
assert!(reader.push_bytes(&mut buf, 4).is_err());
assert!(buf == ~[8, 9, 10]);
}
#[test]
fn read_to_end() {
let mut reader = ThreeChunkReader {
count: 0,
};
let buf = reader.read_to_end().unwrap();
assert!(buf == ~[10, 11, 12, 13]);
}
#[test]
#[should_fail]
fn read_to_end_error() {
let mut reader = ThreeChunkReader {
count: 0,
};
let buf = reader.read_to_end().unwrap();
assert!(buf == ~[10, 11]);
}
#[test]
fn test_read_write_le_mem() {
let uints = [0, 1, 2, 42, 10_123, 100_123_456, ::u64::MAX];
let mut writer = MemWriter::new();
for i in uints.iter() {
writer.write_le_u64(*i).unwrap();
}
let mut reader = MemReader::new(writer.unwrap());
for i in uints.iter() {
assert!(reader.read_le_u64().unwrap() == *i);
}
}
#[test]
fn test_read_write_be() {
let uints = [0, 1, 2, 42, 10_123, 100_123_456, ::u64::MAX];
let mut writer = MemWriter::new();
for i in uints.iter() {
writer.write_be_u64(*i).unwrap();
}
let mut reader = MemReader::new(writer.unwrap());
for i in uints.iter() {
assert!(reader.read_be_u64().unwrap() == *i);
}
}
#[test]
fn test_read_be_int_n() {
let ints = [::i32::MIN, -123456, -42, -5, 0, 1, ::i32::MAX];
let mut writer = MemWriter::new();
for i in ints.iter() {
writer.write_be_i32(*i).unwrap();
}
let mut reader = MemReader::new(writer.unwrap());
for i in ints.iter() {
// this tests that the sign extension is working
// (comparing the values as i32 would not test this)
assert!(reader.read_be_int_n(4).unwrap() == *i as i64);
}
}
#[test]
fn test_read_f32() {
//big-endian floating-point 8.1250
let buf = ~[0x41, 0x02, 0x00, 0x00];
let mut writer = MemWriter::new();
writer.write(buf).unwrap();
let mut reader = MemReader::new(writer.unwrap());
let f = reader.read_be_f32().unwrap();
assert!(f == 8.1250);
}
#[test]
fn test_read_write_f32() {
let f:f32 = 8.1250;
let mut writer = MemWriter::new();
writer.write_be_f32(f).unwrap();
writer.write_le_f32(f).unwrap();
let mut reader = MemReader::new(writer.unwrap());
assert!(reader.read_be_f32().unwrap() == 8.1250);
assert!(reader.read_le_f32().unwrap() == 8.1250);
}
#[test]
fn test_u64_from_be_bytes() {
use super::u64_from_be_bytes;
let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09];
// Aligned access
assert_eq!(u64_from_be_bytes(buf, 0, 0), 0);
assert_eq!(u64_from_be_bytes(buf, 0, 1), 0x01);
assert_eq!(u64_from_be_bytes(buf, 0, 2), 0x0102);
assert_eq!(u64_from_be_bytes(buf, 0, 3), 0x010203);
assert_eq!(u64_from_be_bytes(buf, 0, 4), 0x01020304);
assert_eq!(u64_from_be_bytes(buf, 0, 5), 0x0102030405);
assert_eq!(u64_from_be_bytes(buf, 0, 6), 0x010203040506);
assert_eq!(u64_from_be_bytes(buf, 0, 7), 0x01020304050607);
assert_eq!(u64_from_be_bytes(buf, 0, 8), 0x0102030405060708);
// Unaligned access
assert_eq!(u64_from_be_bytes(buf, 1, 0), 0);
assert_eq!(u64_from_be_bytes(buf, 1, 1), 0x02);
assert_eq!(u64_from_be_bytes(buf, 1, 2), 0x0203);
assert_eq!(u64_from_be_bytes(buf, 1, 3), 0x020304);
assert_eq!(u64_from_be_bytes(buf, 1, 4), 0x02030405);
assert_eq!(u64_from_be_bytes(buf, 1, 5), 0x0203040506);
assert_eq!(u64_from_be_bytes(buf, 1, 6), 0x020304050607);
assert_eq!(u64_from_be_bytes(buf, 1, 7), 0x02030405060708);
assert_eq!(u64_from_be_bytes(buf, 1, 8), 0x0203040506070809);
}
}
#[cfg(test)]
mod bench {
extern crate test;
use self::test::BenchHarness;
use container::Container;
macro_rules! u64_from_be_bytes_bench_impl(
($size:expr, $stride:expr, $start_index:expr) =>
({
use vec;
use super::u64_from_be_bytes;
let data = vec::from_fn($stride*100+$start_index, |i| i as u8);
let mut sum = 0u64;
bh.iter(|| {
let mut i = $start_index;
while i < data.len() {
sum += u64_from_be_bytes(data, i, $size);
i += $stride;
}
});
})
)
#[bench]
fn u64_from_be_bytes_4_aligned(bh: &mut BenchHarness) {
u64_from_be_bytes_bench_impl!(4, 4, 0);
}
#[bench]
fn u64_from_be_bytes_4_unaligned(bh: &mut BenchHarness) {
u64_from_be_bytes_bench_impl!(4, 4, 1);
}
#[bench]
fn u64_from_be_bytes_7_aligned(bh: &mut BenchHarness) {
u64_from_be_bytes_bench_impl!(7, 8, 0);
}
#[bench]
fn u64_from_be_bytes_7_unaligned(bh: &mut BenchHarness) {
u64_from_be_bytes_bench_impl!(7, 8, 1);
}
#[bench]
fn u64_from_be_bytes_8_aligned(bh: &mut BenchHarness) {
u64_from_be_bytes_bench_impl!(8, 8, 0);
}
#[bench]
fn u64_from_be_bytes_8_unaligned(bh: &mut BenchHarness) {
u64_from_be_bytes_bench_impl!(8, 8, 1);
}
}
| read_byte_eof |
main.ts | import { TransformInterceptor } from './transform.interceptor';
import { ValidationPipe } from '@nestjs/common';
import { NestFactory } from '@nestjs/core';
import { AppModule } from './app.module';
import { Logger } from '@nestjs/common';
async function bootstrap() {
const logger = new Logger();
const app = await NestFactory.create(AppModule);
app.useGlobalPipes(new ValidationPipe());
app.useGlobalInterceptors(new TransformInterceptor());
const port = 3000;
await app.listen(port);
logger.log(`Application listening port ${port}`);
}
bootstrap(); | ||
DescribeSimulationJobCommand.ts | import { RoboMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../RoboMakerClient";
import { DescribeSimulationJobRequest, DescribeSimulationJobResponse } from "../models/index";
import {
deserializeAws_restJson1DescribeSimulationJobCommand,
serializeAws_restJson1DescribeSimulationJobCommand,
} from "../protocols/Aws_restJson1";
import { getSerdePlugin } from "@aws-sdk/middleware-serde";
import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http";
import { Command as $Command } from "@aws-sdk/smithy-client";
import {
FinalizeHandlerArguments,
Handler,
HandlerExecutionContext,
MiddlewareStack,
HttpHandlerOptions as __HttpHandlerOptions,
MetadataBearer as __MetadataBearer,
SerdeContext as __SerdeContext,
} from "@aws-sdk/types";
export type DescribeSimulationJobCommandInput = DescribeSimulationJobRequest;
export type DescribeSimulationJobCommandOutput = DescribeSimulationJobResponse & __MetadataBearer;
export class | extends $Command<
DescribeSimulationJobCommandInput,
DescribeSimulationJobCommandOutput,
RoboMakerClientResolvedConfig
> {
// Start section: command_properties
// End section: command_properties
constructor(readonly input: DescribeSimulationJobCommandInput) {
// Start section: command_constructor
super();
// End section: command_constructor
}
resolveMiddleware(
clientStack: MiddlewareStack<ServiceInputTypes, ServiceOutputTypes>,
configuration: RoboMakerClientResolvedConfig,
options?: __HttpHandlerOptions
): Handler<DescribeSimulationJobCommandInput, DescribeSimulationJobCommandOutput> {
this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize));
const stack = clientStack.concat(this.middlewareStack);
const { logger } = configuration;
const handlerExecutionContext: HandlerExecutionContext = {
logger,
inputFilterSensitiveLog: DescribeSimulationJobRequest.filterSensitiveLog,
outputFilterSensitiveLog: DescribeSimulationJobResponse.filterSensitiveLog,
};
const { requestHandler } = configuration;
return stack.resolve(
(request: FinalizeHandlerArguments<any>) =>
requestHandler.handle(request.request as __HttpRequest, options || {}),
handlerExecutionContext
);
}
private serialize(input: DescribeSimulationJobCommandInput, context: __SerdeContext): Promise<__HttpRequest> {
return serializeAws_restJson1DescribeSimulationJobCommand(input, context);
}
private deserialize(output: __HttpResponse, context: __SerdeContext): Promise<DescribeSimulationJobCommandOutput> {
return deserializeAws_restJson1DescribeSimulationJobCommand(output, context);
}
// Start section: command_body_extra
// End section: command_body_extra
}
| DescribeSimulationJobCommand |
adx_error.pb.go | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.27.1
// protoc v3.17.3
// source: google/ads/googleads/v6/errors/adx_error.proto
package errors
import (
reflect "reflect"
sync "sync"
_ "google.golang.org/genproto/googleapis/api/annotations"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// Enum describing possible adx errors.
type AdxErrorEnum_AdxError int32
const (
// Enum unspecified.
AdxErrorEnum_UNSPECIFIED AdxErrorEnum_AdxError = 0
// The received error code is not known in this version.
AdxErrorEnum_UNKNOWN AdxErrorEnum_AdxError = 1
// Attempt to use non-AdX feature by AdX customer.
AdxErrorEnum_UNSUPPORTED_FEATURE AdxErrorEnum_AdxError = 2
)
// Enum value maps for AdxErrorEnum_AdxError.
var (
AdxErrorEnum_AdxError_name = map[int32]string{
0: "UNSPECIFIED",
1: "UNKNOWN",
2: "UNSUPPORTED_FEATURE",
}
AdxErrorEnum_AdxError_value = map[string]int32{
"UNSPECIFIED": 0,
"UNKNOWN": 1,
"UNSUPPORTED_FEATURE": 2,
}
)
func (x AdxErrorEnum_AdxError) Enum() *AdxErrorEnum_AdxError {
p := new(AdxErrorEnum_AdxError)
*p = x
return p
}
func (x AdxErrorEnum_AdxError) String() string {
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (AdxErrorEnum_AdxError) Descriptor() protoreflect.EnumDescriptor {
return file_google_ads_googleads_v6_errors_adx_error_proto_enumTypes[0].Descriptor()
}
func (AdxErrorEnum_AdxError) Type() protoreflect.EnumType {
return &file_google_ads_googleads_v6_errors_adx_error_proto_enumTypes[0]
}
func (x AdxErrorEnum_AdxError) Number() protoreflect.EnumNumber {
return protoreflect.EnumNumber(x)
}
// Deprecated: Use AdxErrorEnum_AdxError.Descriptor instead.
func (AdxErrorEnum_AdxError) EnumDescriptor() ([]byte, []int) {
return file_google_ads_googleads_v6_errors_adx_error_proto_rawDescGZIP(), []int{0, 0}
}
// Container for enum describing possible adx errors.
type AdxErrorEnum struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
}
func (x *AdxErrorEnum) Reset() {
*x = AdxErrorEnum{}
if protoimpl.UnsafeEnabled |
}
func (x *AdxErrorEnum) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*AdxErrorEnum) ProtoMessage() {}
func (x *AdxErrorEnum) ProtoReflect() protoreflect.Message {
mi := &file_google_ads_googleads_v6_errors_adx_error_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use AdxErrorEnum.ProtoReflect.Descriptor instead.
func (*AdxErrorEnum) Descriptor() ([]byte, []int) {
return file_google_ads_googleads_v6_errors_adx_error_proto_rawDescGZIP(), []int{0}
}
var File_google_ads_googleads_v6_errors_adx_error_proto protoreflect.FileDescriptor
var file_google_ads_googleads_v6_errors_adx_error_proto_rawDesc = []byte{
0x0a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x36, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2f, 0x61, 0x64, 0x78, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x12, 0x1e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x36, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e,
0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x51,
0x0a, 0x0c, 0x41, 0x64, 0x78, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x22, 0x41,
0x0a, 0x08, 0x41, 0x64, 0x78, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x4e,
0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x55,
0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x01, 0x12, 0x17, 0x0a, 0x13, 0x55, 0x4e, 0x53, 0x55,
0x50, 0x50, 0x4f, 0x52, 0x54, 0x45, 0x44, 0x5f, 0x46, 0x45, 0x41, 0x54, 0x55, 0x52, 0x45, 0x10,
0x02, 0x42, 0xe8, 0x01, 0x0a, 0x22, 0x63, 0x6f, 0x6d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76,
0x36, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x42, 0x0d, 0x41, 0x64, 0x78, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x44, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x67, 0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x67, 0x65, 0x6e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73,
0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76,
0x36, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x3b, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0xa2,
0x02, 0x03, 0x47, 0x41, 0x41, 0xaa, 0x02, 0x1e, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x41,
0x64, 0x73, 0x2e, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x41, 0x64, 0x73, 0x2e, 0x56, 0x36, 0x2e,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x73, 0xca, 0x02, 0x1e, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x5c,
0x41, 0x64, 0x73, 0x5c, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x41, 0x64, 0x73, 0x5c, 0x56, 0x36,
0x5c, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x73, 0xea, 0x02, 0x22, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x3a, 0x3a, 0x41, 0x64, 0x73, 0x3a, 0x3a, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x41, 0x64, 0x73,
0x3a, 0x3a, 0x56, 0x36, 0x3a, 0x3a, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x62, 0x06, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x33,
}
var (
file_google_ads_googleads_v6_errors_adx_error_proto_rawDescOnce sync.Once
file_google_ads_googleads_v6_errors_adx_error_proto_rawDescData = file_google_ads_googleads_v6_errors_adx_error_proto_rawDesc
)
func file_google_ads_googleads_v6_errors_adx_error_proto_rawDescGZIP() []byte {
file_google_ads_googleads_v6_errors_adx_error_proto_rawDescOnce.Do(func() {
file_google_ads_googleads_v6_errors_adx_error_proto_rawDescData = protoimpl.X.CompressGZIP(file_google_ads_googleads_v6_errors_adx_error_proto_rawDescData)
})
return file_google_ads_googleads_v6_errors_adx_error_proto_rawDescData
}
var file_google_ads_googleads_v6_errors_adx_error_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
var file_google_ads_googleads_v6_errors_adx_error_proto_msgTypes = make([]protoimpl.MessageInfo, 1)
var file_google_ads_googleads_v6_errors_adx_error_proto_goTypes = []interface{}{
(AdxErrorEnum_AdxError)(0), // 0: google.ads.googleads.v6.errors.AdxErrorEnum.AdxError
(*AdxErrorEnum)(nil), // 1: google.ads.googleads.v6.errors.AdxErrorEnum
}
var file_google_ads_googleads_v6_errors_adx_error_proto_depIdxs = []int32{
0, // [0:0] is the sub-list for method output_type
0, // [0:0] is the sub-list for method input_type
0, // [0:0] is the sub-list for extension type_name
0, // [0:0] is the sub-list for extension extendee
0, // [0:0] is the sub-list for field type_name
}
func init() { file_google_ads_googleads_v6_errors_adx_error_proto_init() }
func file_google_ads_googleads_v6_errors_adx_error_proto_init() {
if File_google_ads_googleads_v6_errors_adx_error_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_google_ads_googleads_v6_errors_adx_error_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*AdxErrorEnum); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_google_ads_googleads_v6_errors_adx_error_proto_rawDesc,
NumEnums: 1,
NumMessages: 1,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_google_ads_googleads_v6_errors_adx_error_proto_goTypes,
DependencyIndexes: file_google_ads_googleads_v6_errors_adx_error_proto_depIdxs,
EnumInfos: file_google_ads_googleads_v6_errors_adx_error_proto_enumTypes,
MessageInfos: file_google_ads_googleads_v6_errors_adx_error_proto_msgTypes,
}.Build()
File_google_ads_googleads_v6_errors_adx_error_proto = out.File
file_google_ads_googleads_v6_errors_adx_error_proto_rawDesc = nil
file_google_ads_googleads_v6_errors_adx_error_proto_goTypes = nil
file_google_ads_googleads_v6_errors_adx_error_proto_depIdxs = nil
}
| {
mi := &file_google_ads_googleads_v6_errors_adx_error_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
} |
__init__.py | import pytest
from pytest_mock_resources.fixture.database.generic import assign_fixture_credentials
from pytest_mock_resources.fixture.database.relational.generic import EngineManager
from pytest_mock_resources.fixture.database.relational.postgresql import (
_create_clean_database,
get_sqlalchemy_engine,
)
from pytest_mock_resources.patch.redshift import psycopg2, sqlalchemy
def create_redshift_fixture(*ordered_actions, scope="function", tables=None, session=None):
"""Produce a Redshift fixture.
Any number of fixture functions can be created. Under the hood they will all share the same
database server.
Arguments:
ordered_actions: Any number of ordered actions to be run on test setup.
scope: Passthrough pytest's fixture scope.
tables: Subsets the tables created by `ordered_actions`. This is generally
most useful when a model-base was specified in `ordered_actions`.
session: Whether to return a session instead of an engine directly. This can
either be a bool or a callable capable of producing a session.
"""
from pytest_mock_resources.fixture.database.relational.redshift.udf import REDSHIFT_UDFS
ordered_actions = ordered_actions + (REDSHIFT_UDFS,)
@pytest.fixture(scope=scope)
def | (_redshift_container, pmr_postgres_config):
database_name = _create_clean_database(pmr_postgres_config)
engine = get_sqlalchemy_engine(pmr_postgres_config, database_name)
assign_fixture_credentials(
engine,
drivername="postgresql+psycopg2",
host=pmr_postgres_config.host,
port=pmr_postgres_config.port,
database=database_name,
username=pmr_postgres_config.username,
password=pmr_postgres_config.password,
)
engine = sqlalchemy.substitute_execute_with_custom_execute(engine)
engine_manager = EngineManager(
engine, ordered_actions, tables=tables, default_schema="public"
)
with psycopg2.patch_connect(pmr_postgres_config):
for engine in engine_manager.manage(session=session):
yield engine
return _
| _ |
is-number-value.js | "use strict";
var isValue = require("./is-value");
module.exports = function (value) { | try { return !isNaN(value); }
catch (e) { return false; }
}; | if (!isValue(value)) return false; |
ChevronController.py | __author__ = 'Michael Andrew [email protected]'
from nz.co.hazardmedia.sgdialer.models.ChevronModel import ChevronModel
class ChevronController(object):
chevron_model = None
id = 0
def __init__(self, id):
self.id = id
if id == 1:
name = "Chevron 1"
elif id == 2: | elif id == 4:
name = "Chevron 4"
elif id == 5:
name = "Chevron 5"
elif id == 6:
name = "Chevron 6"
elif id == 7:
name = "Chevron 7"
elif id == 8:
name = "Chevron 8"
elif id == 9:
name = "Chevron 9"
self.chevron_model = ChevronModel(name)
print "ChevronController initialized."
def lock(self):
self.chevron_model.locked = True | name = "Chevron 2"
elif id == 3:
name = "Chevron 3" |
AccelerationGrid-spec.js | /*
* Copyright (C) 2017-2018 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { shallow, mount } from 'enzyme';
import Immutable from 'immutable';
import { Table } from 'fixed-data-table-2';
import { AccelerationGrid } from './AccelerationGrid';
const context = {
context: {
reflectionSaveErrors: Immutable.fromJS({}),
lostFieldsByReflection: {}
}
};
describe('AccelerationGrid', () => {
let minimalProps;
let commonProps;
let wrapper;
let instance;
beforeEach(() => {
minimalProps = {
columns: Immutable.List(),
shouldShowDistribution: true,
layoutFields: [
{
id:{value:'b'},
name: {value: 'foo'},
partitionDistributionStrategy: {value: 'CONSOLIDATED'},
shouldDelete: {value: false},
enabled: {value: true}
}
],
location: {
state: {}
},
activeTab: 'aggregation',
reflections: Immutable.fromJS({
a: {id:'a', type: 'AGGREGATION', name:'foo', partitionDistributionStrategy: 'CONSOLIDATED'},
b: {id:'b', type: 'AGGREGATION', name:'foo', partitionDistributionStrategy: 'STRIPED'},
c: {id:'c', type: 'RAW', name:'foo', partitionDistributionStrategy: 'CONSOLIDATED'}
})
};
commonProps = {
...minimalProps,
renderBodyCell: sinon.spy(),
columns: Immutable.List([
{ name: 'columnA', type: 'TEXT' },
{ name: 'columnB', type: 'TEXT' }
])
};
wrapper = shallow(<AccelerationGrid {...commonProps}/>, context);
instance = wrapper.instance();
});
it('should render with minimal props without exploding', () => {
wrapper = shallow(<AccelerationGrid {...minimalProps}/>, context);
expect(wrapper).to.have.length(1);
});
it.skip('should correct render Columns and cells', () => {
wrapper = mount(<AccelerationGrid {...commonProps}/>, context);
expect(wrapper.find(Table)).to.have.length(1);
expect(wrapper.find('.fixedDataTableRowLayout_body .fixedDataTableCellGroupLayout_cellGroupWrapper')).to.have.length(2);
});
describe('#renderSubCellHeaders', () => {
it('should render Dimension and Measure when activeTab != raw and Display when activeTab == raw', () => {
let result = shallow(instance.renderSubCellHeaders());
expect(result.find('div')).to.have.length(6);
expect(result.find('div').at(1).text()).to.eql('Dimension');
expect(result.find('div').at(2).text()).to.eql('Measure');
wrapper.setProps({activeTab: 'raw'});
result = shallow(instance.renderSubCellHeaders());
expect(result.find('div')).to.have.length(5);
expect(result.find('div').at(1).text()).to.eql('Display');
});
it('should only render Distribution when props.shouldShowDistribution', () => {
let result = shallow(instance.renderSubCellHeaders());
expect(result.find('div')).to.have.length(6);
expect(result.find('div').at(5).text()).to.eql('Distribution');
wrapper.setProps({shouldShowDistribution: false});
result = shallow(instance.renderSubCellHeaders());
expect(result.find('div')).to.have.length(5);
expect(result.find('div').at(4).text()).to.eql('Partition');
});
});
it('#renderExtraLayoutSettingsModal', () => {
let result = shallow(instance.renderExtraLayoutSettingsModal(0, 'name'));
expect(result.find('Modal').props().isOpen).to.equal(false);
instance.setState({visibleLayoutExtraSettingsIndex: 0});
result = shallow(instance.renderExtraLayoutSettingsModal(0, 'name'));
expect(result.find('Modal').props().isOpen).to.equal(true);
instance.renderExtraLayoutSettingsModal(0, 'name').props.hide();
result = shallow(instance.renderExtraLayoutSettingsModal(0, 'name'));
expect(result.find('Modal').props().isOpen).to.equal(false);
});
describe('#componentWillReceiveProps()', () => {
it('resets focusedColumn if activeTab changes', () => {
instance.focusedColumn = 1;
instance.componentWillReceiveProps({
activeTab: 'raw',
layoutFields: [
{id: {value: 'c'}}
]
});
expect(instance.focusedColumn).to.equal(undefined);
});
it('sets focusedColumn for newly added columns', () => {
instance.componentWillReceiveProps({
activeTab: 'aggregation',
layoutFields: [
{id: {value: 'b'}},
{id: {value: 'c'}} | ]
});
expect(instance.focusedColumn).to.equal(1);
});
it('does nothing with focusedColumn for newly removed columns (so no jump)', () => {
instance.focusedColumn = 1;
instance.componentWillReceiveProps({
activeTab: 'aggregation',
layoutFields: []
});
expect(instance.focusedColumn).to.equal(1);
});
});
}); | |
color.rs | pub trait Color {
const WHITE: Self;
const BLACK: Self;
const RED: Self;
const GREEN: Self;
const BLUE: Self;
}
/// Widens a 5 bit color channel into an 8 bit.
/// The top 3 bits of value5 must be 0 (this isn't enforced by this private function, but e.g.
/// RGBA1555 does enforce this)
fn widen_5_to_8(value5: u8) -> u8 {
// What to put into the lowest bits?
// - 0: Colors will be nicely smoothed out, but we can never reach 0xFF (e.g. full white)
// - Repeat lowest bit: We can reach white, but colors are unevenly spaced
// - Dither: Some hardware does that, but it wouldn't be deterministic
// - Repeat highest 3 bits: Seems to have nice properties: Colors are smooth and we can reach
// full black and full white
(value5 << 3) | (value5 >> 2)
}
#[derive(Copy, Clone)]
#[repr(C)]
pub struct RGBA1555 {
raw_value: u16,
}
impl RGBA1555 {
pub const fn new_with_raw_value(value: u16) -> RGBA1555 { RGBA1555 { raw_value: value } }
pub const fn alpha(&self) -> bool { (self.raw_value & (1u16 << 0usize)) != 0 }
pub const fn with_alpha(&self, field_value: bool) -> Self {
Self {
raw_value: if field_value { self.raw_value | (1u16 << 0usize) } else { self.raw_value & !(1u16 << 0usize) }
}
}
pub const fn blue(&self) -> u8 { ((self.raw_value >> 1usize) & ((1u16 << 5usize) - 1u16)) as u8 }
pub const fn with_blue(&self, field_value: u8) -> Self {
Self {
raw_value: (self.raw_value & !(((1u16 << 5usize) - 1u16) << 1usize)) |
((field_value as u16) << 1usize)
}
}
pub const fn green(&self) -> u8 { ((self.raw_value >> 6usize) & ((1u16 << 5usize) - 1u16)) as u8 }
pub const fn with_green(&self, field_value: u8) -> Self {
Self {
raw_value: (self.raw_value & !(((1u16 << 5usize) - 1u16) << 6usize)) |
((field_value as u16) << 6usize)
}
}
pub const fn red(&self) -> u8 { ((self.raw_value >> 11usize) & ((1u16 << 5usize) - 1u16)) as u8 }
pub const fn with_red(&self, field_value: u8) -> Self {
Self {
raw_value: (self.raw_value & !(((1u16 << 5usize) - 1u16) << 11usize)) |
((field_value as u16) << 11usize)
}
}
}
impl RGBA1555 {
pub const fn new(red: u8, green: u8, blue: u8, alpha: bool) -> Self {
assert!(red < 32);
assert!(green < 32);
assert!(blue < 32);
Self::new_with_raw_value(0)
.with_red(red)
.with_green(green)
.with_blue(blue)
.with_alpha(alpha)
}
pub const fn from_argb8888(value: ARGB8888) -> Self {
Self::new(
value.red() >> 3,
value.green() >> 3,
value.blue() >> 3,
value.alpha() > 127)
}
}
impl Color for RGBA1555 {
const WHITE: Self = Self::from_argb8888(ARGB8888::WHITE);
const BLACK: Self = Self::from_argb8888(ARGB8888::BLACK);
const RED: Self = Self::from_argb8888(ARGB8888::RED);
const GREEN: Self = Self::from_argb8888(ARGB8888::GREEN);
const BLUE: Self = Self::from_argb8888(ARGB8888::BLUE);
}
impl From<ARGB8888> for RGBA1555 {
fn from(value: ARGB8888) -> Self {
Self::from_argb8888(value)
}
}
impl From<RGBA1555> for ARGB8888 {
fn from(value: RGBA1555) -> Self {
ARGB8888::new(
widen_5_to_8(value.red()),
widen_5_to_8(value.green()),
widen_5_to_8(value.blue()),
if value.alpha() { 0xFF } else | )
}
}
#[derive(Copy, Clone)]
#[repr(C)]
pub struct ARGB8888 {
raw_value: u32,
}
impl ARGB8888
{
pub const fn new_with_raw_value(value: u32) -> ARGB8888 { ARGB8888 { raw_value: value } }
pub const fn blue(&self) -> u8 { ((self.raw_value >> 0usize) & ((1u32 << 8usize) - 1u32)) as u8 }
pub const fn with_blue(&self, field_value: u8) -> Self {
Self {
raw_value:
(self.raw_value & !(((1u32 << 8usize) - 1u32) << 0usize)) |
((field_value as u32) << 0usize)
}
}
pub const fn green(&self) -> u8 { ((self.raw_value >> 8usize) & ((1u32 << 8usize) - 1u32)) as u8 }
pub const fn with_green(&self, field_value: u8) -> Self {
Self {
raw_value:
(self.raw_value & !(((1u32 << 8usize) - 1u32) << 8usize)) |
((field_value as u32) << 8usize)
}
}
pub const fn red(&self) -> u8 { ((self.raw_value >> 16usize) & ((1u32 << 8usize) - 1u32)) as u8 }
pub const fn with_red(&self, field_value: u8) -> Self
{
Self
{
raw_value:
(self.raw_value & !(((1u32 << 8usize) - 1u32) << 16usize)) |
((field_value as u32) << 16usize)
}
}
pub const fn alpha(&self) -> u8 { ((self.raw_value >> 24usize) & ((1u32 << 8usize) - 1u32)) as u8 }
pub const fn with_alpha(&self, field_value: u8) -> Self {
Self {
raw_value:
(self.raw_value & !(((1u32 << 8usize) - 1u32) << 24usize)) |
((field_value as u32) << 24usize)
}
}
}
impl ARGB8888 {
pub const fn new(red: u8, green: u8, blue: u8, alpha: u8) -> Self {
Self::new_with_raw_value(0)
.with_red(red)
.with_green(green)
.with_blue(blue)
.with_alpha(alpha)
}
}
impl Color for ARGB8888 {
const WHITE: Self = Self::new(255, 255, 255, 0);
const BLACK: Self = Self::new(0, 0, 0, 0);
const RED: Self = Self::new(255, 0, 0, 0);
const GREEN: Self = Self::new(0, 255, 0, 0);
const BLUE: Self = Self::new(0, 0, 255, 0);
}
| { 0x00 } |
index.ts | import { NgModule } from '@angular/core';
export * from './piwik-functions';
export * from './angular-piwik-components';
@NgModule({}) | export class Angular2PiwikModule {} |
|
create_file_conversion.py | from typing import Any, Dict, Optional, Union, cast
import httpx
from ...client import Client
from ...models.file_conversion_with_output import FileConversionWithOutput
from ...models.error import Error
from ...models.file_conversion_output_format import FileConversionOutputFormat
from ...models.file_conversion_source_format import FileConversionSourceFormat
from ...types import Response
def _get_kwargs(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Dict[str, Any]:
url = "{}/file/conversion/{src_format}/{output_format}".format(client.base_url, output_format=output_format, src_format=src_format)
headers: Dict[str, Any] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
return {
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
"content": body,
}
def _parse_response(*, response: httpx.Response) -> Optional[Union[Any, FileConversionWithOutput, Error]]:
if response.status_code == 201:
response_201 = FileConversionWithOutput.from_dict(response.json())
return response_201
if response.status_code == 400:
response_4XX = Error.from_dict(response.json())
return response_4XX
if response.status_code == 500:
response_5XX = Error.from_dict(response.json())
return response_5XX
return None
def _build_response(*, response: httpx.Response) -> Response[Union[Any, FileConversionWithOutput, Error]]:
return Response(
status_code=response.status_code,
content=response.content,
headers=response.headers,
parsed=_parse_response(response=response),
)
def sync_detailed(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Response[Union[Any, FileConversionWithOutput, Error]]:
kwargs = _get_kwargs(
output_format=output_format,
src_format=src_format,
body=body,
client=client, | response = httpx.post(
verify=client.verify_ssl,
**kwargs,
)
return _build_response(response=response)
def sync(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Optional[Union[Any, FileConversionWithOutput, Error]]:
""" Convert a CAD file from one format to another. If the file being converted is larger than 30MB, it will be performed asynchronously.
If the conversion is performed synchronously, the contents of the converted file (`output`) will be returned as a base64 encoded string.
If the conversion is performed asynchronously, the `id` of the conversion will be returned. You can use the `id` returned from the request to get status information about the async conversion from the `/file/conversions/{id}` endpoint. """
return sync_detailed(
output_format=output_format,
src_format=src_format,
body=body,
client=client,
).parsed
async def asyncio_detailed(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Response[Union[Any, FileConversionWithOutput, Error]]:
kwargs = _get_kwargs(
output_format=output_format,
src_format=src_format,
body=body,
client=client,
)
async with httpx.AsyncClient(verify=client.verify_ssl) as _client:
response = await _client.post(**kwargs)
return _build_response(response=response)
async def asyncio(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Optional[Union[Any, FileConversionWithOutput, Error]]:
""" Convert a CAD file from one format to another. If the file being converted is larger than 30MB, it will be performed asynchronously.
If the conversion is performed synchronously, the contents of the converted file (`output`) will be returned as a base64 encoded string.
If the conversion is performed asynchronously, the `id` of the conversion will be returned. You can use the `id` returned from the request to get status information about the async conversion from the `/file/conversions/{id}` endpoint. """
return (
await asyncio_detailed(
output_format=output_format,
src_format=src_format,
body=body,
client=client,
)
).parsed | )
|
invites.go | package handler
import (
"context"
"math/rand"
"regexp"
"strconv"
"strings"
"github.com/google/uuid"
"github.com/micro/micro/v3/service/errors"
"github.com/micro/micro/v3/service/logger"
pb "github.com/m3o/distributed-api/invites/proto"
"gorm.io/gorm"
)
var (
ErrMissingID = errors.BadRequest("MISSING_ID", "Missing ID")
ErrMissingGroupID = errors.BadRequest("MISSING_GROUP_ID", "Missing GroupID")
ErrInvalidEmail = errors.BadRequest("INVALID_EMAIL", "The email provided was invalid")
ErrMissingEmail = errors.BadRequest("MISSING_EMAIL", "Missing Email")
ErrMissingIDAndCode = errors.BadRequest("ID_OR_CODE_REQUIRED", "An email address code is required to read an invite")
ErrMissingGroupIDAndEmail = errors.BadRequest("GROUP_ID_OR_EMAIL_REQUIRED", "An email address or group id is needed to list invites")
ErrInviteNotFound = errors.NotFound("NOT_FOUND", "Invite not found")
emailRegex = regexp.MustCompile("^[a-zA-Z0-9.!#$%&'*+\\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$")
)
type Invite struct {
ID string
Email string `gorm:"uniqueIndex:group_email"`
GroupID string `gorm:"uniqueIndex:group_email"`
Code string `gorm:"uniqueIndex"`
}
func (i *Invite) Serialize() *pb.Invite {
return &pb.Invite{
Id: i.ID,
Email: i.Email,
GroupId: i.GroupID,
Code: i.Code,
}
}
type Invites struct {
DB *gorm.DB
}
// Create an invite
func (i *Invites) Create(ctx context.Context, req *pb.CreateRequest, rsp *pb.CreateResponse) error {
// validate the request
if len(req.GroupId) == 0 {
return ErrMissingGroupID
}
if len(req.Email) == 0 {
return ErrMissingEmail
}
if !isEmailValid(req.Email) {
return ErrInvalidEmail
}
// construct the invite and write to the db
invite := &Invite{
ID: uuid.New().String(),
Code: generateCode(),
GroupID: req.GroupId,
Email: strings.ToLower(req.Email),
}
if err := i.DB.Create(invite).Error; err != nil && strings.Contains(err.Error(), "group_email") {
} else if err != nil {
logger.Errorf("Error writing to the store: %v", err)
return errors.InternalServerError("DATABASE_ERROR", "Error connecting to the database")
}
// serialize the response
rsp.Invite = invite.Serialize()
return nil
}
// Read an invite using ID or code
func (i *Invites) Read(ctx context.Context, req *pb.ReadRequest, rsp *pb.ReadResponse) error {
// validate the request
var query Invite
if req.Id != nil {
query.ID = req.Id.Value
} else if req.Code != nil {
query.Code = req.Code.Value
} else {
return ErrMissingIDAndCode
}
// query the database
var invite Invite
if err := i.DB.Where(&query).First(&invite).Error; err == gorm.ErrRecordNotFound {
return ErrInviteNotFound
} else if err != nil {
logger.Errorf("Error reading from the store: %v", err)
return errors.InternalServerError("DATABASE_ERROR", "Error connecting to the database")
}
// serialize the response
rsp.Invite = invite.Serialize()
return nil
}
// List invited for a group or specific email
func (i *Invites) List(ctx context.Context, req *pb.ListRequest, rsp *pb.ListResponse) error {
// validate the request
if req.Email == nil && req.GroupId == nil {
return ErrMissingGroupIDAndEmail
}
| }
if req.Email != nil {
query.Email = strings.ToLower(req.Email.Value)
}
// query the database
var invites []Invite
if err := i.DB.Where(&query).Find(&invites).Error; err != nil {
logger.Errorf("Error reading from the store: %v", err)
return errors.InternalServerError("DATABASE_ERROR", "Error connecting to the database")
}
// serialize the response
rsp.Invites = make([]*pb.Invite, len(invites))
for i, inv := range invites {
rsp.Invites[i] = inv.Serialize()
}
return nil
}
// Delete an invite
func (i *Invites) Delete(ctx context.Context, req *pb.DeleteRequest, rsp *pb.DeleteResponse) error {
// validate the request
if len(req.Id) == 0 {
return ErrMissingID
}
// delete from the database
if err := i.DB.Where(&Invite{ID: req.Id}).Delete(&Invite{}).Error; err != nil {
logger.Errorf("Error deleting from the store: %v", err)
return errors.InternalServerError("DATABASE_ERROR", "Error connecting to the database")
}
return nil
}
// isEmailValid checks if the email provided passes the required structure and length.
func isEmailValid(e string) bool {
if len(e) < 3 && len(e) > 254 {
return false
}
return emailRegex.MatchString(e)
}
// generateCode generates a random 8 digit code
func generateCode() string {
v := rand.Intn(89999999) + 10000000
return strconv.Itoa(v)
} | // construct the query
var query Invite
if req.GroupId != nil {
query.GroupID = req.GroupId.Value |
moving_average_f64.rs | #[derive(Debug, Clone)]
pub struct MovingAverage {
period: usize,
sum: f64,
deque: std::collections::VecDeque<f64>,
}
impl MovingAverage {
pub fn new(period: usize) -> Self {
Self { | }
pub fn latest(&mut self, new_val: f64) -> Option<f64> {
self.deque.push_back(new_val);
let old_val = match self.deque.len() > self.period {
true => self.deque.pop_front().unwrap(),
false => 0.0,
};
self.sum += new_val - old_val;
match self.deque.len() == self.period {
true => Some(self.sum / self.period as f64),
false => None,
}
}
}
fn calc_stream(average_length: usize) -> Vec<f64> {
let input_data = 1..=10;
let mut ma = MovingAverage::new(average_length);
let moving_averages = input_data
// .map(|n| n as f64) // このf64への変換を入れれば型があうが・・・
.filter_map(|new_val| ma.latest(new_val))
.collect::<Vec<_>>();
moving_averages
}
pub fn main() -> () {
let ma = calc_stream(2);
println!("{:?}", ma);
} | period,
sum: 0.0,
deque: std::collections::VecDeque::new(),
} |
LamSlotWind.py | # -*- coding: utf-8 -*-
# File generated according to Generator/ClassesRef/Machine/LamSlotWind.csv
# WARNING! All changes made in this file will be lost!
"""Method code available at https://github.com/Eomys/pyleecan/tree/master/pyleecan/Methods/Machine/LamSlotWind
"""
from os import linesep
from sys import getsizeof
from logging import getLogger
from ._check import check_var, raise_
from ..Functions.get_logger import get_logger
from ..Functions.save import save
from ..Functions.copy import copy
from ..Functions.load import load_init_dict
from ..Functions.Load.import_class import import_class
from .LamSlot import LamSlot
# Import all class method
# Try/catch to remove unnecessary dependencies in unused method
try:
from ..Methods.Machine.LamSlotWind.build_geometry import build_geometry
except ImportError as error:
build_geometry = error
try:
from ..Methods.Machine.LamSlotWind.check import check
except ImportError as error:
check = error
try:
from ..Methods.Machine.LamSlotWind.comp_masses import comp_masses
except ImportError as error:
comp_masses = error | from ..Methods.Machine.LamSlotWind.comp_surfaces import comp_surfaces
except ImportError as error:
comp_surfaces = error
try:
from ..Methods.Machine.LamSlotWind.comp_volumes import comp_volumes
except ImportError as error:
comp_volumes = error
try:
from ..Methods.Machine.LamSlotWind.get_pole_pair_number import get_pole_pair_number
except ImportError as error:
get_pole_pair_number = error
try:
from ..Methods.Machine.LamSlotWind.get_name_phase import get_name_phase
except ImportError as error:
get_name_phase = error
try:
from ..Methods.Machine.LamSlotWind.plot import plot
except ImportError as error:
plot = error
try:
from ..Methods.Machine.LamSlotWind.plot_winding import plot_winding
except ImportError as error:
plot_winding = error
try:
from ..Methods.Machine.LamSlotWind.comp_fill_factor import comp_fill_factor
except ImportError as error:
comp_fill_factor = error
try:
from ..Methods.Machine.LamSlotWind.comp_output_geo import comp_output_geo
except ImportError as error:
comp_output_geo = error
try:
from ..Methods.Machine.LamSlotWind.get_polar_eq import get_polar_eq
except ImportError as error:
get_polar_eq = error
try:
from ..Methods.Machine.LamSlotWind.comp_wind_function import comp_wind_function
except ImportError as error:
comp_wind_function = error
try:
from ..Methods.Machine.LamSlotWind.plot_mmf_unit import plot_mmf_unit
except ImportError as error:
plot_mmf_unit = error
try:
from ..Methods.Machine.LamSlotWind.comp_resistance_wind import comp_resistance_wind
except ImportError as error:
comp_resistance_wind = error
try:
from ..Methods.Machine.LamSlotWind.comp_angle_d_axis import comp_angle_d_axis
except ImportError as error:
comp_angle_d_axis = error
try:
from ..Methods.Machine.LamSlotWind.comp_mmf_unit import comp_mmf_unit
except ImportError as error:
comp_mmf_unit = error
try:
from ..Methods.Machine.LamSlotWind.comp_rot_dir import comp_rot_dir
except ImportError as error:
comp_rot_dir = error
try:
from ..Methods.Machine.LamSlotWind.comp_lengths_winding import comp_lengths_winding
except ImportError as error:
comp_lengths_winding = error
try:
from ..Methods.Machine.LamSlotWind.comp_number_phase_eq import comp_number_phase_eq
except ImportError as error:
comp_number_phase_eq = error
try:
from ..Methods.Machine.LamSlotWind.comp_periodicity import comp_periodicity
except ImportError as error:
comp_periodicity = error
from ._check import InitUnKnowClassError
from .Winding import Winding
from .Slot import Slot
from .Material import Material
from .Hole import Hole
from .Notch import Notch
from .Bore import Bore
class LamSlotWind(LamSlot):
"""Lamination with Slot filled with winding"""
VERSION = 1
# Check ImportError to remove unnecessary dependencies in unused method
# cf Methods.Machine.LamSlotWind.build_geometry
if isinstance(build_geometry, ImportError):
build_geometry = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method build_geometry: "
+ str(build_geometry)
)
)
)
else:
build_geometry = build_geometry
# cf Methods.Machine.LamSlotWind.check
if isinstance(check, ImportError):
check = property(
fget=lambda x: raise_(
ImportError("Can't use LamSlotWind method check: " + str(check))
)
)
else:
check = check
# cf Methods.Machine.LamSlotWind.comp_masses
if isinstance(comp_masses, ImportError):
comp_masses = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_masses: " + str(comp_masses)
)
)
)
else:
comp_masses = comp_masses
# cf Methods.Machine.LamSlotWind.comp_surfaces
if isinstance(comp_surfaces, ImportError):
comp_surfaces = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_surfaces: " + str(comp_surfaces)
)
)
)
else:
comp_surfaces = comp_surfaces
# cf Methods.Machine.LamSlotWind.comp_volumes
if isinstance(comp_volumes, ImportError):
comp_volumes = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_volumes: " + str(comp_volumes)
)
)
)
else:
comp_volumes = comp_volumes
# cf Methods.Machine.LamSlotWind.get_pole_pair_number
if isinstance(get_pole_pair_number, ImportError):
get_pole_pair_number = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method get_pole_pair_number: "
+ str(get_pole_pair_number)
)
)
)
else:
get_pole_pair_number = get_pole_pair_number
# cf Methods.Machine.LamSlotWind.get_name_phase
if isinstance(get_name_phase, ImportError):
get_name_phase = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method get_name_phase: "
+ str(get_name_phase)
)
)
)
else:
get_name_phase = get_name_phase
# cf Methods.Machine.LamSlotWind.plot
if isinstance(plot, ImportError):
plot = property(
fget=lambda x: raise_(
ImportError("Can't use LamSlotWind method plot: " + str(plot))
)
)
else:
plot = plot
# cf Methods.Machine.LamSlotWind.plot_winding
if isinstance(plot_winding, ImportError):
plot_winding = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method plot_winding: " + str(plot_winding)
)
)
)
else:
plot_winding = plot_winding
# cf Methods.Machine.LamSlotWind.comp_fill_factor
if isinstance(comp_fill_factor, ImportError):
comp_fill_factor = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_fill_factor: "
+ str(comp_fill_factor)
)
)
)
else:
comp_fill_factor = comp_fill_factor
# cf Methods.Machine.LamSlotWind.comp_output_geo
if isinstance(comp_output_geo, ImportError):
comp_output_geo = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_output_geo: "
+ str(comp_output_geo)
)
)
)
else:
comp_output_geo = comp_output_geo
# cf Methods.Machine.LamSlotWind.get_polar_eq
if isinstance(get_polar_eq, ImportError):
get_polar_eq = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method get_polar_eq: " + str(get_polar_eq)
)
)
)
else:
get_polar_eq = get_polar_eq
# cf Methods.Machine.LamSlotWind.comp_wind_function
if isinstance(comp_wind_function, ImportError):
comp_wind_function = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_wind_function: "
+ str(comp_wind_function)
)
)
)
else:
comp_wind_function = comp_wind_function
# cf Methods.Machine.LamSlotWind.plot_mmf_unit
if isinstance(plot_mmf_unit, ImportError):
plot_mmf_unit = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method plot_mmf_unit: " + str(plot_mmf_unit)
)
)
)
else:
plot_mmf_unit = plot_mmf_unit
# cf Methods.Machine.LamSlotWind.comp_resistance_wind
if isinstance(comp_resistance_wind, ImportError):
comp_resistance_wind = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_resistance_wind: "
+ str(comp_resistance_wind)
)
)
)
else:
comp_resistance_wind = comp_resistance_wind
# cf Methods.Machine.LamSlotWind.comp_angle_d_axis
if isinstance(comp_angle_d_axis, ImportError):
comp_angle_d_axis = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_angle_d_axis: "
+ str(comp_angle_d_axis)
)
)
)
else:
comp_angle_d_axis = comp_angle_d_axis
# cf Methods.Machine.LamSlotWind.comp_mmf_unit
if isinstance(comp_mmf_unit, ImportError):
comp_mmf_unit = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_mmf_unit: " + str(comp_mmf_unit)
)
)
)
else:
comp_mmf_unit = comp_mmf_unit
# cf Methods.Machine.LamSlotWind.comp_rot_dir
if isinstance(comp_rot_dir, ImportError):
comp_rot_dir = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_rot_dir: " + str(comp_rot_dir)
)
)
)
else:
comp_rot_dir = comp_rot_dir
# cf Methods.Machine.LamSlotWind.comp_lengths_winding
if isinstance(comp_lengths_winding, ImportError):
comp_lengths_winding = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_lengths_winding: "
+ str(comp_lengths_winding)
)
)
)
else:
comp_lengths_winding = comp_lengths_winding
# cf Methods.Machine.LamSlotWind.comp_number_phase_eq
if isinstance(comp_number_phase_eq, ImportError):
comp_number_phase_eq = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_number_phase_eq: "
+ str(comp_number_phase_eq)
)
)
)
else:
comp_number_phase_eq = comp_number_phase_eq
# cf Methods.Machine.LamSlotWind.comp_periodicity
if isinstance(comp_periodicity, ImportError):
comp_periodicity = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_periodicity: "
+ str(comp_periodicity)
)
)
)
else:
comp_periodicity = comp_periodicity
# save and copy methods are available in all object
save = save
copy = copy
# get_logger method is available in all object
get_logger = get_logger
def __init__(
self,
Ksfill=None,
winding=-1,
slot=-1,
L1=0.35,
mat_type=-1,
Nrvd=0,
Wrvd=0,
Kf1=0.95,
is_internal=True,
Rint=0,
Rext=1,
is_stator=True,
axial_vent=-1,
notch=-1,
yoke_notch=-1,
bore=None,
init_dict=None,
init_str=None,
):
"""Constructor of the class. Can be use in three ways :
- __init__ (arg1 = 1, arg3 = 5) every parameters have name and default values
for pyleecan type, -1 will call the default constructor
- __init__ (init_dict = d) d must be a dictionnary with property names as keys
- __init__ (init_str = s) s must be a string
s is the file path to load
ndarray or list can be given for Vector and Matrix
object or dict can be given for pyleecan Object"""
if init_str is not None: # Load from a file
init_dict = load_init_dict(init_str)[1]
if init_dict is not None: # Initialisation by dict
assert type(init_dict) is dict
# Overwrite default value with init_dict content
if "Ksfill" in list(init_dict.keys()):
Ksfill = init_dict["Ksfill"]
if "winding" in list(init_dict.keys()):
winding = init_dict["winding"]
if "slot" in list(init_dict.keys()):
slot = init_dict["slot"]
if "L1" in list(init_dict.keys()):
L1 = init_dict["L1"]
if "mat_type" in list(init_dict.keys()):
mat_type = init_dict["mat_type"]
if "Nrvd" in list(init_dict.keys()):
Nrvd = init_dict["Nrvd"]
if "Wrvd" in list(init_dict.keys()):
Wrvd = init_dict["Wrvd"]
if "Kf1" in list(init_dict.keys()):
Kf1 = init_dict["Kf1"]
if "is_internal" in list(init_dict.keys()):
is_internal = init_dict["is_internal"]
if "Rint" in list(init_dict.keys()):
Rint = init_dict["Rint"]
if "Rext" in list(init_dict.keys()):
Rext = init_dict["Rext"]
if "is_stator" in list(init_dict.keys()):
is_stator = init_dict["is_stator"]
if "axial_vent" in list(init_dict.keys()):
axial_vent = init_dict["axial_vent"]
if "notch" in list(init_dict.keys()):
notch = init_dict["notch"]
if "yoke_notch" in list(init_dict.keys()):
yoke_notch = init_dict["yoke_notch"]
if "bore" in list(init_dict.keys()):
bore = init_dict["bore"]
# Set the properties (value check and convertion are done in setter)
self.Ksfill = Ksfill
self.winding = winding
# Call LamSlot init
super(LamSlotWind, self).__init__(
slot=slot,
L1=L1,
mat_type=mat_type,
Nrvd=Nrvd,
Wrvd=Wrvd,
Kf1=Kf1,
is_internal=is_internal,
Rint=Rint,
Rext=Rext,
is_stator=is_stator,
axial_vent=axial_vent,
notch=notch,
yoke_notch=yoke_notch,
bore=bore,
)
# The class is frozen (in LamSlot init), for now it's impossible to
# add new properties
def __str__(self):
"""Convert this object in a readeable string (for print)"""
LamSlotWind_str = ""
# Get the properties inherited from LamSlot
LamSlotWind_str += super(LamSlotWind, self).__str__()
LamSlotWind_str += "Ksfill = " + str(self.Ksfill) + linesep
if self.winding is not None:
tmp = self.winding.__str__().replace(linesep, linesep + "\t").rstrip("\t")
LamSlotWind_str += "winding = " + tmp
else:
LamSlotWind_str += "winding = None" + linesep + linesep
return LamSlotWind_str
def __eq__(self, other):
"""Compare two objects (skip parent)"""
if type(other) != type(self):
return False
# Check the properties inherited from LamSlot
if not super(LamSlotWind, self).__eq__(other):
return False
if other.Ksfill != self.Ksfill:
return False
if other.winding != self.winding:
return False
return True
def compare(self, other, name="self"):
"""Compare two objects and return list of differences"""
if type(other) != type(self):
return ["type(" + name + ")"]
diff_list = list()
# Check the properties inherited from LamSlot
diff_list.extend(super(LamSlotWind, self).compare(other, name=name))
if other._Ksfill != self._Ksfill:
diff_list.append(name + ".Ksfill")
if (other.winding is None and self.winding is not None) or (
other.winding is not None and self.winding is None
):
diff_list.append(name + ".winding None mismatch")
elif self.winding is not None:
diff_list.extend(
self.winding.compare(other.winding, name=name + ".winding")
)
return diff_list
def __sizeof__(self):
"""Return the size in memory of the object (including all subobject)"""
S = 0 # Full size of the object
# Get size of the properties inherited from LamSlot
S += super(LamSlotWind, self).__sizeof__()
S += getsizeof(self.Ksfill)
S += getsizeof(self.winding)
return S
def as_dict(self, **kwargs):
"""
Convert this object in a json serializable dict (can be use in __init__).
Optional keyword input parameter is for internal use only
and may prevent json serializability.
"""
# Get the properties inherited from LamSlot
LamSlotWind_dict = super(LamSlotWind, self).as_dict(**kwargs)
LamSlotWind_dict["Ksfill"] = self.Ksfill
if self.winding is None:
LamSlotWind_dict["winding"] = None
else:
LamSlotWind_dict["winding"] = self.winding.as_dict(**kwargs)
# The class name is added to the dict for deserialisation purpose
# Overwrite the mother class name
LamSlotWind_dict["__class__"] = "LamSlotWind"
return LamSlotWind_dict
def _set_None(self):
"""Set all the properties to None (except pyleecan object)"""
self.Ksfill = None
if self.winding is not None:
self.winding._set_None()
# Set to None the properties inherited from LamSlot
super(LamSlotWind, self)._set_None()
def _get_Ksfill(self):
"""getter of Ksfill"""
return self._Ksfill
def _set_Ksfill(self, value):
"""setter of Ksfill"""
check_var("Ksfill", value, "float", Vmin=0, Vmax=1)
self._Ksfill = value
Ksfill = property(
fget=_get_Ksfill,
fset=_set_Ksfill,
doc=u"""Imposed Slot Fill factor (if None, will be computed according to the winding and the slot)
:Type: float
:min: 0
:max: 1
""",
)
def _get_winding(self):
"""getter of winding"""
return self._winding
def _set_winding(self, value):
"""setter of winding"""
if isinstance(value, str): # Load from file
value = load_init_dict(value)[1]
if isinstance(value, dict) and "__class__" in value:
class_obj = import_class(
"pyleecan.Classes", value.get("__class__"), "winding"
)
value = class_obj(init_dict=value)
elif type(value) is int and value == -1: # Default constructor
value = Winding()
check_var("winding", value, "Winding")
self._winding = value
if self._winding is not None:
self._winding.parent = self
winding = property(
fget=_get_winding,
fset=_set_winding,
doc=u"""Lamination's Winding
:Type: Winding
""",
) |
try: |
ex110.py | from modulos import moeda
p = float(input('Digite o preço: R$ '))
moeda.resumo(p, 80, 35) | ||
List.test.ts | import JsonResponse from "../../../src/Interfaces/JsonResponse"
import { Collection } from "../../../src/Models/Models"
import { NetworkConnections } from "../../../src/Services/Services"
import nock from "nock"
import Config from "../../../src/Config/Config"
import Axios, { AxiosInstance } from "axios"
import { assert } from "chai"
import WorkflowType from "../../../src/Enums/WorkflowType"
import State from "../../../src/Enums/State"
import NetworkConnection from "../../../src/Models/NetworkConnection";
describe('NetworkConnections@list', () => {
it('should return a list of networkconnections', async () => {
const positiveResponse: JsonResponse<Collection<NetworkConnection>> = {
status: { | data: [
{
_id: "networkConnection-id",
partnership_id: "test-partnetship-id",
to_mailbox: "test-mailbox",
from_mailbox: 'test-mailbox',
network_status: 'active',
connection_type: 'FTP'
},
{
_id: "networkConnection-id-2",
partnership_id: "test-partnetship-id",
to_mailbox: "test-mailbox",
from_mailbox: 'test-mailbox',
network_status: 'active',
connection_type: 'FTP'
}
]
};
nock('http://api-gateway.localhost')
.get('/1/networkconnections?page=1')
.reply(200, positiveResponse);
const mockConfig = new Config();
const axios: AxiosInstance = Axios.create({
baseURL: 'http://api-gateway.localhost'
});
const networkconnectionsService = new NetworkConnections(mockConfig, axios);
const response = await networkconnectionsService.list();
assert.isArray(response.data);
assert.lengthOf(response.data, 2);
});
}); | code: 200,
success: true
}, |
crypto.rs | // Copyright 2017 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Cryptography related types and functions.
//!
//! [Sodium library](https://github.com/jedisct1/libsodium) is used under the hood through
//! [sodiumoxide rust bindings](https://github.com/dnaq/sodiumoxide).
use std::default::Default;
use std::ops::{Index, Range, RangeFrom, RangeFull, RangeTo};
use std::fmt;
use sodiumoxide::crypto::sign::ed25519::{gen_keypair as gen_keypair_sodium, keypair_from_seed,
sign_detached, verify_detached,
PublicKey as PublicKeySodium,
SecretKey as SecretKeySodium, Seed as SeedSodium,
Signature as SignatureSodium, State as SignState};
use sodiumoxide::crypto::hash::sha256::{hash as hash_sodium, Digest, State as HashState};
use sodiumoxide;
use serde::{Serialize, Serializer};
use serde::de::{self, Deserialize, Deserializer, Visitor};
use encoding::serialize::FromHex;
pub use sodiumoxide::crypto::sign::ed25519::{PUBLICKEYBYTES as PUBLIC_KEY_LENGTH,
SECRETKEYBYTES as SECRET_KEY_LENGTH,
SEEDBYTES as SEED_LENGTH,
SIGNATUREBYTES as SIGNATURE_LENGTH};
pub use sodiumoxide::crypto::hash::sha256::DIGESTBYTES as HASH_SIZE;
/// The size to crop the string in debug messages.
const BYTES_IN_DEBUG: usize = 4;
/// Signs slice of bytes using the signer's secret key. Returns the resulting `Signature`.
///
/// # Examples
///
/// ```
/// use exonum::crypto;
///
/// # crypto::init();
/// let (public_key, secret_key) = crypto::gen_keypair();
/// let data = [1, 2, 3];
/// let signature = crypto::sign(&data, &secret_key);
/// assert!(crypto::verify(&signature, &data, &public_key));
/// ```
pub fn sign(data: &[u8], secret_key: &SecretKey) -> Signature {
let sodium_signature = sign_detached(data, &secret_key.0);
Signature(sodium_signature)
}
/// Computes a secret key and a corresponding public key from a `Seed`.
///
/// # Examples
///
/// ```
/// use exonum::crypto::{self, Seed};
///
/// # crypto::init();
/// let (public_key, secret_key) = crypto::gen_keypair_from_seed(&Seed::new([1; 32]));
/// # drop(public_key);
/// # drop(secret_key);
/// ```
pub fn gen_keypair_from_seed(seed: &Seed) -> (PublicKey, SecretKey) {
let (sod_pub_key, sod_secr_key) = keypair_from_seed(&seed.0);
(PublicKey(sod_pub_key), SecretKey(sod_secr_key))
}
/// Generates a secret key and a corresponding public key using a cryptographically secure
/// pseudo-random number generator.
///
/// # Examples
///
/// ```
/// use exonum::crypto;
///
/// # crypto::init();
/// let (public_key, secret_key) = crypto::gen_keypair();
/// # drop(public_key);
/// # drop(secret_key);
/// ```
pub fn gen_keypair() -> (PublicKey, SecretKey) {
let (pubkey, secrkey) = gen_keypair_sodium();
(PublicKey(pubkey), SecretKey(secrkey))
}
/// Verifies that `data` is signed with a secret key corresponding to the given public key.
///
/// # Examples
///
/// ```
/// use exonum::crypto;
///
/// # crypto::init();
/// let (public_key, secret_key) = crypto::gen_keypair();
/// let data = [1, 2, 3];
/// let signature = crypto::sign(&data, &secret_key);
/// assert!(crypto::verify(&signature, &data, &public_key));
/// ```
pub fn verify(sig: &Signature, data: &[u8], pubkey: &PublicKey) -> bool {
verify_detached(&sig.0, data, &pubkey.0)
}
/// Calculates an SHA-256 hash digest of a bytes slice.
///
/// # Examples
///
/// ```
/// use exonum::crypto;
///
/// # crypto::init();
/// let data = [1, 2, 3];
/// let hash = crypto::hash(&data);
/// # drop(hash);
/// ```
pub fn hash(data: &[u8]) -> Hash {
let dig = hash_sodium(data);
Hash(dig)
}
/// Initializes the sodium library and chooses faster versions of the primitives if possible.
///
/// # Panics
///
/// Panics if sodium initialization is failed.
///
/// # Examples
///
/// ```
/// use exonum::crypto;
///
/// crypto::init();
/// ```
pub fn init() {
if !sodiumoxide::init() {
panic!("Cryptographic library hasn't initialized.");
}
}
/// This structure provides a possibility to calculate a SHA-256 hash digest
/// for a stream of data.
///
/// # Example
///
/// ```rust
/// use exonum::crypto::HashStream;
///
/// let data: Vec<[u8; 5]> = vec![[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]];
/// let mut hash_stream = HashStream::new();
/// for chunk in data {
/// hash_stream = hash_stream.update(&chunk);
/// }
/// let _ = hash_stream.hash();
/// ```
#[derive(Debug, Default)]
pub struct HashStream(HashState);
impl HashStream {
/// Creates a new instance of `HashStream`.
pub fn new() -> Self {
HashStream(HashState::init())
}
/// Processes a chunk of stream and returns a `HashStream` with the updated internal state.
pub fn update(mut self, chunk: &[u8]) -> Self {
self.0.update(chunk);
self
}
/// Returns the hash of data supplied to the stream so far.
pub fn hash(self) -> Hash {
let dig = self.0.finalize();
Hash(dig)
}
}
/// This structure provides a possibility to create and/or verify Ed25519 digital signatures
/// for a stream of data.
///
/// # Example
///
/// ```rust
/// use exonum::crypto::{SignStream, gen_keypair};
///
/// let data: Vec<[u8; 5]> = vec![[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]];
/// let (pk, sk) = gen_keypair();
/// let mut create_stream = SignStream::new();
/// let mut verify_stream = SignStream::new();
/// for chunk in data {
/// create_stream = create_stream.update(&chunk);
/// verify_stream = verify_stream.update(&chunk);
/// }
/// let file_sign = create_stream.sign(&sk);
/// assert!(verify_stream.verify(&file_sign, &pk));
/// ```
#[derive(Debug, Default)]
pub struct SignStream(SignState);
impl SignStream {
/// Creates a new instance of `SignStream`.
pub fn new() -> Self {
SignStream(SignState::init())
}
/// Adds a new `chunk` to the message that will eventually be signed and/or verified.
pub fn update(mut self, chunk: &[u8]) -> Self {
self.0.update(chunk);
self
}
/// Computes and returns a signature for the previously supplied message
/// using the given `secret_key`.
pub fn sign(&mut self, secret_key: &SecretKey) -> Signature {
Signature(self.0.finalize(&secret_key.0))
}
/// Verifies that `sig` is a valid signature for the previously supplied message
/// using the given `public_key`.
pub fn verify(&mut self, sig: &Signature, public_key: &PublicKey) -> bool {
self.0.verify(&sig.0, &public_key.0)
}
}
macro_rules! implement_public_sodium_wrapper {
($(#[$attr:meta])* struct $name:ident, $name_from:ident, $size:expr) => (
#[derive(PartialEq, Eq, Clone, Copy, PartialOrd, Ord, Hash)]
$(#[$attr])*
pub struct $name($name_from);
impl $name {
/// Creates a new instance filled with zeros.
pub fn zero() -> Self {
$name::new([0; $size])
}
}
impl $name {
/// Creates a new instance from bytes array.
pub fn new(ba: [u8; $size]) -> Self {
$name($name_from(ba))
}
/// Creates a new instance from bytes slice.
pub fn from_slice(bs: &[u8]) -> Option<Self> {
$name_from::from_slice(bs).map($name)
}
/// Returns the hex representation of the binary data.
/// Lower case letters are used (e.g. f9b4ca).
pub fn to_hex(&self) -> String {
$crate::encoding::serialize::encode_hex(self)
}
}
impl AsRef<[u8]> for $name {
fn as_ref(&self) -> &[u8] {
self.0.as_ref()
}
}
impl ::std::str::FromStr for $name {
type Err = ::encoding::serialize::FromHexError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
$name::from_hex(s)
}
}
impl ToString for $name {
fn to_string(&self) -> String {
self.to_hex()
}
}
impl fmt::Debug for $name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, stringify!($name))?;
write!(f, "(")?;
for i in &self[0..BYTES_IN_DEBUG] {
write!(f, "{:02X}", i)?
}
write!(f, ")")
}
}
)
}
macro_rules! implement_private_sodium_wrapper {
($(#[$attr:meta])* struct $name:ident, $name_from:ident, $size:expr) => (
#[derive(Clone, PartialEq, Eq)]
$(#[$attr])*
pub struct $name($name_from);
impl $name {
/// Creates a new instance filled with zeros.
pub fn zero() -> Self {
$name::new([0; $size])
}
}
impl $name {
/// Creates a new instance from bytes array.
pub fn new(ba: [u8; $size]) -> Self {
$name($name_from(ba))
}
/// Creates a new instance from bytes slice.
pub fn from_slice(bs: &[u8]) -> Option<Self> {
$name_from::from_slice(bs).map($name)
}
/// Returns the hex representation of the binary data.
/// Lower case letters are used (e.g. f9b4ca).
pub fn to_hex(&self) -> String {
$crate::encoding::serialize::encode_hex(&self[..])
}
}
impl fmt::Debug for $name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, stringify!($name))?;
write!(f, "(")?;
for i in &self[0..BYTES_IN_DEBUG] {
write!(f, "{:02X}", i)?
}
write!(f, "...)")
}
}
impl $crate::encoding::serialize::ToHex for $name {
fn write_hex<W: ::std::fmt::Write>(&self, w: &mut W) -> ::std::fmt::Result {
(self.0).0.as_ref().write_hex(w)
}
fn write_hex_upper<W: ::std::fmt::Write>(&self, w: &mut W) -> ::std::fmt::Result {
(self.0).0.as_ref().write_hex_upper(w)
}
}
)
}
implement_public_sodium_wrapper! {
/// Ed25519 public key used to verify digital signatures.
///
/// # Examples
///
/// ```
/// use exonum::crypto;
///
/// # crypto::init();
/// let (public_key, _) = crypto::gen_keypair();
/// # drop(public_key);
/// ```
struct PublicKey, PublicKeySodium, PUBLIC_KEY_LENGTH
}
implement_private_sodium_wrapper! {
/// Ed25519 secret key used to create digital signatures over messages.
///
/// # Examples
///
/// ```
/// use exonum::crypto;
///
/// # crypto::init();
/// let (_, secret_key) = crypto::gen_keypair();
/// # drop(secret_key);
/// ```
struct SecretKey, SecretKeySodium, SECRET_KEY_LENGTH
}
implement_public_sodium_wrapper! {
/// SHA-256 hash.
///
/// # Examples
///
/// ```
/// use exonum::crypto::{self, Hash};
///
/// let data = [1, 2, 3];
/// let hash_from_data = crypto::hash(&data);
/// let default_hash = Hash::default();
/// # drop(hash_from_data);
/// # drop(default_hash);
/// ```
struct Hash, Digest, HASH_SIZE
}
implement_public_sodium_wrapper! {
/// Ed25519 digital signature.
///
/// # Examples
///
/// ```
/// use exonum::crypto;
///
/// # crypto::init();
/// let (public_key, secret_key) = crypto::gen_keypair();
/// let data = [1, 2, 3];
/// let signature = crypto::sign(&data, &secret_key);
/// assert!(crypto::verify(&signature, &data, &public_key));
/// ```
struct Signature, SignatureSodium, SIGNATURE_LENGTH
}
implement_private_sodium_wrapper! {
/// Ed25519 seed that can be used for deterministic keypair generation.
///
/// # Examples
///
/// ```
/// use exonum::crypto::{self, Seed};
///
/// # crypto::init();
/// let (public_key, secret_key) = crypto::gen_keypair_from_seed(&Seed::new([1; 32]));
/// # drop(public_key);
/// # drop(secret_key);
/// ```
struct Seed, SeedSodium, SEED_LENGTH
}
macro_rules! implement_serde {
($name:ident) => (
impl $crate::encoding::serialize::FromHex for $name {
type Error = $crate::encoding::serialize::FromHexError;
fn from_hex<T: AsRef<[u8]>>(v: T) -> Result<Self, Self::Error> {
let bytes = Vec::<u8>::from_hex(v)?;
if let Some(self_value) = Self::from_slice(bytes.as_ref()) {
Ok(self_value)
} else {
Err($crate::encoding::serialize::FromHexError::InvalidStringLength)
}
}
}
impl Serialize for $name
{
fn serialize<S>(&self, ser:S) -> Result<S::Ok, S::Error>
where S: Serializer
{
let hex_string = $crate::encoding::serialize::encode_hex(&self[..]);
ser.serialize_str(&hex_string)
}
}
impl<'de> Deserialize<'de> for $name
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: Deserializer<'de>
{
struct HexVisitor;
impl<'v> Visitor<'v> for HexVisitor
{
type Value = $name;
fn expecting (&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(fmt, "expecting str.")
}
fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
where E: de::Error
{
$name::from_hex(s).map_err(|_| de::Error::custom("Invalid hex"))
}
}
deserializer.deserialize_str(HexVisitor)
}
}
)
}
implement_serde! {Hash}
implement_serde! {PublicKey}
implement_serde! {SecretKey}
implement_serde! {Seed}
implement_serde! {Signature}
macro_rules! implement_index_traits {
($newtype:ident) => (
impl Index<Range<usize>> for $newtype {
type Output = [u8];
fn index(&self, _index: Range<usize>) -> &[u8] {
let inner = &self.0;
inner.0.index(_index)
}
}
impl Index<RangeTo<usize>> for $newtype {
type Output = [u8];
fn index(&self, _index: RangeTo<usize>) -> &[u8] {
let inner = &self.0;
inner.0.index(_index)
}
}
impl Index<RangeFrom<usize>> for $newtype {
type Output = [u8];
fn index(&self, _index: RangeFrom<usize>) -> &[u8] {
let inner = &self.0;
inner.0.index(_index)
}
}
impl Index<RangeFull> for $newtype {
type Output = [u8];
fn index(&self, _index: RangeFull) -> &[u8] {
let inner = &self.0;
inner.0.index(_index)
}
})
}
implement_index_traits! {Hash}
implement_index_traits! {PublicKey}
implement_index_traits! {SecretKey}
implement_index_traits! {Seed}
implement_index_traits! {Signature}
/// Returns hash consisting of zeros.
impl Default for Hash {
fn default() -> Hash {
Hash::zero()
}
}
#[cfg(test)]
mod tests {
use serde_json;
use encoding::serialize::FromHex;
use super::{gen_keypair, hash, Hash, HashStream, PublicKey, SecretKey, Seed, SignStream,
Signature};
#[test]
fn test_hash() {
let h = hash(&[]);
let h1 = Hash::from_hex(h.to_hex()).unwrap();
assert_eq!(h1, h);
let h = Hash::zero();
assert_eq!(*h.as_ref(), [0; 32]);
}
#[test]
fn test_keys() {
let (p, s) = gen_keypair();
let p1 = PublicKey::from_hex(p.to_hex()).unwrap();
let s1 = SecretKey::from_hex(s.to_hex()).unwrap();
assert_eq!(p1, p);
assert_eq!(s1, s);
}
#[test]
fn test_ser_deser() {
let h = Hash::new([207; 32]);
let json_h = serde_json::to_string(&h).unwrap();
let h1 = serde_json::from_str(&json_h).unwrap();
assert_eq!(h, h1);
let h = PublicKey::new([208; 32]);
let json_h = serde_json::to_string(&h).unwrap();
let h1 = serde_json::from_str(&json_h).unwrap();
assert_eq!(h, h1);
let h = Signature::new([209; 64]);
let json_h = serde_json::to_string(&h).unwrap();
let h1 = serde_json::from_str(&json_h).unwrap();
assert_eq!(h, h1);
let h = Seed::new([210; 32]);
let json_h = serde_json::to_string(&h).unwrap();
let h1 = serde_json::from_str(&json_h).unwrap();
assert_eq!(h, h1);
let h = SecretKey::new([211; 64]);
let json_h = serde_json::to_string(&h).unwrap();
let h1 = serde_json::from_str(&json_h).unwrap();
assert_eq!(h, h1);
}
#[test]
fn test_debug_format() {
// Check zero padding
let hash = Hash::new([1; 32]);
assert_eq!(format!("{:?}", &hash), "Hash(01010101)");
let pk = PublicKey::new([15; 32]);
assert_eq!(format!("{:?}", &pk), "PublicKey(0F0F0F0F)");
let sk = SecretKey::new([8; 64]);
assert_eq!(format!("{:?}", &sk), "SecretKey(08080808...)");
let signature = Signature::new([10; 64]);
assert_eq!(format!("{:?}", &signature), "Signature(0A0A0A0A)");
let seed = Seed::new([4; 32]);
assert_eq!(format!("{:?}", &seed), "Seed(04040404...)");
// Check no padding
let hash = Hash::new([128; 32]);
assert_eq!(format!("{:?}", &hash), "Hash(80808080)");
let sk = SecretKey::new([255; 64]);
assert_eq!(format!("{:?}", &sk), "SecretKey(FFFFFFFF...)");
}
#[test]
fn test_range_sodium() {
let h = hash(&[]);
let sub_range = &h[10..20];
assert_eq!(
&[244u8, 200, 153, 111, 185, 36, 39, 174, 65, 228],
sub_range
);
}
#[test]
fn test_hash_streaming_zero() {
let h1 = hash(&[]);
let state = HashStream::new();
let h2 = state.update(&[]).hash();
assert_eq!(h1, h2);
}
#[test]
fn test_hash_streaming_chunks() {
let data: [u8; 10] = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0];
let h1 = hash(&data);
let state = HashStream::new();
let h2 = state.update(&data[..5]).update(&data[5..]).hash();
assert_eq!(h1, h2);
}
#[test]
fn test_sign_streaming_zero() {
let (pk, sk) = gen_keypair();
let mut creation_stream = SignStream::new().update(&[]);
let sig = creation_stream.sign(&sk);
let mut verified_stream = SignStream::new().update(&[]);
assert!(verified_stream.verify(&sig, &pk));
}
#[test]
fn | () {
let data: [u8; 10] = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0];
let (pk, sk) = gen_keypair();
let mut creation_stream = SignStream::new().update(&data[..5]).update(&data[5..]);
let sig = creation_stream.sign(&sk);
let mut verified_stream = SignStream::new().update(&data[..5]).update(&data[5..]);
assert!(verified_stream.verify(&sig, &pk));
}
}
| test_sign_streaming_chunks |
app.js | /**
* This is an example of a basic node.js script that performs
* the Authorization Code oAuth2 flow to authenticate against
* the Spotify Accounts.
*
* For more information, read
* https://developer.spotify.com/web-api/authorization-guide/#authorization_code_flow
*/
var express = require('express'); // Express web server framework
var request = require('request'); // "Request" library
var cors = require('cors');
var querystring = require('querystring');
var cookieParser = require('cookie-parser');
var client_id = 'CLIENT_ID'; // Your client id
var client_secret = 'CLIENT_SECRET'; // Your secret
var redirect_uri = 'http://localhost:8888/callback/'; // Your redirect uri
var stateKey = 'spotify_auth_state';
function login(req, res) {
/**
* Generates a random string containing numbers and letters
* @param {number} length The length of the string
* @return {string} The generated string
*/
var generateRandomString = function(length) {
var text = '';
var possible = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
for (var i = 0; i < length; i++) {
text += possible.charAt(Math.floor(Math.random() * possible.length));
}
return text;
};
var state = generateRandomString(16);
res.cookie(stateKey, state);
// your application requests authorization
var scope = 'user-read-private user-read-email';
res.redirect('https://accounts.spotify.com/authorize?' +
querystring.stringify({
response_type: 'code',
client_id: client_id,
scope: scope,
redirect_uri: redirect_uri,
state: state
}));
}
function callback(req, res) {
// your application requests refresh and access tokens
// after checking the state parameter
var code = req.query.code || null;
var state = req.query.state || null;
var storedState = req.cookies ? req.cookies[stateKey] : null;
if (state === null || state !== storedState) {
res.redirect('/#' +
querystring.stringify({
error: 'state_mismatch'
}));
} else {
res.clearCookie(stateKey);
var authOptions = {
url: 'https://accounts.spotify.com/api/token',
form: {
code: code, | headers: {
'Authorization': 'Basic ' + (new Buffer(client_id + ':' + client_secret).toString('base64'))
},
json: true
};
request.post(authOptions, function(error, response, body) {
if (!error && response.statusCode === 200) {
var access_token = body.access_token,
refresh_token = body.refresh_token;
var options = {
url: 'https://api.spotify.com/v1/me',
headers: { 'Authorization': 'Bearer ' + access_token },
json: true
};
// use the access token to access the Spotify Web API
request.get(options, function(error, response, body) {
console.log(body);
});
// we can also pass the token to the browser to make requests from there
res.redirect('/#' +
querystring.stringify({
access_token: access_token,
refresh_token: refresh_token
}));
} else {
res.redirect('/#' +
querystring.stringify({
error: 'invalid_token'
}));
}
});
}
}
function refreshToken(req, res) {
// requesting access token from refresh token
var refresh_token = req.query.refresh_token;
var authOptions = {
url: 'https://accounts.spotify.com/api/token',
headers: { 'Authorization': 'Basic ' + (new Buffer(client_id + ':' + client_secret).toString('base64')) },
form: {
grant_type: 'refresh_token',
refresh_token: refresh_token
},
json: true
};
request.post(authOptions, function(error, response, body) {
if (!error && response.statusCode === 200) {
var access_token = body.access_token;
res.send({
'access_token': access_token
});
}
});
}
var app = express();
app.use(express.static(__dirname + '/public'))
.use(cors())
.use(cookieParser());
app.get('/login', login);
app.get('/callback', callback);
app.get('/refresh_token', refreshToken);
console.log('Listening on 8888');
app.listen(8888); | redirect_uri: redirect_uri,
grant_type: 'authorization_code'
}, |
article_type_search_query.py | """
AMPAREX Rest API Documentation
This is the description of the AMPAREX Rest API. All REST calls plus the corresponding data model are described in this documentation. Direct calls to the server are possible over this page.<br/>Following steps are needed to use the API:<br/><br/>1. Get the alias identifier of your login account from AMPAREX Software (Branch office administration) -> Service accounts -> your service account -> copy alias token)<br/>2. Please use the login URL /alias/{alias}/login under section \"Login\" below with your credentials to get a valid bearer token.<br/>3. Copy bearer token from login response<br/>3. Then click \"Authorize\" on the top of this page<br/>4. Insert into the field \"value\": \"Bearer {Your Bearer token}\" (without {}) for example \"Bearer 334d34d3dgh5tz5h5h\"<br/>4. Click Authorize<br/>5. Bearer token will be automatically used in the header for every following API call.<br/>6. Now you are ready to use the API<br/><br/>See also [documentation](https://manual.amparex.com/display/HAN/AMPAREX+API) for help<br/><br/>Documentation of all the used fields and objects is at the bottom of this page called \"Models\" # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from amparex.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from amparex.exceptions import ApiAttributeError
class ArticleTypeSearchQuery(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'hidden': (bool,), # noqa: E501
}
@cached_property
def | ():
return None
attribute_map = {
'hidden': 'hidden', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""ArticleTypeSearchQuery - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
hidden (bool): if true searches only for hidden article types, if false only not hidden types are found, if empty all article types independent of hidden are found. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""ArticleTypeSearchQuery - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
hidden (bool): if true searches only for hidden article types, if false only not hidden types are found, if empty all article types independent of hidden are found. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
| discriminator |
link.go | // Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package lsp
import (
"context"
"go/ast"
"go/token"
"regexp"
"strconv"
"sync"
"github.com/govim/govim/cmd/govim/internal/golang_org_x_tools/lsp/protocol"
"github.com/govim/govim/cmd/govim/internal/golang_org_x_tools/lsp/source"
"github.com/govim/govim/cmd/govim/internal/golang_org_x_tools/span"
"github.com/govim/govim/cmd/govim/internal/golang_org_x_tools/telemetry/log"
"github.com/govim/govim/cmd/govim/internal/golang_org_x_tools/telemetry/tag"
errors "golang.org/x/xerrors"
)
func (s *Server) documentLink(ctx context.Context, params *protocol.DocumentLinkParams) ([]protocol.DocumentLink, error) {
uri := span.NewURI(params.TextDocument.URI)
view := s.session.ViewOf(uri)
f, err := getGoFile(ctx, view, uri)
if err != nil {
return nil, err
}
fh := f.Handle(ctx)
data, _, err := fh.Read(ctx)
if err != nil {
return nil, err
}
file, err := view.Session().Cache().ParseGoHandle(fh, source.ParseFull).Parse(ctx)
if file == nil {
return nil, err
}
tok := view.Session().Cache().FileSet().File(file.Pos())
m := protocol.NewColumnMapper(f.URI(), f.URI().Filename(), view.Session().Cache().FileSet(), tok, data)
var links []protocol.DocumentLink
ast.Inspect(file, func(node ast.Node) bool {
switch n := node.(type) {
case *ast.ImportSpec:
target, err := strconv.Unquote(n.Path.Value)
if err != nil {
log.Error(ctx, "cannot unquote import path", err, tag.Of("Path", n.Path.Value))
return false
}
target = "https://godoc.org/" + target
l, err := toProtocolLink(view, m, target, n.Pos(), n.End())
if err != nil {
log.Error(ctx, "cannot initialize DocumentLink", err, tag.Of("Path", n.Path.Value))
return false
}
links = append(links, l)
return false
case *ast.BasicLit:
if n.Kind != token.STRING {
return false
}
l, err := findLinksInString(n.Value, n.Pos(), view, m)
if err != nil {
log.Error(ctx, "cannot find links in string", err)
return false
}
links = append(links, l...)
return false
}
return true
})
for _, commentGroup := range file.Comments {
for _, comment := range commentGroup.List {
l, err := findLinksInString(comment.Text, comment.Pos(), view, m)
if err != nil {
log.Error(ctx, "cannot find links in comment", err)
continue
}
links = append(links, l...)
}
}
return links, nil
}
func findLinksInString(src string, pos token.Pos, view source.View, mapper *protocol.ColumnMapper) ([]protocol.DocumentLink, error) {
var links []protocol.DocumentLink
re, err := getURLRegexp()
if err != nil {
return nil, errors.Errorf("cannot create regexp for links: %s", err.Error())
}
for _, urlIndex := range re.FindAllIndex([]byte(src), -1) {
start := urlIndex[0]
end := urlIndex[1]
startPos := token.Pos(int(pos) + start)
endPos := token.Pos(int(pos) + end)
target := src[start:end]
l, err := toProtocolLink(view, mapper, target, startPos, endPos)
if err != nil {
return nil, err
}
links = append(links, l)
}
return links, nil
}
const urlRegexpString = "(http|ftp|https)://([\\w_-]+(?:(?:\\.[\\w_-]+)+))([\\w.,@?^=%&:/~+#-]*[\\w@?^=%&/~+#-])?"
var (
urlRegexp *regexp.Regexp
regexpOnce sync.Once
regexpErr error
)
func getURLRegexp() (*regexp.Regexp, error) |
func toProtocolLink(view source.View, mapper *protocol.ColumnMapper, target string, start, end token.Pos) (protocol.DocumentLink, error) {
spn, err := span.NewRange(view.Session().Cache().FileSet(), start, end).Span()
if err != nil {
return protocol.DocumentLink{}, err
}
rng, err := mapper.Range(spn)
if err != nil {
return protocol.DocumentLink{}, err
}
l := protocol.DocumentLink{
Range: rng,
Target: target,
}
return l, nil
}
| {
regexpOnce.Do(func() {
urlRegexp, regexpErr = regexp.Compile(urlRegexpString)
})
return urlRegexp, regexpErr
} |
keeper_test.go | package keeper_test
import (
_ "embed"
"math/big"
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/cosmos/cosmos-sdk/baseapp"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/codec"
"github.com/cosmos/cosmos-sdk/crypto/keyring"
sdk "github.com/cosmos/cosmos-sdk/types"
authtypes "github.com/cosmos/cosmos-sdk/x/auth/types"
stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types"
"github.com/tharsis/ethermint/app"
"github.com/tharsis/ethermint/crypto/ethsecp256k1"
"github.com/tharsis/ethermint/encoding"
"github.com/tharsis/ethermint/tests"
ethermint "github.com/tharsis/ethermint/types"
"github.com/tharsis/ethermint/x/feemarket/types"
"github.com/ethereum/go-ethereum/common"
ethtypes "github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/crypto"
"github.com/tendermint/tendermint/crypto/tmhash"
tmproto "github.com/tendermint/tendermint/proto/tendermint/types"
tmversion "github.com/tendermint/tendermint/proto/tendermint/version"
"github.com/tendermint/tendermint/version"
)
type KeeperTestSuite struct {
suite.Suite
ctx sdk.Context
app *app.EthermintApp
queryClient types.QueryClient
address common.Address
consAddress sdk.ConsAddress
// for generate test tx
clientCtx client.Context
ethSigner ethtypes.Signer
appCodec codec.Codec
signer keyring.Signer
}
/// DoSetupTest setup test environment, it uses`require.TestingT` to support both `testing.T` and `testing.B`.
func (suite *KeeperTestSuite) DoSetupTest(t require.TestingT) {
checkTx := false
// account key
priv, err := ethsecp256k1.GenerateKey()
require.NoError(t, err)
suite.address = common.BytesToAddress(priv.PubKey().Address().Bytes())
suite.signer = tests.NewSigner(priv)
// consensus key
priv, err = ethsecp256k1.GenerateKey()
require.NoError(t, err)
suite.consAddress = sdk.ConsAddress(priv.PubKey().Address())
suite.app = app.Setup(checkTx, nil)
suite.ctx = suite.app.BaseApp.NewContext(checkTx, tmproto.Header{
Height: 1,
ChainID: "ethermint_9000-1",
Time: time.Now().UTC(),
ProposerAddress: suite.consAddress.Bytes(),
Version: tmversion.Consensus{
Block: version.BlockProtocol,
},
LastBlockId: tmproto.BlockID{
Hash: tmhash.Sum([]byte("block_id")),
PartSetHeader: tmproto.PartSetHeader{
Total: 11,
Hash: tmhash.Sum([]byte("partset_header")),
},
},
AppHash: tmhash.Sum([]byte("app")),
DataHash: tmhash.Sum([]byte("data")),
EvidenceHash: tmhash.Sum([]byte("evidence")),
ValidatorsHash: tmhash.Sum([]byte("validators")),
NextValidatorsHash: tmhash.Sum([]byte("next_validators")),
ConsensusHash: tmhash.Sum([]byte("consensus")),
LastResultsHash: tmhash.Sum([]byte("last_result")),
})
suite.app.EvmKeeper.WithContext(suite.ctx)
queryHelper := baseapp.NewQueryServerTestHelper(suite.ctx, suite.app.InterfaceRegistry())
types.RegisterQueryServer(queryHelper, suite.app.FeeMarketKeeper)
suite.queryClient = types.NewQueryClient(queryHelper)
acc := ðermint.EthAccount{
BaseAccount: authtypes.NewBaseAccount(sdk.AccAddress(suite.address.Bytes()), nil, 0, 0),
CodeHash: common.BytesToHash(crypto.Keccak256(nil)).String(),
}
suite.app.AccountKeeper.SetAccount(suite.ctx, acc)
valAddr := sdk.ValAddress(suite.address.Bytes())
validator, err := stakingtypes.NewValidator(valAddr, priv.PubKey(), stakingtypes.Description{})
err = suite.app.StakingKeeper.SetValidatorByConsAddr(suite.ctx, validator)
require.NoError(t, err)
err = suite.app.StakingKeeper.SetValidatorByConsAddr(suite.ctx, validator)
require.NoError(t, err)
suite.app.StakingKeeper.SetValidator(suite.ctx, validator)
encodingConfig := encoding.MakeConfig(app.ModuleBasics)
suite.clientCtx = client.Context{}.WithTxConfig(encodingConfig.TxConfig)
suite.ethSigner = ethtypes.LatestSignerForChainID(suite.app.EvmKeeper.ChainID())
suite.appCodec = encodingConfig.Marshaler
}
func (suite *KeeperTestSuite) SetupTest() {
suite.DoSetupTest(suite.T())
}
func | (t *testing.T) {
suite.Run(t, new(KeeperTestSuite))
}
func (suite *KeeperTestSuite) TestSetGetBlockGasUsed() {
testCases := []struct {
name string
malleate func()
expGas uint64
}{
// TODO How to test len(bz) = 0
// {
// "no KeyPrefixBlockGasUsed",
// func() {},
// uint64(0),
// },
{
"with last block given",
func() {
suite.app.FeeMarketKeeper.SetBlockGasUsed(suite.ctx, uint64(1000000))
},
uint64(1000000),
},
}
for _, tc := range testCases {
tc.malleate()
gas := suite.app.FeeMarketKeeper.GetBlockGasUsed(suite.ctx)
suite.Require().Equal(tc.expGas, gas, tc.name)
}
}
func (suite *KeeperTestSuite) TestSetGetGasFee() {
testCases := []struct {
name string
malleate func()
expFee *big.Int
}{
{
"with last block given",
func() {
suite.app.FeeMarketKeeper.SetBaseFee(suite.ctx, sdk.OneDec().BigInt())
},
sdk.OneDec().BigInt(),
},
}
for _, tc := range testCases {
tc.malleate()
fee := suite.app.FeeMarketKeeper.GetBaseFee(suite.ctx)
suite.Require().Equal(tc.expFee, fee, tc.name)
}
}
| TestKeeperTestSuite |
library.py | import time
import string
import random
import os
from termcolor import colored
from collections import Counter
clean_the_screen = ("cls" if os.name == "nt" else "clear")
# Function for listing books with their full information.
def listBooks():
file = open("books.txt", "r")
lines = file.readlines()
file.close()
for i in lines:
splitted = i.split(",")
numberISBN = colored(f"{splitted[0]}", "blue")
nameBook = colored(f"{splitted[1]}", "magenta", "on_grey")
nameAuthor = colored(f"{splitted[2]}", "yellow")
checkOut = splitted[3]
if checkOut == "T\n":
checkOut = colored("Book is not in the library.", "red")
if checkOut == "F\n":
checkOut = colored("Book is in the library.", "green")
print("-" * 115)
print(f"Name: {nameBook} - Author: {nameAuthor} - Status: {checkOut} - ISBN: {numberISBN}\n")
# Function for showing the books those are checked out by students.
def listBooksChecked():
file = open("books.txt", "r")
lines = file.readlines()
file.close()
a = 0
for i in lines:
splitted = i.split(",")
numberISBN = colored(f"{splitted[0]}", "blue")
nameBook = colored(f"{splitted[1]}", "magenta", "on_grey")
nameAuthor = colored(f"{splitted[2]}", "yellow")
checkOut = splitted[3]
if checkOut == "T\n":
a += 1
print("-" * 115)
print(f"Name: {nameBook} - Author: {nameAuthor} - ISBN: {numberISBN}\n")
if a == 0:
print("-" * 115)
print(colored("\tUhm..- Nobody reads books these days.\n", "blue"))
print("There is no checked out book. All the books are in the library.")
# Function for adding new books to library's data.
def addBook():
file = open("books.txt", "r")
lines = file.readlines()
file.close()
isbn = input("Please enter the ISBN number: ")
nameBook = input("Please enter the name of book: ")
nameAuthor = input("Please enter the author name: ")
for i in lines:
splitted = i.split(",")
isbnBook = splitted[0]
nBook = splitted[1]
if isbn == isbnBook:
print(colored("There is already a book with this ISBN.", "red"))
print(f"\t{isbn} - {nBook}")
break
else:
print(colored("\nThe book succesfully added to the data.", "green"))
status = "F\n"
file = open("books.txt", "a+")
file.write(f"{isbn},{nameBook},{nameAuthor},{status}")
file.close()
# Function for searching books by their ISBN numbers in data.
def searchBookISBN():
file = open("books.txt", "r")
lines = file.readlines()
file.close()
searchingISBN = input("Enter the ISBN number of book which you are looking for.\n> ")
a = 0
for i in lines:
splitted = i.split(",")
numberISBN = colored(f"{splitted[0]}", "blue")
nameBook = colored(f"{splitted[1]}", "magenta", "on_grey")
nameAuthor = colored(f"{splitted[2]}", "yellow")
checkOut = splitted[3]
if checkOut == "T\n":
checkOut = colored("is not in the library.", "red")
if checkOut == "F\n":
checkOut = colored("is in the library.", "green")
if searchingISBN.upper() in numberISBN:
print("-" * 95)
print(colored(f"{numberISBN}", "blue"), "-", f"'{nameBook}' by {nameAuthor} {checkOut}")
print("-" * 95)
a += 1
if a == 0:
print("Sorry. There is no book with this ISBN number.")
# Function for searching books by their names in data.
def searchBookName():
file = open("books.txt", "r")
lines = file.readlines()
file.close()
searchingName = input("Enter the name of book which you are looking for.\n> ")
a = 0
for i in lines:
splitted = i.split(",")
numberISBN = colored(f"{splitted[0]}", "blue")
nameBook = colored(f"{splitted[1]}", "magenta", "on_grey")
nameAuthor = colored(f"{splitted[2]}", "yellow")
checkOut = splitted[3]
if checkOut == "T\n":
checkOut = colored("Book is not in the library.", "red")
if checkOut == "F\n":
checkOut = colored("Book is in the library.", "green")
if searchingName.lower() in nameBook.lower():
a += 1
print(colored("-" * 95, "cyan"))
print(f"ISBN: {numberISBN} - Name : {nameBook} - Author: {nameAuthor} - Status: {checkOut}\n")
print(colored("-" * 95, "magenta"))
if a == 0:
print("Sorry. There is no book with this name.")
# Function for searching books by their authors' name in data.
def searchBookAuthor():
file = open("books.txt", "r")
lines = file.readlines()
file.close()
searchingAuthor = input("Enter the author name which you are looking for: ")
a = 0
for i in lines:
splitted = i.split(",")
numberISBN = colored(f"{splitted[0]}", "blue")
nameBook = colored(f"{splitted[1]}", "magenta", "on_grey")
nameAuthor = colored(f"{splitted[2]}", "yellow")
checkOut = splitted[3]
if checkOut == "T\n":
checkOut = colored("Book is not in the library.", "red")
if checkOut == "F\n":
checkOut = colored("Book is in the library.", "green")
if searchingAuthor.lower() in nameAuthor.lower():
a += 1
print("-" * 95)
print(f"Author: {nameAuthor} - Name : {nameBook} - ISBN: {numberISBN} - Status: {checkOut}\n")
if a == 0:
print(colored("Sorry. There is no author with this name.", "red"))
# Function for generating tickets when checking out a book to check in book with.
# Possibility of 2.176.782.336 tickets.
def ticketGenerator(student_id, book_name):
chars = string.digits + string.ascii_uppercase
ticket = "".join(random.sample(chars, 6))
file = open("tickets.txt", "a+")
lines = file.readlines()
for i in lines:
splitted = i.split("-")
ticket2 = splitted[0]
if ticket == ticket2:
return ticketGenerator()
else:
file.write(f"{ticket}-{book_name}-{student_id}\n")
file.close()
return ticket
# Function for checking out books to students' data.
def checkOutBook():
file = open("books.txt", "rt")
dataBooksLines = file.readlines()
file.close()
file = open("students.txt", "r")
dataStudentsLines = file.readlines()
file.close()
dataCheckOut = open("checkouts.txt", "a")
bookToCheckOut = input("Please enter the ISBN number of book that you want to check out: ")
isBookToCheckOut = False
isBookToStudent = False
# Controlling if there is a book with this ISBN or not.
for i in dataBooksLines:
splitted = i.split(",")
numberISBN = splitted[0]
if bookToCheckOut == splitted[0]:
isBookToCheckOut = True
break
else:
print(colored("There is no book with this ISBN number.", "red"))
pass
if isBookToCheckOut == True:
bookToStudent = input("Please enter the student ID to check out: ")
for i in dataStudentsLines:
splitted = i.split(maxsplit= 1)
studentID = splitted[0]
studentName = splitted[1]
if bookToStudent == studentID:
isBookToStudent = True
break
else:
print(colored("There is no student with this ID. Try again.", "red"))
pass
if isBookToStudent == True:
for i in dataBooksLines:
splitted = i.split(",")
numberISBN = splitted[0]
nameBook = splitted[1]
nameAuthor = splitted[2]
checkOut = splitted[3]
if bookToCheckOut == numberISBN:
if checkOut == "T\n":
print(colored("Oops! This book is already checked out.", "red"))
else:
print(colored("Are you sure to check out this book?\n", "blue", "on_grey"))
print("ISBN:", colored(numberISBN, "blue"), "-", "Name :", colored(nameBook, "magenta", "on_grey"), "-", "Author:", colored(nameAuthor, "yellow"))
print(f"\nThis book will checked out to: " + colored(studentName, "white", "on_grey", attrs=['blink']))
verify = ""
while verify != "Y" or verify != "N" or verify != "y" or verify != "n":
verify = input("\nEnter Y or N\n" + colored("> ", "grey", attrs=['blink']))
if verify == "N" or verify == "n":
break
if verify == "Y" or verify == "y":
# Generating ticket and giving it to student.
ticketnumber = ticketGenerator(student_id= bookToStudent, book_name= nameBook)
os.system(clean_the_screen)
print(f"""
____/ \ / \____
/| ------------- | ----------- |\
||| ------------- | --->{colored(ticketnumber, "red", "on_cyan", attrs=['reverse', 'blink'])} |||
||| ------------- | ------------- |||
||| ------- ----- | --Here is---- |||
||| ------------- | -your-ticket--|||
||| ------------- | ----number.---|||
||| ------------ | --Use-it------|||
||| ------------- | -when-you--- |||
||| ------------- | -checking-in--|||
||| ------------- | ---the-book.--|||
||| ------------ | ------------- |||
|||_____________ | _____________|||
/_____/--------\\_//--------\_____\
""")
dataCheckOut.write(f"{numberISBN}-{ticketnumber}-{bookToStudent}-{nameBook}-{nameAuthor}\n")
dataCheckOut.close()
print(colored("\nThe book succesfully checked out to the student.", "green"))
# TO WRITE "T" ON BOOKS FILE WHEN CHANGED
for i in dataBooksLines:
splitted = i.split(",")
numberISBN = splitted[0]
nameBook = splitted[1]
nameAuthor = splitted[2]
checkOut = splitted[3]
if bookToCheckOut == numberISBN:
file = open("books.txt", "r")
content = file.read()
content = content.replace("{},{},{},{}".format(numberISBN, nameBook, nameAuthor, checkOut), "{},{},{},T\n".format(numberISBN, nameBook, nameAuthor))
file.close()
file = open("books.txt", "w")
file.write(content)
file.close()
break
# Function for listing students by their names with the books they checked out under their names.
def listStudents():
file = open("checkouts.txt", "r")
checkOutsLines = file.readlines()
file.close()
file = open("students.txt", "r")
studentsLines = file.readlines()
file.close()
file = open("checkins.txt", "r")
checkInsLines = file.readlines()
file.close()
isCheckInsLines = False
if len(checkInsLines) == 0:
isCheckInsLines = True
for i in studentsLines:
splitted = i.split()
sNumber = splitted[0]
sName = splitted[1]
sLastname = splitted[2]
print(colored("-" * 80, "grey"))
print(colored(f"{sName} {sLastname}", "blue"))
for x in checkOutsLines:
splitted = x.split("-")
nameBook = splitted[3]
scNumber = splitted[2]
ticket1 = splitted[1]
if isCheckInsLines:
if sNumber == scNumber:
print(colored("-" * 80, "grey"))
print(colored(f"\t-{nameBook}", "magenta", "on_grey"))
else:
for z in checkInsLines:
splitted = z.split("-")
ticket2 = splitted[1]
if ticket1 == ticket2:
break
else:
if sNumber == scNumber and ticket1 != ticket2:
print(colored("-" * 80, "grey"))
print(colored(f"\t-{nameBook}", "magenta", "on_grey"))
# Function for printing the top three most checked out books.
def topThreeBook():
file = open("checkouts.txt", "r")
checkoutsLines = file.readlines()
file.close()
file = open("books.txt", "r")
booksLines = file.readlines()
file.close()
isbns = []
for i in checkoutsLines:
splitted = i.split("-")
isbn = splitted[0]
isbns.append(isbn)
dictionary = Counter(isbns)
val_list = list(dictionary.values())
for i in range(3):
print("_" * 105)
if i == 0:
print(colored("THE MOST CHECKED OUT BOOK(S)!", "red", "on_yellow", attrs=['blink']))
elif i == 1:
print(colored("THE SECOND MOST CHECKED OUT BOOK(S)!", "red", "on_yellow", attrs=['blink']))
elif i == 2:
print(colored("THE THIRD MOST CHECKED OUT BOOK(S)!", "red", "on_yellow", attrs=['blink']))
try:
if len(val_list) != 0:
print("_" * 105)
print(colored(f"This/these book(s) has/have checked out for [{str(max(val_list))}] time(s)!", "cyan"))
print("_" * 105)
print("\n")
if val_list.count(max(val_list)) > 1:
for key, value in dictionary.items():
if max(val_list) == value:
for z in booksLines:
splitted2 = z.split(",")
bookISBN = splitted2[0]
bookName = splitted2[1]
if key == bookISBN:
key = bookName # key = isbn
print(key)
for i in range(val_list.count(max(val_list))):
val_list.remove(max(val_list))
elif val_list.count(max(val_list)) == 1:
for key, value in dictionary.items():
if max(val_list) == value:
for z in booksLines:
splitted2 = z.split(",")
bookISBN = splitted2[0]
bookName = splitted2[1]
if key == bookISBN:
key = bookName # key = isbn
print(key)
val_list.remove(max(val_list))
break
except:
print("There is no other books.")
# Function for printing top three students who checked out most.
def topThreeStudents():
dataCheckOut = open("checkouts.txt", "r")
dataCheckOutsLines = dataCheckOut.readlines()
dataCheckOut.close()
dataStudents = open("students.txt", "r")
dataStudentsLines = dataStudents.readlines()
dataStudents.close()
studentNumbers = []
for i in dataCheckOutsLines:
splitted = i.split("-")
stNumber = splitted[2]
studentNumbers.append(stNumber)
studentNumbers = Counter(studentNumbers)
val_list = list(studentNumbers.values())
for i in range(3):
print("_" * 105)
if i == 0:
print(colored("THE TOP #1 STUDENT(S)!", "red", "on_yellow", attrs=['blink']))
elif i == 1:
print(colored("THE TOP #2 STUDENT(S)!", "red", "on_yellow", attrs=['blink']))
elif i == 2:
print(colored("THE TOP #3 STUDENT(S)!", "red", "on_yellow", attrs=['blink']))
try:
if len(val_list) != 0:
print("_" * 105)
print(colored(f"This/these student(s) has/have checked out for [{str(max(val_list))}] time(s)!", "cyan"))
print("_" * 105)
print("\n")
if val_list.count(max(val_list)) > 1:
for key, value in studentNumbers.items():
if max(val_list) == value:
for z in dataStudentsLines:
splitted2 = z.split(maxsplit= 1)
sNumber = splitted2[0]
sName = splitted2[1]
if key == sNumber:
key = sName
print(key)
for i in range(val_list.count(max(val_list))):
val_list.remove(max(val_list))
elif val_list.count(max(val_list)) == 1:
for key, value in studentNumbers.items():
if max(val_list) == value:
for z in dataStudentsLines:
splitted2 = z.split(maxsplit= 1)
sNumber = splitted2[0]
sName = splitted2[1]
if key == sNumber:
key = sName
print(key)
val_list.remove(max(val_list))
break
except:
print("There is no other students who has checked out before.")
# Function for adding new students to data.
def addStudent():
file = open("students.txt", "r")
lines = file.readlines()
file.close()
numberStudent = input("Please enter the ID of a student to add.\n> ")
nameStudent = input("\nPlease enter the name of a student to add.\n> ")
for i in lines:
splitted = i.split(maxsplit= 1)
nStudent = splitted[0]
naStudent = splitted[1]
if numberStudent == nStudent:
print("This student ID is already exist.")
print(f"\t{nStudent} - {naStudent}")
break
else:
print(colored("\nThe student succesfully added to the data.", "green"))
file = open("students.txt", "a+")
file.write(f"{numberStudent} {nameStudent}\n")
file.close()
# Function for checking in a book with the ticket given when checked out.
def | ():
ticket = input("Please enter the ticket to check in book.\n> ")
dataBooks = open("books.txt", "r")
dataBooksLines = dataBooks.readlines()
dataBooks.close()
file = open("checkouts.txt", "r")
checkoutsLines = file.readlines()
file.close()
a = 0
for i in checkoutsLines:
splitted = i.split("-")
isbn = splitted[0]
tNumber = splitted[1]
studentID = splitted[2]
nameBook = splitted[3]
if ticket == tNumber:
a += 1
print(colored("Thank you for bringing back the book!", "green"))
file = open("checkins.txt", "a")
file.write(f"The book in-{ticket}-came back.\n")
file.close()
# TO WRITE "F" ON BOOKS FILE WHEN CHANGED
for i in dataBooksLines:
splitted = i.split(",")
numberISBN = splitted[0]
nameBook = splitted[1]
nameAuthor = splitted[2]
checkOut = splitted[3]
if isbn == numberISBN:
file = open("books.txt", "r")
content = file.read()
content = content.replace("{},{},{},{}".format(numberISBN, nameBook, nameAuthor, checkOut), "{},{},{},F\n".format(numberISBN, nameBook, nameAuthor))
file.close()
file = open("books.txt", "w")
file.write(content)
file.close()
break
if a == 0:
print(colored(f"Sorry. There is no ticket as '{ticket}'.", "red"))
maxims = [
"'I have always imagined that Paradise will be a kind of a Library.' - Jorge Luis Borges ",
"'Nothing is pleasanter than exploring a library.' - Walter Savage Landor ",
"'The only thing that you absolutely have to know, is the location of the library.' - Albert Einstein",
"'When in doubt go to the library.' - J.K. Rowling ",
"'I have found the most valuable thing in my wallet is my library card.' - Laura Bush",
"'Google can bring you back 100,000 answers, a librarian can bring you back the right one.' - Neil Gaiman",
"'The most important asset of any library goes home at night – the library staff.' - Timothy Healy",
"'Librarians are tour-guides for all of knowledge.' - Patrick Ness",
]
slider = colored("-" * 48, "red")
version = colored("library.py-v1.0", "green")
menu = f"""{version}
{random.choice(maxims)}
.--. .---.
.---|__| .-. |~~~|
.--|===|--|_ |_| |~~~|--.
| |===| |'\ .---!~| .--| |--|
|%%| | |.'\ |===| |--|%%| | |
|%%| | |\.'\ | | |__| | | |
| | | | \ \ |===| |==| | | |
| | |__| \.'\ | |_|__| |~~~|__|
| |===|--| \.'\|===|~|--|%%|~~~|--|
^--^---'--^ `-'`---^-^--^--^---'--'
{colored("HELLO FROM WORLD LIBRARY!", "white", "on_blue", attrs=['blink'])}
{colored("[1]", "blue")} List all the books in the library.
{colored("[2]", "blue")} List all the books those are checked out.
{colored("[3]", "blue")} Add a new book.
{colored("[4]", "blue")} Search a book by ISBN number.
{colored("[5]", "blue")} Search a book by name.
{colored("[6]", "blue")} Check out a book to a student.
{colored("[7]", "blue")} List all the students.
{slider}
{colored("[8] List top 3 most checked out books.", "cyan", attrs=['blink'])}
{colored("[9] List top 3 student.", "cyan", attrs=['blink'])}
{slider}
{colored("[10]", "blue")} Add new student.
{colored("[11]", "blue")} Search an author by name.
{colored("[12]", "blue")} Check in a book to a library.
{slider}
{colored("[0]", "red")} Exit
"""
password = "123456"
def login():
os.system(clean_the_screen)
print(colored("""
____________________________________________________
|____________________________________________________|
| __ __ ____ ___ || ____ ____ _ __ |
|| |__ |--|_| || |_| |||_|**|*|__|+|+||___| || | |
||==|^^||--| |=||=| |=*=||| |~~|~| |=|=|| | |~||==| |
|| |##|| | | || | | |||-| | |==|+|+||-|-|~||__| |
||__|__||__|_|_||_|_|___|||_|__|_|__|_|_||_|_|_||__|_|
||_______________________||__________________________|
| _____________________ || __ __ _ __ _ |
||=|=|=|=|=|=|=|=|=|=|=| __..\/ | |_| ||#||==| / /|
|| | | | | | | | | | | |/\ \ \\|++|=| || ||==| / / |
||_|_|_|_|_|_|_|_|_|_|_/_/\_.___\__|_|__||_||__|/_/__|
|____________________ /\~()/()~//\ __________________|
| __ __ _ _ \_ (_ . _/ _ ___ _____|
||~~|_|..|__| || |_ _ \ //\\ / |=|__|~|~|___| | | |
||--|+|^^|==| || | | |__/\ __ /\__| |==|x|x|+|+|=|=|=|
||__|_|__|__|_||_|_| / \ \ / / \_|__|_|_|_|_|_|_|_|
|_________________ _/ \/\/\/ \_ _______________|
| _____ _ __ |/ \../ \| __ __ ___|
||_____|_| |_|##|_|| | \/ __| ||_|==|_|++|_|-|||
||______||=|#|--| |\ \ o / /| | |~| | | |||
||______||_|_|__|_|_\ \ o / /_|_|__|_|__|_|_|||
|_________ __________\___\____/___/___________ ______|
|__ _ / ________ ______ /| _ _ _|
|\ \ |=|/ // /| // / / / | / ||%|%|%|
| \/\ |*/ .//____//.// /__/__/ (_) / ||=|=|=|
__| \/\|/ /(____|/ // / /||~|~|~|__
|___\_/ /________// ________ / / ||_|_|_|
|___ / (|________/ |\_______\ / /| |______|
/ \|________) / / | |
""", "yellow"))
login = input("Please enter the password to log in.\n> ")
if password == login:
print(colored("Succesfully logged in!", "green", attrs=['reverse', 'blink']))
time.sleep(2)
global isLogIn
isLogIn = True
else:
print(colored("Wrong password!", "red", attrs=['reverse', 'blink']))
print("Exiting...")
time.sleep(2)
os.system(clean_the_screen)
exit()
enterToGo = colored("Press 'Enter' to continue to the menu...", "white", "on_grey", attrs=['blink'])
if True:
isLogIn = False
login()
while isLogIn:
os.system(clean_the_screen)
print(menu)
choice = input("What would you like to do?\n> ")
choice_list = ["1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "10", "11", "12"]
if choice in choice_list:
if choice == "1":
os.system(clean_the_screen)
listBooks()
print("-" * 112)
input(enterToGo)
elif choice == "2":
os.system(clean_the_screen)
listBooksChecked()
print("-" * 115)
input(enterToGo)
elif choice == "3":
os.system(clean_the_screen)
addBook()
input(enterToGo)
elif choice == "4":
os.system(clean_the_screen)
searchBookISBN()
input(enterToGo)
elif choice == "5":
os.system(clean_the_screen)
searchBookName()
input(enterToGo)
elif choice == "6":
os.system(clean_the_screen)
checkOutBook()
input(enterToGo)
elif choice == "7":
os.system(clean_the_screen)
listStudents()
print("-" * 80)
input(enterToGo)
elif choice == "8":
os.system(clean_the_screen)
topThreeBook()
print("-" * 80)
input(enterToGo)
elif choice == "9":
os.system(clean_the_screen)
topThreeStudents()
print("-" * 80)
input(enterToGo)
elif choice == "10":
os.system(clean_the_screen)
addStudent()
print("-" * 80)
input(enterToGo)
elif choice == "11":
os.system(clean_the_screen)
searchBookAuthor()
print("-" * 80)
input(enterToGo)
elif choice == "12":
os.system(clean_the_screen)
checkInBook()
print("-" * 80)
input(enterToGo)
elif choice == "0":
print("Saving all the changes...")
time.sleep(3)
os.system(clean_the_screen)
print("See you soon!\n")
exit()
else:
print("Please enter a number in menu. (1-12)")
input(enterToGo) | checkInBook |
test_common.go | package guardian
import (
"testing"
"os"
stake "github.com/deep2chain/sscq/x/staking"
"github.com/stretchr/testify/require"
sdk "github.com/deep2chain/sscq/types"
dbm "github.com/tendermint/tendermint/libs/db"
abci "github.com/tendermint/tendermint/abci/types"
"github.com/tendermint/tendermint/libs/log"
"github.com/deep2chain/sscq/codec"
"github.com/deep2chain/sscq/x/auth"
"github.com/tendermint/tendermint/crypto"
"github.com/tendermint/tendermint/crypto/ed25519"
"github.com/deep2chain/sscq/store"
"encoding/hex"
)
var (
pks = []crypto.PubKey{
newPubKey("0B485CFC0EECC619440448436F8FC9DF40566F2369E72400281454CB552AFB50"),
newPubKey("0B485CFC0EECC619440448436F8FC9DF40566F2369E72400281454CB552AFB51"),
newPubKey("0B485CFC0EECC619440448436F8FC9DF40566F2369E72400281454CB552AFB52"),
}
addrs = []sdk.AccAddress{
sdk.AccAddress(pks[0].Address()),
sdk.AccAddress(pks[1].Address()),
sdk.AccAddress(pks[2].Address()),
}
)
func newPubKey(pk string) (res crypto.PubKey) {
pkBytes, err := hex.DecodeString(pk)
if err != nil {
panic(err)
}
var pkEd ed25519.PubKeyEd25519
copy(pkEd[:], pkBytes[:])
return pkEd
}
func createTestCodec() *codec.Codec {
cdc := codec.New()
sdk.RegisterCodec(cdc)
RegisterCodec(cdc)
auth.RegisterCodec(cdc)
stake.RegisterCodec(cdc)
codec.RegisterCrypto(cdc)
return cdc
}
func | (t *testing.T) (sdk.Context, Keeper) {
keyProf := sdk.NewKVStoreKey("guardian")
db := dbm.NewMemDB()
ms := store.NewCommitMultiStore(db)
ms.MountStoreWithDB(keyProf, sdk.StoreTypeIAVL, db)
err := ms.LoadLatestVersion()
require.Nil(t, err)
ctx := sdk.NewContext(ms, abci.Header{}, false, log.NewTMLogger(os.Stdout))
cdc := createTestCodec()
keeper := NewKeeper(cdc, keyProf, DefaultCodespace)
return ctx, keeper
}
| createTestInput |
metrics.py | from transformers import EvalPrediction
from sklearn.metrics import precision_recall_fscore_support
import numpy as np
def compute_metrics(pred: EvalPrediction):
"""Compute recall at the masked position
"""
mask = pred.label_ids != -100
# filter everything except the masked position and flatten tensors
labels = pred.label_ids[mask].flatten()
preds = pred.predictions[mask].flatten()
_, recall, _, _ = precision_recall_fscore_support(y_true=labels, y_pred=preds, average='micro')
return {'recall': recall}
def self_test():
|
if __name__ == "__main__":
self_test()
| pred = EvalPrediction(
label_ids=np.array([
[-100, 1, -100],
[ 2, -100, -100],
[-100, -100, 3],
[-100, -100, 4]
]),
predictions=np.array([
[-100, 1, -100], # 1 true positive
[ 2, -100, -100], # 1 true positive
[ 2, 6, 8], # 1 false positive, irrelevant pos will be ignored
[ 1, 7, 4] # 1 true positive, irrelevant pos will be ignored
])
)
m = compute_metrics(pred)
print(f"recall={m['recall']}")
assert m['recall'] == 0.75
print("Looks like it is working!") |
scroll-render-fix.ts | import {Raf} from '../raf/raf';
import {DomWatcher} from './dom-watcher';
import {noop} from '../func/noop';
export interface ScrollRenderFixConfig {
/**
* Callback run immediately before the document is manually scrolled.
* Run during the RAF postWrite step.
*/
beforeScrollCallback?: () => void;
/**
* Callback run immediately after the document is manually scrolled.
* Run during the RAF postWrite step.
*/
afterScrollCallback?: () => void;
}
/**
* What this class does is, it eats the window.wheel event
* and eats the scroll. It allows the rendering to catch up and then
* once it is done, it reapplies the scroll to the document.
*
* While this sounds counter-intuitive, it allows rendering to catchup
* and can smooth animations.
*
* In short, if you have scroll tied animations or intense animations,
* this can help fix the issue.
*
*
* Thanks to Angus and Eric for this tip.
*
* Since this issue is chrome specific, you might want scope it to only chrome.
* Usage:
*
* ```
* if (is.chrome()) {
* new ScrollRenderFix();
* }
* ```
*
* To take full advantage, use toolbox mutate or degu.read / writes.
* ```
*
* const raf = new Raf();
*
* raf.read(()=> {
* // do some reading
* })
*
* raf.writing(()=> {
* // do some writing
* })
*
* ```
*
*
*
*/
export class | {
private raf: Raf;
private currentY = 0;
private targetY = 0;
private domWatcher: DomWatcher;
/**
* Callback run immediately before the document is manually scrolled.
* Run during the RAF postWrite step.
* @private
*/
private readonly beforeScrollCallback: () => void;
/**
* Callback run immediately after the document is manually scrolled.
* Run during the RAF postWrite step.
* @private
*/
private readonly afterScrollCallback: () => void;
constructor(config: ScrollRenderFixConfig = {}) {
this.raf = new Raf();
this.beforeScrollCallback = config.beforeScrollCallback || noop;
this.afterScrollCallback = config.afterScrollCallback || noop;
this.domWatcher = new DomWatcher();
this.domWatcher.add({
element: document,
on: 'wheel',
eventOptions: {passive: false, capture: true},
callback: this.wheelHandler.bind(this),
});
}
private getScrollElement(): Element {
return document.scrollingElement || document.documentElement;
}
private wheelHandler(e: WheelEvent) {
e.preventDefault();
this.raf.read(() => {
this.targetY = this.getScrollElement().scrollTop + e.deltaY;
this.raf.postWrite(() => {
this.beforeScrollCallback();
if (this.currentY !== this.targetY) {
this.getScrollElement().scrollTop = this.targetY;
this.currentY = this.targetY;
}
this.afterScrollCallback();
});
});
}
public dispose() {
this.domWatcher && this.domWatcher.dispose();
this.raf && this.raf.dispose();
}
}
| ScrollRenderFix |
platform_riscv32.rs | #[inline(always)]
// Justification: documentation is generated from mocks
#[allow(clippy::missing_safety_doc)]
pub unsafe fn yieldk() {
/* TODO: Stop yielding */
asm! (
"li a0, 0
ecall"
:
:
: "memory", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17",
"x5", "x6", "x7", "x28", "x29", "x30", "x31", "x1"
: "volatile");
}
#[inline(always)]
// Justification: documentation is generated from mocks
#[allow(clippy::missing_safety_doc)]
pub unsafe fn subscribe(
major: usize,
minor: usize,
cb: *const unsafe extern "C" fn(usize, usize, usize, usize),
ud: usize,
) -> isize {
let res;
asm!("li a0, 1
ecall"
: "={x10}" (res)
: "{x11}" (major), "{x12}" (minor), "{x13}" (cb), "{x14}" (ud)
: "memory"
: "volatile" );
res
}
#[inline(always)]
// Justification: documentation is generated from mocks
#[allow(clippy::missing_safety_doc)]
pub unsafe fn command(major: usize, minor: usize, arg1: usize, arg2: usize) -> isize {
let res;
asm!("li a0, 2
ecall"
: "={x10}" (res)
: "{x11}" (major), "{x12}" (minor), "{x13}" (arg1), "{x14}" (arg2)
: "memory"
: "volatile");
res
}
#[inline(always)]
// Justification: documentation is generated from mocks
#[allow(clippy::missing_safety_doc)]
pub unsafe fn command1(major: usize, minor: usize, arg: usize) -> isize |
#[inline(always)]
// Justification: documentation is generated from mocks
#[allow(clippy::missing_safety_doc)]
pub unsafe fn allow(major: usize, minor: usize, slice: *mut u8, len: usize) -> isize {
let res;
asm!("li a0, 3
ecall"
: "={x10}" (res)
: "{x11}" (major), "{x12}" (minor), "{x13}" (slice), "{x14}" (len)
: "memory"
: "volatile");
res
}
#[inline(always)]
// Justification: documentation is generated from mocks
#[allow(clippy::missing_safety_doc)]
pub unsafe fn memop(major: u32, arg1: usize) -> isize {
let res;
asm!("li a0, 4
ecall"
: "={x10}" (res)
: "{x11}" (major), "{x12}" (arg1)
: "memory"
: "volatile");
res
}
| {
let res;
asm!("li a0, 2
ecall"
: "={x10}" (res)
: "{x11}" (major), "{x12}" (minor), "{x13}" (arg)
: "memory"
: "volatile");
res
} |
hyp.rs | /// Hyperbolic functions.
pub trait Hyperbolic {
/// Hyperbolic sine function.
fn sinh(self) -> Self;
/// Hyperbolic cosine function.
fn cosh(self) -> Self;
/// Hyperbolic tangent function.
fn tanh(self) -> Self;
/// Inverse hyperbolic sine function.
fn asinh(self) -> Self;
/// Inverse hyperbolic cosine function.
fn acosh(self) -> Self;
| /// Inverse hyperbolic tangent function.
fn atanh(self) -> Self;
} |
|
analyse-project.ts | import * as pipe from '../node/pipe';
import { plugin } from './plugins';
import { walkProjectFiles } from './walk-project-files';
|
// Clear analyseInfo
Object.keys(plugin.analyseInfo).forEach(key => delete plugin.analyseInfo[key]);
// Clear pipe
pipe.clear();
plugin.projectAnalyses.forEach(projectAnalyse => {
const result = projectAnalyse(files, pipe.set);
if (result && typeof result === 'object') {
Object.keys(result).forEach(key => {
plugin.analyseInfo[key] = result[key];
});
}
});
return plugin.analyseInfo;
}; | export const analyseProject = async () => {
const files = await walkProjectFiles(); |
views.py | from functools import wraps
from typing import Dict
from django import forms
from django.db import transaction
from django.db.models import F
from django.http import HttpResponse
from django.http import JsonResponse
from django.urls import reverse_lazy
from django.utils.decorators import method_decorator
from django.views import View
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import CreateView
from django.views.generic import DeleteView
from django.views.generic import ListView
from django.views.generic import RedirectView
from django.views.generic import UpdateView
from application.blog.models import BlogPost
from application.blog.models import UserLike
from framework.mixins import ExtendedContextMixin
class PostForm(forms.ModelForm):
class Meta:
model = BlogPost
fields = ["title", "content"]
widgets = {
"title": forms.TextInput(),
"content": forms.Textarea(attrs={"rows": 2}),
}
class AllPostsView(ExtendedContextMixin, ListView):
template_name = "blog/all_posts.html"
model = BlogPost
def get_extended_context(self) -> Dict:
context = {"form": PostForm()}
return context
class NewPostView(CreateView):
http_method_names = ["post"]
model = BlogPost
fields = ["content", "title"]
success_url = reverse_lazy("blog:all")
def form_valid(self, form):
post = form.save(commit=False)
post.author = self.request.user
return super().form_valid(form)
class DelAll(RedirectView):
def get_redirect_url(self, *args, **kwargs):
BlogPost.objects.all().delete()
return reverse_lazy("blog:all")
class DeletePostView(DeleteView):
http_method_names = ["post"]
model = BlogPost
success_url = reverse_lazy("blog:all")
class | (UpdateView):
model = BlogPost
fields = ["content", "title"]
template_name = "blog/post.html"
success_url = reverse_lazy("blog:all")
def form_valid(self, form):
self.object.edited = True
return super().form_valid(form)
@method_decorator(csrf_exempt, name="dispatch")
class PostLike(View):
def get(self, request, *args, **kwargs):
nr = BlogPost.objects.get(pk=kwargs.get("pk")).nr_likes
payload = str(nr)
return HttpResponse(payload, content_type="text/plain")
def post(self, request, *args, **kwargs):
payload = {
"ok": False,
"nr_likes": 0,
"is_like": False,
"reason": "unknown reason",
}
try:
pk = kwargs.get("pk", 0)
post = BlogPost.objects.get(pk=pk)
user = self.request.user
except Exception:
payload.update({"reason": "post not found"})
else:
try:
userlike = UserLike.objects.get(user=user, post=post)
except UserLike.DoesNotExist:
userlike = UserLike(user=user, post=post)
userlike.save()
# post.nr_likes += 1
# post.save()
is_like = True
else:
userlike.delete()
# post.nr_likes -= 1
# post.save()
is_like = False
post = BlogPost.objects.get(pk=pk)
nr_like = UserLike.objects.filter(post=post).count()
payload.update(
{
"ok": True,
"nr_likes": nr_like,
"is_like": is_like,
"reason": None,
}
)
return JsonResponse(payload)
| PostView |
utility.rs | // Copyright 2018 Parity Technologies (UK) Ltd.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
use crate::message::{CircuitRelay, CircuitRelay_Peer, CircuitRelay_Status, CircuitRelay_Type};
use futures::{future::{self, Either}, prelude::*};
use log::trace;
use multiaddr::{Protocol, Multiaddr};
use peerstore::PeerId;
use protobuf::{self, Message};
use std::{io, error::Error, iter::FromIterator};
use tokio_codec::Framed;
use tokio_io::{AsyncRead, AsyncWrite};
use unsigned_varint::codec;
pub(crate) fn is_success(msg: &CircuitRelay) -> bool {
msg.get_field_type() == CircuitRelay_Type::STATUS
&& msg.get_code() == CircuitRelay_Status::SUCCESS
}
pub(crate) fn status(s: CircuitRelay_Status) -> CircuitRelay {
let mut msg = CircuitRelay::new();
msg.set_field_type(CircuitRelay_Type::STATUS);
msg.set_code(s);
msg
}
pub(crate) struct Io<T> {
codec: Framed<T, codec::UviBytes<Vec<u8>>>,
}
impl<T: AsyncRead + AsyncWrite> Io<T> {
pub(crate) fn new(c: T) -> Io<T> {
Io {
codec: Framed::new(c, codec::UviBytes::default()),
}
}
pub(crate) fn into(self) -> T {
self.codec.into_inner()
}
}
impl<T> Io<T>
where
T: AsyncRead + AsyncWrite + 'static,
{
pub(crate) fn send(self, msg: CircuitRelay) -> impl Future<Item=Self, Error=io::Error> {
trace!("sending protocol message: type={:?}, code={:?}",
msg.get_field_type(),
msg.get_code());
let pkg = match msg.write_to_bytes() {
Ok(p) => p,
Err(e) => return Either::A(future::err(io_err(e)))
};
Either::B(self.codec.send(pkg).map(|codec| Io { codec }))
}
pub(crate) fn recv(self) -> impl Future<Item=(Option<CircuitRelay>, Self), Error=io::Error> {
self.codec
.into_future()
.map_err(|(e, _)| io_err(e))
.and_then(|(pkg, codec)| {
if let Some(ref p) = pkg {
protobuf::parse_from_bytes(p)
.map(|msg: CircuitRelay| {
trace!("received protocol message: type={:?}, code={:?}",
msg.get_field_type(),
msg.get_code());
(Some(msg), Io { codec })
})
.map_err(io_err)
} else {
Ok((None, Io { codec }))
}
})
}
}
pub(crate) enum RelayAddr {
Address { relay: Option<Peer>, dest: Peer },
Malformed,
Multihop, // Unsupported
}
impl RelayAddr {
// Address format: [<relay>]/p2p-circuit/<destination>
pub(crate) fn parse(addr: &Multiaddr) -> RelayAddr {
let mut iter = addr.iter().peekable();
let relay = if let Some(&Protocol::P2pCircuit) = iter.peek() {
None // Address begins with "p2p-circuit", i.e. no relay is specified.
} else {
let prefix = iter.by_ref().take_while(|p| *p != Protocol::P2pCircuit);
match Peer::from(Multiaddr::from_iter(prefix)) {
None => return RelayAddr::Malformed,
peer => peer,
}
};
// After the (optional) relay, "p2p-circuit" is expected.
if Some(Protocol::P2pCircuit) != iter.next() {
return RelayAddr::Malformed;
}
let dest = {
let suffix = iter.by_ref().take_while(|p| *p != Protocol::P2pCircuit);
match Peer::from(Multiaddr::from_iter(suffix)) {
None => return RelayAddr::Malformed,
Some(p) => p,
}
};
if iter.next().is_some() {
return RelayAddr::Multihop;
}
RelayAddr::Address { relay, dest }
}
}
#[derive(Debug, Clone)]
pub(crate) struct Peer {
pub(crate) id: PeerId,
pub(crate) addrs: Vec<Multiaddr>,
}
impl Peer {
pub(crate) fn from(mut addr: Multiaddr) -> Option<Peer> |
pub(crate) fn from_message(mut m: CircuitRelay_Peer) -> Option<Peer> {
let pid = PeerId::from_bytes(m.take_id()).ok()?;
let mut addrs = Vec::new();
for a in m.take_addrs().into_iter() {
if let Ok(ma) = Multiaddr::from_bytes(a) {
addrs.push(ma)
}
}
Some(Peer { id: pid, addrs })
}
}
pub(crate) fn io_err<E>(e: E) -> io::Error
where
E: Into<Box<Error + Send + Sync>>,
{
io::Error::new(io::ErrorKind::Other, e)
}
| {
match addr.pop() {
Some(Protocol::P2p(id)) => {
PeerId::from_multihash(id).ok().map(|pid| {
if addr.iter().count() == 0 {
Peer {
id: pid,
addrs: Vec::new(),
}
} else {
Peer {
id: pid,
addrs: vec![addr],
}
}
})
}
_ => None,
}
} |
adapter_test.js | const { assert } = require('chai')
const { assertSuccess, assertError } = require('@chainlink/external-adapter')
const { execute } = require('../adapter')
describe('execute', () => {
const jobID = '1'
context('successful calls @integration', () => {
const requests = [
{
name: 'empty data',
testData: { data: {} },
},
{
name: 'no speed param',
testData: {
id: jobID,
data: { endpoint: 'not_real' },
},
},
{
name: 'id not supplied',
testData: {
data: {
endpoint: 'latest-minimum-gasprice',
speed: 'fast',
},
},
},
{
name: 'speed is standard',
testData: {
id: jobID,
data: { speed: 'standard' },
},
},
]
requests.forEach((req) => {
it(`${req.name}`, (done) => {
execute(req.testData, (statusCode, data) => {
assertSuccess({ expected: 200, actual: statusCode }, data, jobID)
assert.isAbove(Number(data.result), 0)
assert.isAbove(Number(data.data.result), 0)
done()
})
})
})
})
context('validation error', () => {
const requests = [
{
name: 'empty body',
testData: {},
},
]
requests.forEach((req) => {
it(`${req.name}`, (done) => {
execute(req.testData, (statusCode, data) => {
assertError({ expected: 400, actual: statusCode }, data, jobID)
done()
})
})
})
})
context('error calls @integration', () => {
const requests = [
{
name: 'unknown endpoint',
testData: {
id: jobID,
data: { speed: 'standard', endpoint: 'not_real' },
},
},
{
name: 'unknown speed',
testData: {
id: jobID,
data: { speed: 'not_real' }, | },
},
]
requests.forEach((req) => {
it(`${req.name}`, (done) => {
execute(req.testData, (statusCode, data) => {
assertError({ expected: 500, actual: statusCode }, data, jobID)
done()
})
})
})
})
}) | |
f7001f284c5642d54b622cdec48cd3926702afb2urls.py | # -*- coding: utf-8 -*-
from django.conf.urls.defaults import patterns, url
from djangopypi.feeds import ReleaseFeed | url(r'^packages/$','packages.index', name='djangopypi-package-index'),
url(r'^simple/$','packages.simple_index', name='djangopypi-package-index-simple'),
url(r'^search/$','packages.search',name='djangopypi-search'),
url(r'^pypi/$', 'root', name='djangopypi-release-index'),
url(r'^rss/$', ReleaseFeed(), name='djangopypi-rss'),
url(r'^simple/(?P<package>[\w\d_\.\-]+)/$','packages.simple_details',
name='djangopypi-package-simple'),
url(r'^pypi/(?P<package>[\w\d_\.\-]+)/$','packages.details',
name='djangopypi-package'),
url(r'^pypi/(?P<package>[\w\d_\.\-]+)/rss/$', ReleaseFeed(),
name='djangopypi-package-rss'),
url(r'^pypi/(?P<package>[\w\d_\.\-]+)/doap.rdf$','packages.doap',
name='djangopypi-package-doap'),
url(r'^pypi/(?P<package>[\w\d_\.\-]+)/manage/$','packages.manage',
name='djangopypi-package-manage'),
url(r'^pypi/(?P<package>[\w\d_\.\-]+)/manage/versions/$','packages.manage_versions',
name='djangopypi-package-manage-versions'),
url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/$',
'releases.details',name='djangopypi-release'),
url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/doap.rdf$',
'releases.doap',name='djangopypi-release-doap'),
url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/manage/$',
'releases.manage',name='djangopypi-release-manage'),
url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/metadata/$',
'releases.manage_metadata',name='djangopypi-release-manage-metadata'),
url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/files/$',
'releases.manage_files',name='djangopypi-release-manage-files'),
url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/files/upload/$',
'releases.upload_file',name='djangopypi-release-upload-file'),
) |
urlpatterns = patterns("djangopypi.views",
url(r'^$', "root", name="djangopypi-root"), |
lib.rs | pub mod cuda_runtime;
pub mod data_budget;
pub mod packet;
pub mod perf_libs;
pub mod recycler;
pub mod recycler_cache;
pub mod sigverify;
pub mod test_tx;
pub mod thread;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[cfg(test)]
#[macro_use]
extern crate matches;
#[macro_use]
extern crate analog_metrics;
fn | () -> bool {
#[cfg(target_os = "macos")]
{
use std::str::FromStr;
std::process::Command::new("sysctl")
.args(&["-in", "sysctl.proc_translated"])
.output()
.map_err(|_| ())
.and_then(|output| String::from_utf8(output.stdout).map_err(|_| ()))
.and_then(|stdout| u8::from_str(stdout.trim()).map_err(|_| ()))
.map(|enabled| enabled == 1)
.unwrap_or(false)
}
#[cfg(not(target_os = "macos"))]
{
false
}
}
pub fn report_target_features() {
warn!(
"CUDA is {}abled",
if crate::perf_libs::api().is_some() {
"en"
} else {
"dis"
}
);
// Validator binaries built on a machine with AVX support will generate invalid opcodes
// when run on machines without AVX causing a non-obvious process abort. Instead detect
// the mismatch and error cleanly.
if !is_rosetta_emulated() {
#[cfg(build_target_feature_avx)]
{
if is_x86_feature_detected!("avx") {
info!("AVX detected");
} else {
error!(
"Incompatible CPU detected: missing AVX support. Please build from source on the target"
);
std::process::abort();
}
}
#[cfg(build_target_feature_avx2)]
{
if is_x86_feature_detected!("avx2") {
info!("AVX2 detected");
} else {
error!(
"Incompatible CPU detected: missing AVX2 support. Please build from source on the target"
);
std::process::abort();
}
}
}
}
| is_rosetta_emulated |
gateway_ring.go | "fmt"
"os"
"time"
"github.com/go-kit/log"
"github.com/go-kit/log/level"
"github.com/cortexproject/cortex/pkg/ring"
"github.com/cortexproject/cortex/pkg/ring/kv"
"github.com/cortexproject/cortex/pkg/util/flagext"
util_log "github.com/cortexproject/cortex/pkg/util/log"
)
const (
// RingKey is the key under which we store the store gateways ring in the KVStore.
RingKey = "store-gateway"
// RingNameForServer is the name of the ring used by the store gateway server.
RingNameForServer = "store-gateway"
// RingNameForClient is the name of the ring used by the store gateway client (we need
// a different name to avoid clashing Prometheus metrics when running in single-binary).
RingNameForClient = "store-gateway-client"
// We use a safe default instead of exposing to config option to the user
// in order to simplify the config.
RingNumTokens = 512
)
var (
// BlocksOwnerSync is the operation used to check the authoritative owners of a block
// (replicas included).
BlocksOwnerSync = ring.NewOp([]ring.InstanceState{ring.JOINING, ring.ACTIVE, ring.LEAVING}, func(s ring.InstanceState) bool {
// Extend the replication set only when an instance is LEAVING so that
// their blocks will be loaded sooner on the next authoritative owner(s).
return s == ring.LEAVING
})
// BlocksOwnerRead is the operation used to check the authoritative owners of a block
// (replicas included) that are available for queries (a store-gateway is available for
// queries only when ACTIVE).
BlocksOwnerRead = ring.NewOp([]ring.InstanceState{ring.ACTIVE}, nil)
// BlocksRead is the operation run by the querier to query blocks via the store-gateway.
BlocksRead = ring.NewOp([]ring.InstanceState{ring.ACTIVE}, func(s ring.InstanceState) bool {
// Blocks can only be queried from ACTIVE instances. However, if the block belongs to
// a non-active instance, then we should extend the replication set and try to query it
// from the next ACTIVE instance in the ring (which is expected to have it because a
// store-gateway keeps their previously owned blocks until new owners are ACTIVE).
return s != ring.ACTIVE
})
)
// RingConfig masks the ring lifecycler config which contains
// many options not really required by the store gateways ring. This config
// is used to strip down the config to the minimum, and avoid confusion
// to the user.
type RingConfig struct {
KVStore kv.Config `yaml:"kvstore" doc:"description=The key-value store used to share the hash ring across multiple instances. This option needs be set both on the store-gateway and querier when running in microservices mode."`
HeartbeatPeriod time.Duration `yaml:"heartbeat_period"`
HeartbeatTimeout time.Duration `yaml:"heartbeat_timeout"`
ReplicationFactor int `yaml:"replication_factor"`
TokensFilePath string `yaml:"tokens_file_path"`
ZoneAwarenessEnabled bool `yaml:"zone_awareness_enabled"`
// Wait ring stability.
WaitStabilityMinDuration time.Duration `yaml:"wait_stability_min_duration"`
WaitStabilityMaxDuration time.Duration `yaml:"wait_stability_max_duration"`
// Instance details
InstanceID string `yaml:"instance_id" doc:"hidden"`
InstanceInterfaceNames []string `yaml:"instance_interface_names"`
InstancePort int `yaml:"instance_port" doc:"hidden"`
InstanceAddr string `yaml:"instance_addr" doc:"hidden"`
InstanceZone string `yaml:"instance_availability_zone"`
// Injected internally
ListenPort int `yaml:"-"`
RingCheckPeriod time.Duration `yaml:"-"`
}
// RegisterFlags adds the flags required to config this to the given FlagSet
func (cfg *RingConfig) RegisterFlags(f *flag.FlagSet) {
hostname, err := os.Hostname()
if err != nil {
level.Error(util_log.Logger).Log("msg", "failed to get hostname", "err", err)
os.Exit(1)
}
ringFlagsPrefix := "store-gateway.sharding-ring."
// Ring flags
cfg.KVStore.RegisterFlagsWithPrefix(ringFlagsPrefix, "collectors/", f)
f.DurationVar(&cfg.HeartbeatPeriod, ringFlagsPrefix+"heartbeat-period", 15*time.Second, "Period at which to heartbeat to the ring. 0 = disabled.")
f.DurationVar(&cfg.HeartbeatTimeout, ringFlagsPrefix+"heartbeat-timeout", time.Minute, "The heartbeat timeout after which store gateways are considered unhealthy within the ring. 0 = never (timeout disabled)."+sharedOptionWithQuerier)
f.IntVar(&cfg.ReplicationFactor, ringFlagsPrefix+"replication-factor", 3, "The replication factor to use when sharding blocks."+sharedOptionWithQuerier)
f.StringVar(&cfg.TokensFilePath, ringFlagsPrefix+"tokens-file-path", "", "File path where tokens are stored. If empty, tokens are not stored at shutdown and restored at startup.")
f.BoolVar(&cfg.ZoneAwarenessEnabled, ringFlagsPrefix+"zone-awareness-enabled", false, "True to enable zone-awareness and replicate blocks across different availability zones.")
// Wait stability flags.
f.DurationVar(&cfg.WaitStabilityMinDuration, ringFlagsPrefix+"wait-stability-min-duration", time.Minute, "Minimum time to wait for ring stability at startup. 0 to disable.")
f.DurationVar(&cfg.WaitStabilityMaxDuration, ringFlagsPrefix+"wait-stability-max-duration", 5*time.Minute, "Maximum time to wait for ring stability at startup. If the store-gateway ring keeps changing after this period of time, the store-gateway will start anyway.")
// Instance flags
cfg.InstanceInterfaceNames = []string{"eth0", "en0"}
f.Var((*flagext.StringSlice)(&cfg.InstanceInterfaceNames), ringFlagsPrefix+"instance-interface-names", "Name of network interface to read address from.")
f.StringVar(&cfg.InstanceAddr, ringFlagsPrefix+"instance-addr", "", "IP address to advertise in the ring.")
f.IntVar(&cfg.InstancePort, ringFlagsPrefix+"instance-port", 0, "Port to advertise in the ring (defaults to server.grpc-listen-port).")
f.StringVar(&cfg.InstanceID, ringFlagsPrefix+"instance-id", hostname, "Instance ID to register in the ring.")
f.StringVar(&cfg.InstanceZone, ringFlagsPrefix+"instance-availability-zone", "", "The availability zone where this instance is running. Required if zone-awareness is enabled.")
// Defaults for internal settings.
cfg.RingCheckPeriod = 5 * time.Second
}
func (cfg *RingConfig) ToRingConfig() ring.Config {
rc := ring.Config{}
flagext.DefaultValues(&rc)
rc.KVStore = cfg.KVStore
rc.HeartbeatTimeout = cfg.HeartbeatTimeout
rc.ReplicationFactor = cfg.ReplicationFactor
rc.ZoneAwarenessEnabled = cfg.ZoneAwarenessEnabled
rc.SubringCacheDisabled = true
return rc
}
func (cfg *RingConfig) ToLifecyclerConfig(logger log.Logger) (ring.BasicLifecyclerConfig, error) {
instanceAddr, err := ring.GetInstanceAddr(cfg.InstanceAddr, cfg.InstanceInterfaceNames, logger)
if err != nil {
return ring.BasicLifecyclerConfig{}, err
}
instancePort := ring.GetInstancePort(cfg.InstancePort, cfg.ListenPort)
return ring.BasicLifecyclerConfig{
ID: cfg.InstanceID,
Addr: fmt.Sprintf("%s:%d", instanceAddr, instancePort),
Zone: cfg.InstanceZone,
HeartbeatPeriod: cfg.HeartbeatPeriod,
TokensObservePeriod: 0,
NumTokens: RingNumTokens,
}, nil
} | package storegateway
import (
"flag" |
|
Middleware.js | import colors from 'colors'
import InvalidArgument from '../errors/InvalidArgument.js'
import RouterElement from './RouterElement.js'
/** Middleware element for custom router. */
export default class | extends RouterElement {
/**
* Instanciate Middleware object
*
* @param {string} name Middleware name
*/
constructor(name) {
super(Middleware, [], {})
this.name = undefined
this.middleware = undefined
this.__parseName(name)
}
/**
* Parse middleware name.
*
* @param {string} name Middleware name
*/
__parseName(name) {
if (!name || typeof name !== 'string') {
throw new InvalidArgument(`Middleware.name="${name}" must be a String.`)
}
this.name = name
}
/**
* Load middleware from name and parser configuration
*
* @param {Router} router Express router
* @param {string} path Middleware path
* @param {string} middlewareDir Controller directory
*/
async load(router, path, middlewareDir) {
this.middleware = await this.__loadModule(this.name, middlewareDir)
router.use(path, this.middleware)
}
/**
* Make readable this object
*
* @returns {string} Instance description
*/
toString() {
return `${colors.green(this.name)}`
}
}
| Middleware |
Env.py | class Env:
__table = None
_prev = None
| def __init__(self, n):
self.__table = {}
self._prev = n
def put(self, w, i):
self.__table[w] = i
def get(self, w):
e = self
while e is not None:
found = e.__table.get(w)
if found is not None:
return found
e = e._prev
return None | |
avro_utils.rs | use std::collections::{BTreeMap, HashMap};
use failure::{err_msg, Error};
use avro_rs::schema::{RecordField, Schema};
use avro_rs::types::{ToAvro, Value};
use serde_pickle::value::HashableValue;
use serde_pickle::value::Value as PickleValue;
pub fn avro_value_from_pickle(schema: &Schema, value: PickleValue) -> Result<Value, Error> |
fn from_null(value: &PickleValue) -> Result<Value, Error> {
match value {
PickleValue::None => Ok(Value::Null),
_ => Err(err_msg("not a null")),
}
}
fn from_boolean(value: &PickleValue) -> Result<Value, Error> {
match value {
PickleValue::Bool(b) => Ok(Value::Boolean(*b)),
_ => Err(err_msg("not a bool")),
}
}
fn from_int(value: &PickleValue) -> Result<Value, Error> {
match value {
PickleValue::I64(n) => Ok(Value::Int(*n as i32)),
_ => Err(err_msg("not an int")),
}
}
fn from_long(value: &PickleValue) -> Result<Value, Error> {
match value {
PickleValue::I64(n) => Ok(Value::Long(*n)),
_ => Err(err_msg("not a long")),
}
}
fn from_float(value: &PickleValue) -> Result<Value, Error> {
match value {
PickleValue::F64(x) => Ok(Value::Float(*x as f32)),
_ => Err(err_msg("not a float")),
}
}
fn from_double(value: &PickleValue) -> Result<Value, Error> {
match value {
PickleValue::F64(x) => Ok(Value::Double(*x)),
_ => Err(err_msg("not a double")),
}
}
fn from_bytes(value: PickleValue) -> Result<Value, Error> {
match value {
PickleValue::Bytes(bytes) => Ok(Value::Bytes(bytes)),
PickleValue::String(s) => Ok(Value::Bytes(s.into_bytes())),
_ => Err(err_msg("not a bytes")),
}
}
fn from_string(value: PickleValue) -> Result<Value, Error> {
match value {
PickleValue::String(s) => Ok(Value::String(s)),
PickleValue::Bytes(bytes) => String::from_utf8(bytes)
.map_err(|_| err_msg("not a valid utf-8 string"))
.map(Value::String),
_ => Err(err_msg("not a string")),
}
}
fn from_fixed(size: usize, value: PickleValue) -> Result<Value, Error> {
match value {
PickleValue::Bytes(bytes) => {
if size == bytes.len() {
Ok(Value::Fixed(size, bytes))
} else {
Err(err_msg("fixed size does not match"))
}
},
_ => Err(err_msg("not a fixed")),
}
}
fn from_array(schema: &Schema, value: PickleValue) -> Result<Value, Error> {
match value {
PickleValue::List(values) | PickleValue::Tuple(values) => Ok(Value::Array(
values
.into_iter()
.map(|value| avro_value_from_pickle(schema, value))
.collect::<Result<Vec<_>, _>>()?,
)),
PickleValue::Set(values) | PickleValue::FrozenSet(values) => Ok(Value::Array(
values
.into_iter()
.map(|value| avro_value_from_pickle(schema, value.into_value()))
.collect::<Result<Vec<_>, _>>()?,
)),
_ => Err(err_msg("not an array")),
}
}
fn from_map(schema: &Schema, value: PickleValue) -> Result<Value, Error> {
match value {
PickleValue::Dict(values) => Ok(Value::Map(
values
.into_iter()
.map(|(key, value)| {
if let HashableValue::String(key) = key {
let value = avro_value_from_pickle(schema, value)?;
Ok((key, value))
} else {
Err(err_msg("map key should be string"))
}
})
.collect::<Result<HashMap<_, _>, _>>()?,
)),
_ => Err(err_msg("not a map")),
}
}
fn from_union(schema: &Schema, value: PickleValue) -> Result<Value, Error> {
match value {
PickleValue::None => Ok(Value::Union(None)),
value => Ok(Value::Union(Some(Box::new(avro_value_from_pickle(
schema, value,
)?)))),
}
}
fn from_enum(symbols: &[String], value: PickleValue) -> Result<Value, Error> {
match value {
PickleValue::String(s) => Ok(Value::Enum(
symbols
.iter()
.position(|ref item| item == &&s)
.ok_or_else(|| err_msg("unsupported enum value"))? as i32,
s,
)),
_ => Err(err_msg("not an enum")),
}
}
fn from_record(schema_fields: &[RecordField], value: PickleValue) -> Result<Value, Error> {
match value {
PickleValue::Dict(mut fields) => Ok(Value::Record(
schema_fields
.iter()
.map(|field| {
let value = match fields.remove(&HashableValue::String(field.name.clone())) {
Some(value) => avro_value_from_pickle(&field.schema, value),
None => match fields
.remove(&HashableValue::Bytes(field.name.clone().into_bytes()))
{
Some(value) => avro_value_from_pickle(&field.schema, value),
None => match field.default {
Some(ref value) => Ok(value.clone().avro()),
None => {
Err(err_msg(format!("missing field {} in record", field.name)))
},
},
},
};
value.map(|value| (field.name.clone(), value))
})
.collect::<Result<Vec<_>, _>>()?,
)),
_ => Err(err_msg("not a record")),
}
}
pub fn pickle_value_from_avro(value: Value) -> PickleValue {
match value {
Value::Null => PickleValue::None,
Value::Boolean(b) => PickleValue::Bool(b),
Value::Int(n) => PickleValue::I64(i64::from(n)),
Value::Long(n) => PickleValue::I64(n),
Value::Float(x) => PickleValue::F64(f64::from(x)),
Value::Double(x) => PickleValue::F64(x),
Value::Bytes(bytes) | Value::Fixed(_, bytes) => PickleValue::Bytes(bytes),
Value::String(s) => PickleValue::String(s),
Value::Array(values) => {
PickleValue::List(values.into_iter().map(pickle_value_from_avro).collect())
},
Value::Map(values) => PickleValue::Dict(
values
.into_iter()
.map(|(key, value)| (HashableValue::String(key), pickle_value_from_avro(value)))
.collect::<BTreeMap<_, _>>(),
),
Value::Union(None) => PickleValue::None,
Value::Union(Some(value)) => pickle_value_from_avro(*value),
Value::Record(fields) => PickleValue::Dict(
fields
.into_iter()
.map(|(key, value)| (HashableValue::String(key), pickle_value_from_avro(value)))
.collect::<BTreeMap<_, _>>(),
),
Value::Enum(_, repr) => PickleValue::String(repr),
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::collections::BTreeMap;
#[test]
fn pickle_int() {
let avro_value = avro_value_from_pickle(&Schema::Int, PickleValue::I64(42));
assert!(avro_value.is_ok());
assert_eq!(avro_value.unwrap(), Value::Int(42));
}
#[test]
fn pickle_record() {
let schema = Schema::parse_str(r#"
{"namespace": "test", "type": "record", "name": "Test", "fields": [{"type": {"type": "string"}, "name": "field"}]}
"#).unwrap();
let mut record = BTreeMap::new();
record.insert(
HashableValue::Bytes("field".as_bytes().to_owned()),
PickleValue::String("foo".to_owned()),
);
let avro_value = avro_value_from_pickle(&schema, PickleValue::Dict(record));
if let Ok(Value::Record(fields)) = avro_value {
assert_eq!(fields.len(), 1);
assert_eq!(fields[0].1, Value::String("foo".to_owned()));
} else {
assert!(false);
}
}
}
| {
match *schema {
Schema::Null => from_null(&value),
Schema::Boolean => from_boolean(&value),
Schema::Int => from_int(&value),
Schema::Long => from_long(&value),
Schema::Float => from_float(&value),
Schema::Double => from_double(&value),
Schema::Bytes => from_bytes(value),
Schema::String => from_string(value),
Schema::Fixed { size, .. } => from_fixed(size, value),
Schema::Array(ref inner) => from_array(inner, value),
Schema::Map(ref inner) => from_map(inner, value),
Schema::Union(ref inner) => from_union(inner, value),
Schema::Record { ref fields, .. } => from_record(fields, value),
Schema::Enum { ref symbols, .. } => from_enum(symbols, value),
}
} |
multi_scopic.py | """
The core part of the SOTA model of CPSC2019,
branched, and has different scope (in terms of dilation) in each branch
"""
from copy import deepcopy
from itertools import repeat
from collections import OrderedDict
from typing import Union, Optional, Sequence, NoReturn
import numpy as np
np.set_printoptions(precision=5, suppress=True)
import torch
from torch import nn
from torch import Tensor
from ...cfg import CFG, DEFAULTS
from ...utils.utils_nn import compute_module_size, SizeMixin
from ...utils.misc import dict_to_str
from ...models._nets import (
Conv_Bn_Activation,
DownSample,
NonLocalBlock, SEBlock, GlobalContextBlock,
)
if DEFAULTS.torch_dtype == torch.float64:
torch.set_default_tensor_type(torch.DoubleTensor)
__all__ = [
"MultiScopicCNN",
"MultiScopicBasicBlock",
"MultiScopicBranch",
]
class MultiScopicBasicBlock(SizeMixin, nn.Sequential):
""" finished, checked,
basic building block of the CNN part of the SOTA model
from CPSC2019 challenge (entry 0416)
(conv -> activation) * N --> bn --> down_sample
"""
__DEBUG__ = False
__name__ = "MultiScopicBasicBlock"
def __init__(self,
in_channels:int,
scopes:Sequence[int],
num_filters:Union[int,Sequence[int]],
filter_lengths:Union[int,Sequence[int]],
subsample_length:int,
groups:int=1,
**config) -> NoReturn:
""" finished, checked,
Parameters
----------
in_channels: int,
number of channels in the input
scopes: sequence of int,
scopes of the convolutional layers, via `dilation`
num_filters: int or sequence of int,
number of filters of the convolutional layer(s)
filter_lengths: int or sequence of int,
filter length(s) (kernel size(s)) of the convolutional layer(s)
subsample_length: int,
subsample length (ratio) at the last layer of the block
"""
super().__init__()
self.__in_channels = in_channels
self.__scopes = scopes
self.__num_convs = len(self.__scopes)
if isinstance(num_filters, int):
self.__out_channels = list(repeat(num_filters, self.__num_convs))
else:
self.__out_channels = num_filters
assert len(self.__out_channels) == self.__num_convs, \
f"`scopes` indicates {self.__num_convs} convolutional layers, while `num_filters` indicates {len(self.__out_channels)}"
if isinstance(filter_lengths, int):
self.__filter_lengths = list(repeat(filter_lengths, self.__num_convs))
else:
self.__filter_lengths = filter_lengths
assert len(self.__filter_lengths) == self.__num_convs, \
f"`scopes` indicates {self.__num_convs} convolutional layers, while `filter_lengths` indicates {len(self.__filter_lengths)}"
self.__subsample_length = subsample_length
self.__groups = groups
self.config = CFG(deepcopy(config))
conv_in_channels = self.__in_channels
for idx in range(self.__num_convs):
self.add_module(
f"ca_{idx}",
Conv_Bn_Activation(
in_channels=conv_in_channels,
out_channels=self.__out_channels[idx],
kernel_size=self.__filter_lengths[idx],
stride=1,
dilation=self.__scopes[idx],
groups=self.__groups,
batch_norm=self.config.batch_norm,
# kw_bn=self.config.kw_bn,
activation=self.config.activation,
kw_activation=self.config.kw_activation,
kernel_initializer=self.config.kernel_initializer,
kw_initializer=self.config.kw_initializer,
bias=self.config.bias,
)
)
conv_in_channels = self.__out_channels[idx]
self.add_module(
"bn",
nn.BatchNorm1d(self.__out_channels[-1])
)
self.add_module(
"down",
DownSample(
down_scale=self.__subsample_length,
in_channels=self.__out_channels[-1],
groups=self.__groups,
# padding=
batch_norm=False,
mode=self.config.subsample_mode,
)
)
if self.config.dropout > 0:
self.add_module(
"dropout",
nn.Dropout(self.config.dropout, inplace=False)
)
def forward(self, input:Tensor) -> Tensor:
""" finished, checked,
Parameters
----------
input: Tensor,
of shape (batch_size, n_channels, seq_len)
Returns
-------
output: Tensor,
of shape (batch_size, n_channels, seq_len)
"""
output = super().forward(input)
return output
def compute_output_shape(self, seq_len:Optional[int]=None, batch_size:Optional[int]=None) -> Sequence[Union[int, None]]:
""" finished, checked,
Parameters
----------
seq_len: int,
length of the 1d sequence
batch_size: int, optional,
the batch size, can be None
Returns
-------
output_shape: sequence,
the output shape of this block, given `seq_len` and `batch_size`
"""
_seq_len = seq_len
for idx, module in enumerate(self):
if idx == self.__num_convs: # bn layer
continue
elif self.config.dropout > 0 and idx == len(self)-1: # dropout layer
continue
output_shape = module.compute_output_shape(_seq_len, batch_size)
_, _, _seq_len = output_shape
return output_shape
class MultiScopicBranch(SizeMixin, nn.Sequential):
""" finished, checked,
branch path of the CNN part of the SOTA model
from CPSC2019 challenge (entry 0416)
"""
__DEBUG__ = False
__name__ = "MultiScopicBranch"
def __init__(self,
in_channels:int,
scopes:Sequence[Sequence[int]],
num_filters:Union[Sequence[int],Sequence[Sequence[int]]],
filter_lengths:Union[Sequence[int],Sequence[Sequence[int]]],
subsample_lengths:Union[int,Sequence[int]],
groups:int=1,
**config) -> NoReturn:
""" finished, checked,
Parameters
----------
in_channels: int,
number of features (channels) of the input
scopes: sequence of sequences of int,
scopes (in terms of `dilation`) for the convolutional layers,
each sequence of int is for one branch
num_filters: sequence of int, or sequence of sequences of int,
number of filters for the convolutional layers,
if is sequence of int,
then convolutionaly layers in one branch will have the same number of filters
filter_lengths: sequence of int, or sequence of sequences of int,
filter length (kernel size) of the convolutional layers,
if is sequence of int,
then convolutionaly layers in one branch will have the same filter length
subsample_lengths: int, or sequence of int,
subsample length (stride) of the convolutional layers,
if is sequence of int,
then convolutionaly layers in one branch will have the same subsample length
groups: int, default 1,
connection pattern (of channels) of the inputs and outputs
config: dict,
other hyper-parameters, including
dropout, activation choices, weight initializer, etc.
"""
super().__init__()
self.__in_channels = in_channels
self.__scopes = scopes
self.__num_blocks = len(self.__scopes)
self.__num_filters = num_filters
assert len(self.__num_filters) == self.__num_blocks, \
f"`scopes` indicates {self.__num_blocks} `MultiScopicBasicBlock`s, while `num_filters` indicates {len(self.__num_filters)}"
self.__filter_lengths = filter_lengths
assert len(self.__filter_lengths) == self.__num_blocks, \
f"`scopes` indicates {self.__num_blocks} `MultiScopicBasicBlock`s, while `filter_lengths` indicates {len(self.__filter_lengths)}"
if isinstance(subsample_lengths, int):
self.__subsample_lengths = list(repeat(subsample_lengths, self.__num_blocks))
else:
self.__subsample_lengths = filter_lengths
assert len(self.__subsample_lengths) == self.__num_blocks, \
f"`scopes` indicates {self.__num_blocks} `MultiScopicBasicBlock`s, while `subsample_lengths` indicates {len(self.__subsample_lengths)}"
self.__groups = groups
self.config = CFG(deepcopy(config))
block_in_channels = self.__in_channels
for idx in range(self.__num_blocks):
self.add_module(
f"block_{idx}",
MultiScopicBasicBlock(
in_channels=block_in_channels,
scopes=self.__scopes[idx],
num_filters=self.__num_filters[idx],
filter_lengths=self.__filter_lengths[idx],
subsample_length=self.__subsample_lengths[idx],
groups=self.__groups,
dropout=self.config.dropouts[idx],
**(self.config.block)
)
)
block_in_channels = self.__num_filters[idx]
def forward(self, input:Tensor) -> Tensor: | input: Tensor,
of shape (batch_size, n_channels, seq_len)
Returns
-------
output: Tensor,
of shape (batch_size, n_channels, seq_len)
"""
output = super().forward(input)
return output
def compute_output_shape(self, seq_len:Optional[int]=None, batch_size:Optional[int]=None) -> Sequence[Union[int, None]]:
""" finished, checked,
Parameters
----------
seq_len: int,
length of the 1d sequence
batch_size: int, optional,
the batch size, can be None
Returns
-------
output_shape: sequence,
the output shape of this block, given `seq_len` and `batch_size`
"""
_seq_len = seq_len
for idx, module in enumerate(self):
output_shape = module.compute_output_shape(_seq_len, batch_size)
_, _, _seq_len = output_shape
return output_shape
class MultiScopicCNN(SizeMixin, nn.Module):
""" finished, checked,
CNN part of the SOTA model from CPSC2019 challenge (entry 0416)
"""
__DEBUG__ = False
__name__ = "MultiScopicCNN"
def __init__(self, in_channels:int, **config) -> NoReturn:
""" finished, checked,
Parameters
----------
in_channels: int,
number of channels in the input
config: dict,
other hyper-parameters of the Module, ref. corresponding config file
key word arguments that have to be set:
scopes: sequence of sequences of sequences of int,
scopes (in terms of dilation) of each convolution
num_filters: sequence of sequences (of int or of sequences of int),
number of filters of the convolutional layers,
with granularity to each block of each branch,
or to each convolution of each block of each branch
filter_lengths: sequence of sequences (of int or of sequences of int),
filter length(s) (kernel size(s)) of the convolutions,
with granularity to each block of each branch,
or to each convolution of each block of each branch
subsample_lengths: sequence of int or sequence of sequences of int,
subsampling length(s) (ratio(s)) of all blocks,
with granularity to each branch or to each block of each branch,
each subsamples after the last convolution of each block
dropouts: sequence of int or sequence of sequences of int,
dropout rates of all blocks,
with granularity to each branch or to each block of each branch,
each dropouts at the last of each block
groups: int,
connection pattern (of channels) of the inputs and outputs
block: dict,
other parameters that can be set for the building blocks
for a full list of configurable parameters, ref. corr. config file
"""
super().__init__()
self.__in_channels = in_channels
self.config = CFG(deepcopy(config))
self.__scopes = self.config.scopes
self.__num_branches = len(self.__scopes)
if self.__DEBUG__:
print(f"configuration of {self.__name__} is as follows\n{dict_to_str(self.config)}")
self.branches = nn.ModuleDict()
for idx in range(self.__num_branches):
self.branches[f"branch_{idx}"] = \
MultiScopicBranch(
in_channels=self.__in_channels,
scopes=self.__scopes[idx],
num_filters=self.config.num_filters[idx],
filter_lengths=self.config.filter_lengths[idx],
subsample_lengths=self.config.subsample_lengths[idx],
groups=self.config.groups,
dropouts=self.config.dropouts[idx],
block=self.config.block, # a dict
)
def forward(self, input:Tensor) -> Tensor:
""" finished, checked,
Parameters
----------
input: Tensor,
of shape (batch_size, n_channels, seq_len)
Returns
-------
output: Tensor,
of shape (batch_size, n_channels, seq_len)
"""
branch_out = OrderedDict()
for idx in range(self.__num_branches):
key = f"branch_{idx}"
branch_out[key] = self.branches[key].forward(input)
output = torch.cat(
[branch_out[f"branch_{idx}"] for idx in range(self.__num_branches)],
dim=1, # along channels
)
return output
def compute_output_shape(self, seq_len:Optional[int]=None, batch_size:Optional[int]=None) -> Sequence[Union[int, None]]:
""" finished, checked,
Parameters
----------
seq_len: int,
length of the 1d sequence
batch_size: int, optional,
the batch size, can be None
Returns
-------
output_shape: sequence,
the output shape of this block, given `seq_len` and `batch_size`
"""
out_channels = 0
for idx in range(self.__num_branches):
key = f"branch_{idx}"
_, _branch_oc, _seq_len = \
self.branches[key].compute_output_shape(seq_len, batch_size)
out_channels += _branch_oc
output_shape = (batch_size, out_channels, _seq_len)
return output_shape | """ finished, checked,
Parameters
---------- |
util.ts | import { Ajv } from 'ajv';
import { OpenAPIV3 } from '../../framework/types';
import ajv = require('ajv');
import { OpenAPIFramework } from '../../framework';
export function dereferenceParameter(
apiDocs: OpenAPIV3.Document,
parameter: OpenAPIV3.ReferenceObject | OpenAPIV3.ParameterObject,
): OpenAPIV3.ParameterObject {
// TODO this should recurse or use ajv.getSchema - if implemented as such, may want to cache the result
// as it is called by query.paraer and req.parameter mutator
if (is$Ref(parameter)) {
const p = <OpenAPIV3.ReferenceObject>parameter;
const id = p.$ref.replace(/^.+\//i, '');
return <OpenAPIV3.ParameterObject>apiDocs.components.parameters[id];
} else {
return <OpenAPIV3.ParameterObject>parameter;
}
}
export function normalizeParameter(
ajv: Ajv,
parameter: OpenAPIV3.ParameterObject,
): {
name: string;
schema: OpenAPIV3.SchemaObject;
} {
let schema;
if (is$Ref(parameter)) {
schema = dereferenceSchema(ajv, parameter['$ref']);
} else if (parameter?.schema?.['$ref']) {
schema = dereferenceSchema(ajv, parameter.schema['$ref']);
} else {
schema = parameter.schema;
}
if (!schema && parameter.content) {
const contentType = Object.keys(parameter.content)[0];
schema = parameter.content?.[contentType]?.schema;
}
if (!schema) {
schema = parameter;
}
applyParameterStyle(parameter);
applyParameterExplode(parameter);
const name =
parameter.in === 'header' ? parameter.name.toLowerCase() : parameter.name;
return { name, schema };
}
function applyParameterStyle(param: OpenAPIV3.ParameterObject) {
if (!param.style) {
if (param.in === 'path') {
param.style = 'simple';
} else if (param.in === 'query') {
param.style = 'form';
} else if (param.style === 'header') {
param.style = 'simple';
} else if (param.style === 'cookie') {
param.style = 'form';
}
}
}
function | (param: OpenAPIV3.ParameterObject) {
if (param.explode == null) {
if (param.in === 'path') {
param.explode = false;
} else if (param.in === 'query') {
param.explode = true;
} else if (param.style === 'header') {
param.explode = false;
} else if (param.style === 'cookie') {
param.explode = true;
}
}
}
export function dereferenceSchema(ajv: Ajv, ref: string) {
// TODO cache schemas - so that we don't recurse every time
const derefSchema = ajv.getSchema(ref);
if (derefSchema?.['$ref']) {
return dereferenceSchema(ajv, '');
}
return derefSchema?.schema;
}
function is$Ref(
parameter: OpenAPIV3.ParameterObject | OpenAPIV3.ReferenceObject,
): boolean {
return parameter.hasOwnProperty('$ref');
}
| applyParameterExplode |
lib.rs | ///!
///! # http-client-provider
///! This library exposes the HTTP client capability to waSCC-compliant actors
mod http_client;
#[macro_use]
extern crate wascc_codec as codec;
#[macro_use]
extern crate log;
use codec::capabilities::{
CapabilityDescriptor, CapabilityProvider, Dispatcher, NullDispatcher, OperationDirection,
OP_GET_CAPABILITY_DESCRIPTOR,
};
use codec::core::{CapabilityConfiguration, OP_BIND_ACTOR, OP_REMOVE_ACTOR};
use codec::http::{Request, OP_PERFORM_REQUEST};
use codec::{deserialize, serialize, SYSTEM_ACTOR};
use std::collections::HashMap;
use std::error::Error;
use std::sync::{Arc, RwLock};
use std::time::Duration;
const CAPABILITY_ID: &str = "wascc:http_client";
const VERSION: &str = env!("CARGO_PKG_VERSION");
const REVISION: u32 = 0;
#[cfg(not(feature = "static_plugin"))]
capability_provider!(HttpClientProvider, HttpClientProvider::new);
/// An implementation HTTP client provider using reqwest.
pub struct HttpClientProvider {
dispatcher: Arc<RwLock<Box<dyn Dispatcher>>>,
clients: Arc<RwLock<HashMap<String, reqwest::Client>>>,
runtime: tokio::runtime::Runtime,
}
impl HttpClientProvider {
/// Create a new HTTP client provider.
pub fn new() -> Self {
Self::default()
}
/// Configure the HTTP client for a particular actor.
/// Each actor gets a dedicated client so that we can take advantage of connection pooling.
/// TODO: This needs to set things like timeouts, redirects, etc.
fn | (&self, config: CapabilityConfiguration) -> Result<Vec<u8>, Box<dyn Error>> {
let timeout = match config.values.get("timeout") {
Some(v) => {
let parsed: u64 = v.parse()?;
Duration::new(parsed, 0)
}
None => Duration::new(30, 0),
};
let redirect_policy = match config.values.get("max_redirects") {
Some(v) => {
let parsed: usize = v.parse()?;
reqwest::redirect::Policy::limited(parsed)
}
None => reqwest::redirect::Policy::default(),
};
self.clients.write().unwrap().insert(
config.module.clone(),
reqwest::Client::builder()
.timeout(timeout)
.redirect(redirect_policy)
.build()?,
);
Ok(vec![])
}
/// Clean up resources when a actor disconnects.
/// This removes the HTTP client associated with an actor.
fn deconfigure(&self, config: CapabilityConfiguration) -> Result<Vec<u8>, Box<dyn Error>> {
if self
.clients
.write()
.unwrap()
.remove(&config.module)
.is_none()
{
warn!(
"attempted to remove non-existent actor: {}",
config.module.as_str()
);
}
Ok(vec![])
}
/// Make a HTTP request.
fn request(&self, actor: &str, msg: Request) -> Result<Vec<u8>, Box<dyn Error>> {
let lock = self.clients.read().unwrap();
let client = lock.get(actor).unwrap();
self.runtime
.handle()
.block_on(async { http_client::request(&client, msg).await })
}
fn get_descriptor(&self) -> Result<Vec<u8>, Box<dyn Error>> {
use OperationDirection::ToProvider;
Ok(serialize(
CapabilityDescriptor::builder()
.id(CAPABILITY_ID)
.name("wasCC HTTP Client Provider")
.long_description("A http client provider")
.version(VERSION)
.revision(REVISION)
.with_operation(OP_PERFORM_REQUEST, ToProvider, "Perform a http request")
.build(),
)?)
}
}
impl Default for HttpClientProvider {
fn default() -> Self {
let _ = env_logger::builder().format_module_path(false).try_init();
let r = tokio::runtime::Builder::new()
.threaded_scheduler()
.enable_all()
.build()
.unwrap();
HttpClientProvider {
dispatcher: Arc::new(RwLock::new(Box::new(NullDispatcher::new()))),
clients: Arc::new(RwLock::new(HashMap::new())),
runtime: r,
}
}
}
/// Implements the CapabilityProvider interface.
impl CapabilityProvider for HttpClientProvider {
fn configure_dispatch(&self, dispatcher: Box<dyn Dispatcher>) -> Result<(), Box<dyn Error>> {
info!("Dispatcher configured");
let mut lock = self.dispatcher.write().unwrap();
*lock = dispatcher;
Ok(())
}
/// Handle all calls from actors.
fn handle_call(&self, actor: &str, op: &str, msg: &[u8]) -> Result<Vec<u8>, Box<dyn Error>> {
match op {
OP_BIND_ACTOR if actor == SYSTEM_ACTOR => self.configure(deserialize(msg)?),
OP_REMOVE_ACTOR if actor == SYSTEM_ACTOR => self.deconfigure(deserialize(msg)?),
OP_PERFORM_REQUEST => self.request(actor, deserialize(msg)?),
OP_GET_CAPABILITY_DESCRIPTOR => self.get_descriptor(),
_ => Err(format!("Unknown operation: {}", op).into()),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use codec::deserialize;
use codec::http::Response;
use mockito::mock;
#[test]
fn test_request() {
let _ = env_logger::try_init();
let request = Request {
method: "GET".to_string(),
path: mockito::server_url(),
header: HashMap::new(),
body: vec![],
query_string: String::new(),
};
let _m = mock("GET", "/")
.with_header("content-type", "text/plain")
.with_body("ohai")
.create();
let hp = HttpClientProvider::new();
hp.configure(CapabilityConfiguration {
module: "test".to_string(),
values: HashMap::new(),
})
.unwrap();
let result = hp.request("test", request).unwrap();
let response: Response = deserialize(result.as_slice()).unwrap();
assert_eq!(response.status_code, 200);
}
}
| configure |
role_command.go | // Copyright 2016 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package command
import (
"fmt"
"github.com/coreos/etcd/clientv3"
"github.com/spf13/cobra"
"golang.org/x/net/context"
)
var (
grantPermissionPrefix bool
permFromKey bool
)
// NewRoleCommand returns the cobra command for "role".
func NewRoleCommand() *cobra.Command {
ac := &cobra.Command{
Use: "role <subcommand>",
Short: "Role related commands",
}
ac.AddCommand(newRoleAddCommand())
ac.AddCommand(newRoleDeleteCommand())
ac.AddCommand(newRoleGetCommand())
ac.AddCommand(newRoleListCommand())
ac.AddCommand(newRoleGrantPermissionCommand())
ac.AddCommand(newRoleRevokePermissionCommand())
return ac
}
func newRoleAddCommand() *cobra.Command {
return &cobra.Command{
Use: "add <role name>",
Short: "Adds a new role",
Run: roleAddCommandFunc,
}
}
func newRoleDeleteCommand() *cobra.Command {
return &cobra.Command{
Use: "delete <role name>",
Short: "Deletes a role",
Run: roleDeleteCommandFunc,
}
}
func newRoleGetCommand() *cobra.Command {
return &cobra.Command{
Use: "get <role name>",
Short: "Gets detailed information of a role",
Run: roleGetCommandFunc,
}
}
func newRoleListCommand() *cobra.Command {
return &cobra.Command{
Use: "list",
Short: "Lists all roles",
Run: roleListCommandFunc,
}
}
func newRoleGrantPermissionCommand() *cobra.Command {
cmd := &cobra.Command{
Use: "grant-permission [options] <role name> <permission type> <key> [endkey]",
Short: "Grants a key to a role",
Run: roleGrantPermissionCommandFunc,
}
cmd.Flags().BoolVar(&grantPermissionPrefix, "prefix", false, "grant a prefix permission")
cmd.Flags().BoolVar(&permFromKey, "from-key", false, "grant a permission of keys that are greater than or equal to the given key using byte compare")
return cmd
}
func newRoleRevokePermissionCommand() *cobra.Command {
cmd := &cobra.Command{
Use: "revoke-permission <role name> <key> [endkey]",
Short: "Revokes a key from a role",
Run: roleRevokePermissionCommandFunc,
}
cmd.Flags().BoolVar(&permFromKey, "from-key", false, "grant a permission of keys that are greater than or equal to the given key using byte compare")
return cmd
}
// roleAddCommandFunc executes the "role add" command.
func roleAddCommandFunc(cmd *cobra.Command, args []string) {
if len(args) != 1 {
ExitWithError(ExitBadArgs, fmt.Errorf("role add command requires role name as its argument."))
}
resp, err := mustClientFromCmd(cmd).Auth.RoleAdd(context.TODO(), args[0])
if err != nil {
ExitWithError(ExitError, err)
}
display.RoleAdd(args[0], *resp)
}
// roleDeleteCommandFunc executes the "role delete" command.
func roleDeleteCommandFunc(cmd *cobra.Command, args []string) {
if len(args) != 1 {
ExitWithError(ExitBadArgs, fmt.Errorf("role delete command requires role name as its argument."))
}
resp, err := mustClientFromCmd(cmd).Auth.RoleDelete(context.TODO(), args[0])
if err != nil {
ExitWithError(ExitError, err)
}
display.RoleDelete(args[0], *resp)
}
// roleGetCommandFunc executes the "role get" command.
func roleGetCommandFunc(cmd *cobra.Command, args []string) {
if len(args) != 1 {
ExitWithError(ExitBadArgs, fmt.Errorf("role get command requires role name as its argument."))
}
name := args[0]
resp, err := mustClientFromCmd(cmd).Auth.RoleGet(context.TODO(), name)
if err != nil {
ExitWithError(ExitError, err)
}
display.RoleGet(name, *resp)
}
// roleListCommandFunc executes the "role list" command.
func roleListCommandFunc(cmd *cobra.Command, args []string) {
if len(args) != 0 {
ExitWithError(ExitBadArgs, fmt.Errorf("role list command requires no arguments."))
}
resp, err := mustClientFromCmd(cmd).Auth.RoleList(context.TODO())
if err != nil {
ExitWithError(ExitError, err)
}
display.RoleList(*resp)
}
// roleGrantPermissionCommandFunc executes the "role grant-permission" command.
func roleGrantPermissionCommandFunc(cmd *cobra.Command, args []string) {
if len(args) < 3 {
ExitWithError(ExitBadArgs, fmt.Errorf("role grant command requires role name, permission type, and key [endkey] as its argument."))
}
perm, err := clientv3.StrToPermissionType(args[1])
if err != nil {
ExitWithError(ExitBadArgs, err)
}
rangeEnd := ""
if 4 <= len(args) {
if grantPermissionPrefix {
ExitWithError(ExitBadArgs, fmt.Errorf("don't pass both of --prefix option and range end to grant permission command"))
}
if permFromKey {
ExitWithError(ExitBadArgs, fmt.Errorf("don't pass both of --from-key option and range end to grant permission command"))
}
rangeEnd = args[3]
} else if grantPermissionPrefix {
if permFromKey {
ExitWithError(ExitBadArgs, fmt.Errorf("don't pass both of --from-key option and --prefix option to grant permission command"))
}
rangeEnd = clientv3.GetPrefixRangeEnd(args[2])
} else if permFromKey {
rangeEnd = "\x00"
}
resp, err := mustClientFromCmd(cmd).Auth.RoleGrantPermission(context.TODO(), args[0], args[2], rangeEnd, perm)
if err != nil {
ExitWithError(ExitError, err)
}
display.RoleGrantPermission(args[0], *resp)
}
// roleRevokePermissionCommandFunc executes the "role revoke-permission" command.
func | (cmd *cobra.Command, args []string) {
if len(args) < 2 {
ExitWithError(ExitBadArgs, fmt.Errorf("role revoke-permission command requires role name and key [endkey] as its argument."))
}
rangeEnd := ""
if 3 <= len(args) {
rangeEnd = args[2]
} else if permFromKey {
rangeEnd = "\x00"
}
resp, err := mustClientFromCmd(cmd).Auth.RoleRevokePermission(context.TODO(), args[0], args[1], rangeEnd)
if err != nil {
ExitWithError(ExitError, err)
}
display.RoleRevokePermission(args[0], args[1], rangeEnd, *resp)
}
| roleRevokePermissionCommandFunc |
mod.rs | //! BLAKE2 family
//!
//! # General info
//!
//! | Name | Digest size | Block size | Rounds | Structure | Reference |
//! | ----------- | ----------: | ---------: | -----: | -------------------- | ----------------------- |
//! | BLAKE2s-128 | 128 bits | 512 bits | 10 | [Merkle–Damgård][md] | [FIPS 180-4][fips180-4] |
//! | BLAKE2s-160 | 160 bits | 512 bits | 10 | [Merkle–Damgård][md] | [FIPS 180-4][fips180-4] |
//! | BLAKE2s-224 | 224 bits | 512 bits | 10 | [Merkle–Damgård][md] | [FIPS 180-4][fips180-4] |
//! | BLAKE2s-256 | 256 bits | 512 bits | 10 | [Merkle–Damgård][md] | [FIPS 180-4][fips180-4] |
//! | BLAKE2b-160 | 160 bits | 1024 bits | 12 | [Merkle–Damgård][md] | [FIPS 180-4][fips180-4] |
//! | BLAKE2b-256 | 256 bits | 1024 bits | 12 | [Merkle–Damgård][md] | [FIPS 180-4][fips180-4] |
//! | BLAKE2b-384 | 384 bits | 1024 bits | 12 | [Merkle–Damgård][md] | [FIPS 180-4][fips180-4] |
//! | BLAKE2b-512 | 512 bits | 1024 bits | 12 | [Merkle–Damgård][md] | [FIPS 180-4][fips180-4] |
//!
//! [md]: https://en.wikipedia.org/wiki/Merkle%E2%80%93Damg%C3%A5rd_construction
#![allow(doc_markdown)]
use core::marker::PhantomData;
use core::ptr;
use core::ops::Mul;
use static_buffer::{FixedBuffer64, FixedBuffer128, FixedBuf};
use byteorder::{ByteOrder, LittleEndian};
use generic_array::ArrayLength;
use typenum::uint::Unsigned;
use typenum::consts::{U8, U16, U20, U28, U32, U48, U64, U128};
use Digest;
use wrapping::*;
const BLAKE2S_INIT: [w32; 8] = [W(0x6a09e667),
W(0xbb67ae85),
W(0x3c6ef372),
W(0xa54ff53a),
W(0x510e527f),
W(0x9b05688c),
W(0x1f83d9ab),
W(0x5be0cd19)];
const BLAKE2B_INIT: [w64; 8] = [W(0x6a09e667f3bcc908),
W(0xbb67ae8584caa73b),
W(0x3c6ef372fe94f82b),
W(0xa54ff53a5f1d36f1),
W(0x510e527fade682d1),
W(0x9b05688c2b3e6c1f),
W(0x1f83d9abfb41bd6b),
W(0x5be0cd19137e2179)];
const SIGMA: [[usize; 16]; 12] = [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
[14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3],
[11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4],
[7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8],
[9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13],
[2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9],
[12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11],
[13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10],
[6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5],
[10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0],
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
[14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3]];
macro_rules! G {
($v:ident, $a:expr, $b:expr, $c:expr, $d:expr, $x:expr, $y:expr) => {
$v[$a] += $v[$b] + $x;
$v[$d] = ($v[$d] ^ $v[$a]).rotate_right(R1);
$v[$c] += $v[$d];
$v[$b] = ($v[$b] ^ $v[$c]).rotate_right(R2);
$v[$a] += $v[$b] + $y;
$v[$d] = ($v[$d] ^ $v[$a]).rotate_right(R3);
$v[$c] += $v[$d];
$v[$b] = ($v[$b] ^ $v[$c]).rotate_right(R4);
}
}
#[derive(Copy, Clone, Debug)]
struct State<Word: Copy> {
h: [W<Word>; 8],
}
macro_rules! blake2_state {
($word:ty, $read:ident, $init:ident, $rounds:expr, $r1:expr, $r2:expr, $r3:expr, $r4:expr) => {
impl State<$word> {
fn new(key_size: u8, size: u8) -> Self {
let mut state = $init;
state[0] ^= W(0x01010000 ^ ((key_size as $word) << 8) ^ (size as $word));
State { h: state }
}
#[inline]
fn compress(&mut self, input: &[u8], len: Length<$word>, last: bool) {
debug_assert!(input.len() % 16 == 0);
const R1: u32 = $r1;
const R2: u32 = $r2;
const R3: u32 = $r3;
const R4: u32 = $r4;
let mut message = [W(0); 16];
for (word, chunk) in message.iter_mut().zip(input.chunks(input.len() / 16)) {
*word = W(LittleEndian::$read(chunk));
}
let mut v = [W(0); 16];
for (v, state) in v.iter_mut().zip(self.h.iter().chain(&$init)) {
*v = *state;
}
v[12].0 ^= len.0.to_le();
v[13].0 ^= len.1.to_le();
if last {
v[14] = !v[14];
}
for sigma in &SIGMA[..$rounds] {
G!(v, 0, 4, 8, 12, message[sigma[0]], message[sigma[1]]);
G!(v, 1, 5, 9, 13, message[sigma[2]], message[sigma[3]]);
G!(v, 2, 6, 10, 14, message[sigma[4]], message[sigma[5]]);
G!(v, 3, 7, 11, 15, message[sigma[6]], message[sigma[7]]);
G!(v, 0, 5, 10, 15, message[sigma[8]], message[sigma[9]]);
G!(v, 1, 6, 11, 12, message[sigma[10]], message[sigma[11]]);
G!(v, 2, 7, 8, 13, message[sigma[12]], message[sigma[13]]);
G!(v, 3, 4, 9, 14, message[sigma[14]], message[sigma[15]]);
}
let (head, tail) = v.split_at(8);
let vs = head.iter().zip(tail);
for (h, (&v1, &v2)) in self.h.iter_mut().zip(vs) {
*h ^= v1 ^ v2;
}
}
}
}
}
blake2_state!(u32, read_u32, BLAKE2S_INIT, 10, 16, 12, 8, 7);
blake2_state!(u64, read_u64, BLAKE2B_INIT, 12, 32, 24, 16, 63);
#[derive(Copy, Clone, Debug)]
struct Length<T>(T, T);
impl Length<u32> {
fn increment(&mut self, val | {
self.0 = self.0.wrapping_add(val as u32);
}
}
impl Length<u64> {
fn increment(&mut self, val: usize) {
self.0 = self.0.wrapping_add(val as u64);
}
}
macro_rules! blake2 {
($(#[$attr:meta])* struct $name:ident<$word:ty>, $buf:ty, $bsize:ty, $wsize: expr) => {
#[derive(Clone)]
$(#[$attr])*
pub struct $name<Size: Unsigned + Clone> {
state: State<$word>,
len: Length<$word>,
buffer: $buf,
_phantom: PhantomData<Size>,
}
impl<Size> $name<Size>
where Size: Unsigned + Clone + ArrayLength<u8> + Mul<U8>,
<Size as Mul<U8>>::Output: ArrayLength<u8>
{
/// Default
pub fn default() -> Self {
Self::with_key(&[])
}
/// Initialize BLAKE2 hash function with custom key
pub fn with_key<K: AsRef<[u8]>>(key: K) -> Self {
let key = key.as_ref();
assert!(key.len() <= $wsize);
let mut ret = $name {
state: State::<$word>::new(key.len() as u8, Size::to_u8()),
len: Length(0, 0),
buffer: <$buf>::new(),
_phantom: PhantomData
};
if !key.is_empty() {
ret.update(key);
ret.buffer.zero_until(<$buf>::size());
}
ret
}
}
impl<Size> Digest for $name<Size>
where Size: ArrayLength<u8> + Mul<U8> + Clone,
<Size as Mul<U8>>::Output: ArrayLength<u8>
{
type OutputBits = <Self::OutputBytes as Mul<U8>>::Output;
type OutputBytes = Size;
type BlockSize = $bsize;
fn update<T: AsRef<[u8]>>(&mut self, input: T) {
let input = input.as_ref();
let state = &mut self.state;
let len = &mut self.len;
self.buffer.input(input, |d| {
len.increment(d.len());
state.compress(d, *len, false);
})
}
fn result<T: AsMut<[u8]>>(mut self, mut out: T) {
let rest = self.buffer.position();
self.len.increment(rest);
self.buffer.zero_until(<$buf>::size());
self.state.compress(self.buffer.full_buffer(), self.len, true);
let mut out = out.as_mut();
assert!(out.len() >= Self::output_bytes());
unsafe {
ptr::copy_nonoverlapping(self.state.h.as_ptr() as *const u8,
out.as_mut_ptr(),
Self::output_bytes())
}
}
}
}
}
blake2! {
/// General BLAKE2s implementation
struct Blake2s<u32>, FixedBuffer64, U64, 32
}
blake2! {
/// General BLAKE2b implementation
struct Blake2b<u64>, FixedBuffer128, U128, 64
}
/// BLAKE2s-128 implementation
pub type Blake2s128 = Blake2s<U16>;
/// BLAKE2s-160 implementation
pub type Blake2s160 = Blake2s<U20>;
/// BLAKE2s-224 implementation
pub type Blake2s224 = Blake2s<U28>;
/// BLAKE2s-256 implementation
pub type Blake2s256 = Blake2s<U32>;
/// BLAKE2b-160 implementation
pub type Blake2b160 = Blake2b<U20>;
/// BLAKE2b-256 implementation
pub type Blake2b256 = Blake2b<U32>;
/// BLAKE2b-384 implementation
pub type Blake2b384 = Blake2b<U48>;
/// BLAKE2b-512 implementation
pub type Blake2b512 = Blake2b<U64>;
| : usize) |
events.rs | use bevy::prelude::*;
use heron::prelude::*;
const SPEED: f32 = 300.0;
#[derive(PhysicsLayer)]
enum Layer {
Enemy,
Player,
}
struct Player;
fn main() {
App::build()
.add_plugins(DefaultPlugins)
.add_plugin(PhysicsPlugin::default())
.add_startup_system(spawn_camera.system())
.add_startup_system(spawn_player.system())
.add_startup_system(spawn_enemy.system())
.add_system(handle_input.system())
.add_system(log_collisions.system())
.add_system(kill_enemy.system())
.run();
}
// ANCHOR: log-collisions
fn log_collisions(mut events: EventReader<CollisionEvent>) {
for event in events.iter() {
match event {
CollisionEvent::Started(d1, d2) => {
println!("Collision started between {:?} and {:?}", d1, d2)
}
CollisionEvent::Stopped(d1, d2) => {
println!("Collision stopped between {:?} and {:?}", d1, d2)
}
}
}
}
// ANCHOR_END: log-collisions
// ANCHOR: kill-enemy
fn kill_enemy(mut commands: Commands, mut events: EventReader<CollisionEvent>) {
events
.iter()
// We care about when the entities "start" to collide
.filter(|e| e.is_started())
.filter_map(|event| {
let (entity_1, entity_2) = event.rigid_body_entities();
let (layers_1, layers_2) = event.collision_layers();
if is_player(layers_1) && is_enemy(layers_2) {
Some(entity_2)
} else if is_player(layers_2) && is_enemy(layers_1) {
Some(entity_1)
} else {
// This event is not the collision between an enemy and the player. We can ignore it.
None
}
})
.for_each(|enemy_entity| commands.entity(enemy_entity).despawn());
} | // Note: We check both layers each time to avoid a false-positive
// that can occur if an entity has the default (unconfigured) `CollisionLayers`
fn is_player(layers: CollisionLayers) -> bool {
layers.contains_group(Layer::Player) && !layers.contains_group(Layer::Enemy)
}
fn is_enemy(layers: CollisionLayers) -> bool {
!layers.contains_group(Layer::Player) && layers.contains_group(Layer::Enemy)
}
// ANCHOR_END: kill-enemy
fn spawn_player(mut commands: Commands, mut materials: ResMut<Assets<ColorMaterial>>) {
let size = Vec2::new(30.0, 30.0);
commands
.spawn_bundle(SpriteBundle {
sprite: Sprite::new(size),
material: materials.add(Color::GREEN.into()),
transform: Transform::from_translation(Vec3::new(-400.0, 200.0, 0.0)),
..Default::default()
})
.insert(Player)
.insert(RigidBody::Dynamic)
.insert(CollisionShape::Cuboid {
half_extends: size.extend(0.0) / 2.0,
})
.insert(Velocity::default())
.insert(CollisionLayers::new(Layer::Player, Layer::Enemy));
}
fn spawn_enemy(mut commands: Commands, mut materials: ResMut<Assets<ColorMaterial>>) {
let size = Vec2::new(30.0, 30.0);
commands
.spawn_bundle(SpriteBundle {
sprite: Sprite::new(size),
material: materials.add(Color::RED.into()),
transform: Transform::from_translation(Vec3::new(0.0, 200.0, 0.0)),
..Default::default()
})
.insert(RigidBody::Static)
.insert(CollisionShape::Cuboid {
half_extends: size.extend(0.0) / 2.0,
})
.insert(CollisionLayers::new(Layer::Enemy, Layer::Player));
}
fn spawn_camera(mut commands: Commands) {
commands.spawn_bundle(OrthographicCameraBundle::new_2d());
}
fn handle_input(input: Res<Input<KeyCode>>, mut players: Query<&mut Velocity, With<Player>>) {
let x = if input.pressed(KeyCode::Left) {
-1.0
} else if input.pressed(KeyCode::Right) {
1.0
} else {
0.0
};
let y = if input.pressed(KeyCode::Down) {
-1.0
} else if input.pressed(KeyCode::Up) {
1.0
} else {
0.0
};
let target_velocity = Vec2::new(x, y).normalize_or_zero().extend(0.0) * SPEED;
for mut velocity in players.iter_mut() {
velocity.linear = target_velocity;
}
} | |
btc_lottery.rs | use super::{account_id_from_hex, TransactionError, TransactionResult};
use crate::chain;
use crate::contracts::{self, AccountId};
use std::{
collections::{
btree_map::Entry::{Occupied, Vacant},
BTreeMap,
},
str::FromStr,
string::ToString,
};
use anyhow::Result;
use lazy_static;
use log::error;
use parity_scale_codec::{Decode, Encode};
use phala_mq::{MessageOrigin, Sr25519MessageChannel as MessageChannel};
use rand::{rngs::StdRng, seq::IteratorRandom, SeedableRng};
use sp_core::{crypto::Pair, hashing::blake2_256, sr25519, U256};
use sp_runtime_interface::pass_by::PassByInner as _;
use bitcoin;
use bitcoin::blockdata::script::Builder;
use bitcoin::blockdata::transaction::{OutPoint, SigHashType, TxIn, TxOut};
use bitcoin::consensus::encode::serialize;
use bitcoin::network::constants::Network;
use bitcoin::secp256k1::{All, Secp256k1, Signature};
use bitcoin::util::bip32::ExtendedPrivKey;
use bitcoin::{Address, PrivateKey, PublicKey, Script, Transaction, Txid as BtcTxid};
use bitcoin_hashes::Hash as _;
use phala_types::messaging::{
Lottery, LotteryCommand as Command, LotteryPalletCommand, LotteryUserCommand, Txid,
};
use super::NativeContext;
type SequenceType = u64;
const ALICE: &str = "d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d";
const RBF: u32 = 0xffffffff - 2;
lazy_static! {
// 10000...000, used to tell if this is a NFT
static ref TYPE_NF_BIT: U256 = U256::from(1) << 255;
}
pub struct BtcLottery {
round_id: u32,
token_set: BTreeMap<u32, Vec<String>>,
lottery_set: BTreeMap<u32, BTreeMap<String, PrivateKey>>,
tx_set: Vec<Vec<u8>>,
sequence: SequenceType, // Starting from zero
secret: Option<sr25519::Pair>, // TODO: replace it with a seed.
/// round_id => (txid, vout, amount)?
utxo: BTreeMap<u32, BTreeMap<Address, (Txid, u32, u64)>>,
admin: AccountId,
}
impl core::fmt::Debug for BtcLottery {
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
write!(f, "Hi")
}
}
#[derive(Encode, Decode, Debug)]
pub enum Error {
InvalidRequest,
}
#[derive(Encode, Decode, Debug, Clone)]
pub enum Request {
GetAllRounds,
GetRoundInfo { round_id: u32 },
GetRoundAddress { round_id: u32 },
QueryUtxo { round_id: u32 },
GetSignedTx { round_id: u32 },
}
type AddressString = String;
#[derive(Encode, Decode, Debug)]
pub enum Response {
GetAllRounds {
round_id: u32,
},
GetRoundInfo {
token_number: u32,
winner_count: u32,
},
GetRoundAddress {
prize_addr: Vec<String>,
},
QueryUtxo {
utxo: Vec<(AddressString, (Txid, u32, u64))>,
},
GetSignedTx {
tx_set: Vec<Vec<u8>>,
},
PendingLotteryEgress {
length: u64,
lottery_queue_b64: String,
},
Error(Error),
}
impl BtcLottery {
/// Initializes the contract
pub fn new(secret: Option<sr25519::Pair>) -> Self {
let token_set = BTreeMap::<u32, Vec<String>>::new();
let lottery_set = BTreeMap::<u32, BTreeMap<String, PrivateKey>>::new();
let utxo = BTreeMap::<u32, BTreeMap<Address, (Txid, u32, u64)>>::new();
let admin = account_id_from_hex(ALICE).expect("Bad initial admin hex");
BtcLottery {
round_id: 0,
token_set,
lottery_set,
tx_set: Vec::new(),
sequence: 0,
secret,
utxo,
admin,
}
}
pub fn sign(
secp: &Secp256k1<All>,
digest: &[u8],
key: &PrivateKey,
) -> Result<Signature, bitcoin::secp256k1::Error> {
Ok(secp.sign(&bitcoin::secp256k1::Message::from_slice(digest)?, &key.key))
}
fn check_secret_key(&self) -> bool {
if self.secret.is_none() {
error!("Empty secret key");
false
} else {
true
}
}
pub fn new_round(
&mut self,
mq: &MessageChannel,
round_id: u32,
total_count: u32,
winner_count: u32,
) {
info!("new_round({}, {}, {})", round_id, total_count, winner_count);
if !self.check_secret_key() {
return;
}
if !self.token_set.contains_key(&round_id) && !self.lottery_set.contains_key(&round_id) {
let _sequence = self.sequence;
let secret = self.secret.as_ref().expect("Key is checked; qed.");
let token_round_id: U256 = U256::from(round_id) << 128;
let mut round_token = Vec::new();
for token_no in 1..=total_count {
let nft_id = (token_round_id + token_no) | *TYPE_NF_BIT;
let token_id = format!("{:#x}", nft_id);
round_token.push(token_id);
}
info!("new_round: n round_token: {}", round_token.len());
let mut lottery_token = BTreeMap::<String, PrivateKey>::new();
let raw_seed = blake2_256(&Encode::encode(&(secret.to_raw_vec(), round_id)));
let mut r: StdRng = SeedableRng::from_seed(raw_seed);
let sample = round_token
.iter()
.choose_multiple(&mut r, winner_count as usize);
info!("new_round: n sampled: {}", sample.len());
let mut address_set = Vec::new();
let mut salt = round_id * 10000;
for winner_id in sample {
let raw_data = (raw_seed, salt);
let seed = blake2_256(&Encode::encode(&raw_data));
let sk = match ExtendedPrivKey::new_master(Network::Bitcoin, &seed) {
Ok(e) => e.private_key,
Err(_err) => {
error!(
"LotteryNewRound: cannot create a new secret key from the seed: {:?}",
&seed
);
return;
}
};
let secp = Secp256k1::new();
let public_key = PublicKey::from_private_key(&secp, &sk);
let prize_addr = Address::p2pkh(&public_key, Network::Bitcoin);
address_set.push(prize_addr.to_string().as_bytes().to_vec());
lottery_token.insert(String::from(winner_id), sk);
salt += 1;
}
self.lottery_set.insert(round_id, lottery_token);
self.token_set.insert(round_id, round_token);
self.round_id = round_id;
mq.send(&Lottery::BtcAddresses { address_set });
} else {
error!("Round {} has already started", round_id);
}
}
pub fn open_lottery(
&mut self,
mq: &MessageChannel,
round_id: u32,
token_no: u32,
btc_address: Vec<u8>,
) {
if !self.check_secret_key() {
return;
}
if self.lottery_set.contains_key(&round_id) && self.utxo.contains_key(&round_id) {
let token_round_id: U256 = U256::from(round_id) << 128;
let nft_id = (token_round_id + token_no) | *TYPE_NF_BIT;
let token_id = format!("{:#x}", nft_id);
// from Vec<u8> to String
let btc_address = match String::from_utf8(btc_address.clone()) {
Ok(e) => e,
Err(_err) => {
error!(
"LotteryOpenBox: cannot convert btc_address to String: {:?}",
&btc_address
);
return;
}
};
let target = match Address::from_str(&btc_address) {
Ok(e) => e,
Err(_error) => {
error!(
"LotteryOpenBox: cannot convert btc_address to Address: {:?}",
&btc_address
);
return;
}
};
let data = if !self
.lottery_set
.get(&round_id)
.expect("round_id is known in the lottery_set; qed")
.contains_key(&token_id)
{
Lottery::SignedTx {
round_id,
token_id: token_id.as_bytes().to_vec(),
tx: Vec::new(),
}
} else {
let secp = Secp256k1::new();
let private_key: PrivateKey = *self
.lottery_set
.get(&round_id)
.expect("round_id is known in the lottery_set; qed")
.get(&token_id)
.expect("token_id is known in the lottery_set; qed");
let public_key = PublicKey::from_private_key(&secp, &private_key);
let prize_addr = Address::p2pkh(&public_key, Network::Bitcoin);
let round_utxo = self
.utxo
.get(&round_id)
.expect("round_id is known in the utxo; qed");
let (txid, vout, amount) = round_utxo
.get(&prize_addr)
.expect("address is known in the utxo; qed");
let mut tx = Transaction {
input: vec![TxIn {
previous_output: OutPoint {
txid: BtcTxid::from_inner(*txid),
vout: *vout,
},
sequence: RBF,
witness: Vec::new(),
script_sig: Script::new(),
}],
// TODO: deal with fee
output: vec![TxOut {
script_pubkey: target.script_pubkey(),
value: *amount,
}],
lock_time: 0,
version: 2,
};
let sighash =
tx.signature_hash(0, &prize_addr.script_pubkey(), SigHashType::All.as_u32());
let secp_sign: Secp256k1<All> = Secp256k1::<All>::new();
let tx_sign = match Self::sign(&secp_sign, &sighash[..], &private_key) {
Ok(e) => e.serialize_der(),
Err(err) => {
error!(
"LotteryOpenBox: the signing of the tx meets some problems:{}",
err
);
return;
}
};
let mut with_hashtype = tx_sign.to_vec();
with_hashtype.push(SigHashType::All.as_u32() as u8);
tx.input[0].script_sig = Builder::new()
.push_slice(with_hashtype.as_slice())
.push_slice(public_key.to_bytes().as_slice())
.into_script();
tx.input[0].witness.clear();
let tx_bytes = serialize(&tx);
self.tx_set.push(tx_bytes.clone());
Lottery::SignedTx {
round_id,
token_id: token_id.as_bytes().to_vec(),
tx: tx_bytes,
}
};
mq.send(&data);
} else {
error!("Round {} has already started", round_id);
}
}
}
impl contracts::NativeContract for BtcLottery {
type Cmd = Command;
type QReq = Request;
type QResp = Response;
// Returns the contract id
fn id(&self) -> contracts::ContractId32 {
contracts::BTC_LOTTERY
}
fn handle_command(
&mut self,
context: &NativeContext,
origin: MessageOrigin,
cmd: Self::Cmd,
) -> TransactionResult {
match cmd {
Command::PalletCommand(cmd) => self.handle_pallet_command(context, origin, cmd),
Command::UserCommand(cmd) => self.handle_user_command(context, origin, cmd),
}
}
fn handle_query(&mut self, _origin: Option<&chain::AccountId>, req: Request) -> Response {
match req {
Request::GetAllRounds => Response::GetAllRounds {
round_id: self.round_id,
},
Request::GetRoundInfo { round_id } => {
if self.token_set.contains_key(&round_id)
&& self.lottery_set.contains_key(&round_id)
{
let token_number = self
.token_set
.get(&round_id)
.expect("round_id is known in the token_set; qed")
.len();
let winner_count = self
.lottery_set
.get(&round_id)
.expect("round_id is known in the lottery_set; qed")
.len();
Response::GetRoundInfo {
token_number: token_number as u32,
winner_count: winner_count as u32,
}
} else {
Response::Error(Error::InvalidRequest)
}
}
Request::GetRoundAddress { round_id } => {
if self.lottery_set.contains_key(&round_id) {
let temp = self
.lottery_set
.get(&round_id)
.expect("round_id is known in the lottery_set; qed");
let mut address_set = Vec::new();
for (_, private_key) in temp.iter() {
let secp = Secp256k1::new();
let public_key = PublicKey::from_private_key(&secp, private_key);
let prize_addr = Address::p2pkh(&public_key, Network::Bitcoin);
address_set.push(prize_addr.to_string());
}
Response::GetRoundAddress {
prize_addr: address_set,
}
} else {
Response::Error(Error::InvalidRequest)
}
}
Request::QueryUtxo { round_id } => {
if self.utxo.contains_key(&round_id) {
let utxo = self
.utxo
.get(&round_id)
.expect("round_id is known in the utxo set; qed")
.iter()
.map(|(addr, utxo)| (addr.to_string(), *utxo))
.collect();
Response::QueryUtxo { utxo }
} else {
Response::Error(Error::InvalidRequest)
}
}
Request::GetSignedTx { round_id: _ } => Response::GetSignedTx {
tx_set: self.tx_set.clone(),
},
}
}
}
impl BtcLottery {
fn | (
&mut self,
_context: &NativeContext,
origin: MessageOrigin,
cmd: LotteryUserCommand,
) -> TransactionResult {
let origin: chain::AccountId = match origin {
MessageOrigin::AccountId(id) => (*id.inner()).into(),
_ => return Err(TransactionError::BadOrigin),
};
match cmd {
LotteryUserCommand::SubmitUtxo {
round_id,
address,
utxo,
} => {
let sender = origin;
let btc_address = match Address::from_str(&address) {
Ok(e) => e,
Err(_) => return Err(TransactionError::BadCommand),
};
if self.admin == sender {
let round_utxo = match self.utxo.entry(round_id) {
Occupied(_entry) => return Err(TransactionError::BadCommand),
Vacant(entry) => entry.insert(Default::default()),
};
round_utxo.insert(btc_address, utxo);
}
Ok(())
}
LotteryUserCommand::SetAdmin { new_admin } => {
// TODO: listen to some specific privileged account instead of ALICE
let sender = origin;
if let Ok(new_admin) = account_id_from_hex(&new_admin) {
if self.admin == sender {
self.admin = new_admin;
}
Ok(())
} else {
Err(TransactionError::InvalidAccount)
}
}
}
}
fn handle_pallet_command(
&mut self,
context: &NativeContext,
origin: MessageOrigin,
ce: LotteryPalletCommand,
) -> TransactionResult {
if !origin.is_pallet() {
error!("Received trasfer event from invalid origin: {:?}", origin);
return Err(TransactionError::BadOrigin);
}
info!("Received trasfer event from {:?}", origin);
match ce {
LotteryPalletCommand::NewRound {
round_id,
total_count,
winner_count,
} => Self::new_round(self, context.mq(), round_id, total_count, winner_count),
LotteryPalletCommand::OpenBox {
round_id,
token_id,
btc_address,
} => Self::open_lottery(self, context.mq(), round_id, token_id, btc_address),
}
Ok(())
}
}
| handle_user_command |
wrappers.go | package handlers
import (
"fmt"
"github.com/ds-test-framework/scheduler/testlib"
"github.com/ds-test-framework/scheduler/types"
)
type CountWrapper struct {
counterFunc func(*types.Event, *testlib.Context) (*testlib.Counter, bool)
}
func Count(label string) *CountWrapper {
return &CountWrapper{
counterFunc: func(_ *types.Event, c *testlib.Context) (*testlib.Counter, bool) {
return c.Vars.GetCounter(label)
},
}
}
func CountTo(label string) *CountWrapper {
return &CountWrapper{
counterFunc: func(e *types.Event, c *testlib.Context) (*testlib.Counter, bool) {
message, ok := c.GetMessage(e)
if !ok {
return nil, false
}
counter, ok := c.Vars.GetCounter(fmt.Sprintf("%s_%s", label, message.To))
if !ok {
return nil, false
}
return counter, true
},
}
}
type SetWrapper struct {
setFunc func(*types.Event, *testlib.Context) (*types.MessageStore, bool)
}
func Set(label string) *SetWrapper {
return &SetWrapper{
setFunc: func(e *types.Event, c *testlib.Context) (*types.MessageStore, bool) {
return c.Vars.GetMessageSet(label)
},
}
}
func (s *SetWrapper) Count() *CountWrapper {
return &CountWrapper{
counterFunc: func(e *types.Event, c *testlib.Context) (*testlib.Counter, bool) {
set, ok := s.setFunc(e, c)
if !ok {
return nil, false
}
counter := testlib.NewCounter()
counter.SetValue(set.Size())
return counter, true
}, | }
} |
|
proxy_simple_test_case.py | from typing import List
from unittest import TestCase
from puma.attribute import copied
from puma.buffer import Publishable
from puma.runnable import CommandDrivenRunnable
from puma.runnable.decorator.run_in_child_scope import run_in_child_scope
from puma.scope_id import get_current_scope_id
from tests.runnable.proxy.proxy_test_case import ProxyTestCase, RunnableTestInterface
from tests.runnable.proxy.proxy_test_environment import ProxyTestEnvironment
from tests.runnable.proxy.proxy_test_helpers import CallResponse, HasMethodThatReturnsValue, SendsCallsToBufferImpl, from_scope_id
class SimpleProxyTestCase(ProxyTestCase): | def create_demo_interface(self, call_response_publishable: Publishable[CallResponse]) -> RunnableTestInterface:
return SendsCallsToBufferRunnable(SendsCallsToBufferImpl(call_response_publishable))
def perform_commands(self, test_case: TestCase, test_interface: HasMethodThatReturnsValue) -> None:
test_interface.no_args()
test_interface.one_arg(get_current_scope_id())
test_interface.two_args(get_current_scope_id(), "2")
value = test_interface.returns_value(get_current_scope_id(), 3)
test_interface.two_args(get_current_scope_id(), value)
def check_results(self, test_case: TestCase, proxy_test_env: ProxyTestEnvironment, commands: List[CallResponse]) -> None:
test_case.assertEqual(self._get_expected_command_count(), len(commands))
# Ensure the correct commands were called (can't verify first argument, as it is generated - it will be checked later)
test_case.assertEqual("no_args", commands[0].method_name)
test_case.assertEqual([], commands[0].args)
test_case.assertEqual("one_arg", commands[1].method_name)
test_case.assertEqual("two_args", commands[2].method_name)
test_case.assertEqual("2", commands[2].args[1])
test_case.assertEqual("returns_value", commands[3].method_name)
test_case.assertEqual(3, commands[3].args[1])
test_case.assertEqual("two_args", commands[4].method_name)
test_case.assertEqual(self._expected_result_value, commands[4].args[1])
# Ensure all commands ran in the same scope
command_run_scope_ids = set()
for c in commands:
command_run_scope_ids.add(c.scope_id)
test_case.assertEqual(1, len(command_run_scope_ids), f"Not all commands were run in the same scope - {command_run_scope_ids}")
# Ensure all commands called in the same scope
command_called_scope_ids = set()
for c in commands:
if len(c.args) > 0:
command_called_scope_ids.add(c.args[0])
test_case.assertEqual(1, len(command_called_scope_ids), f"Not all commands were called in the same scope - {command_called_scope_ids}")
command_called_scope = from_scope_id(command_called_scope_ids.pop())
command_run_scope = from_scope_id(command_run_scope_ids.pop())
# Ensure commands weren't called from or run in the main thread
main_thread_scope = from_scope_id(get_current_scope_id())
test_case.assertNotEqual(main_thread_scope, command_called_scope)
test_case.assertNotEqual(main_thread_scope, command_run_scope)
# Ensure commands were called from the expected scope
proxy_test_env.environment_verifier.verify(test_case, command_called_scope, command_run_scope)
def _get_expected_command_count(self) -> int:
return 5
class SendsCallsToBufferRunnable(CommandDrivenRunnable, RunnableTestInterface):
_wrapped_instance: HasMethodThatReturnsValue = copied("_wrapped_instance")
def __init__(self, wrapped_interface: HasMethodThatReturnsValue) -> None:
super().__init__(self.__class__.__name__, [])
self._wrapped_instance = wrapped_interface
@run_in_child_scope
def no_args(self) -> None:
self._wrapped_instance.no_args()
@run_in_child_scope
def one_arg(self, a: str) -> None:
self._wrapped_instance.one_arg(a)
@run_in_child_scope
def two_args(self, a: str, b: str) -> None:
self._wrapped_instance.two_args(a, b)
def returns_value(self, a: str, b: int) -> str:
self._in_child_returns_value(a, b)
return f"Called by {self.__class__.__name__}"
@run_in_child_scope
def _in_child_returns_value(self, a: str, b: int) -> None:
self._wrapped_instance.returns_value(a, b) |
def __init__(self, expected_result_value: str):
self._expected_result_value = expected_result_value
|
log.go | package main
import (
"net/http"
"os"
log "github.com/sirupsen/logrus"
)
// InitLogger configures log to output on os.Stdout and with
// json format
func InitLogger() {
log.SetFormatter(&log.JSONFormatter{
FieldMap: log.FieldMap{
log.FieldKeyTime: "timestamp",
log.FieldKeyMsg: "message",
},
})
log.SetOutput(os.Stdout)
}
// LogRequestFields returns a log.Fields with
// useful information taken from r.
func | (r *http.Request) log.Fields {
return log.Fields{
"method": r.Method,
"URI": r.RequestURI,
"upstream": r.Host,
"contract": r.Header.Get("X-Devroute"),
"matched-service": r.Header.Get("X-Devroute-Matched"),
}
}
| LogRequestFields |
Subsets and Splits