file_name (string, 3-137 chars) | prefix (string, 0-918k chars) | suffix (string, 0-962k chars) | middle (string, 0-812k chars)
---|---|---|---
test_sync.py | from pych.extern import Chapel

@Chapel()
def ex_sync():
    """
    writeln("Starting!");
    sync {
        begin writeln("#1 line.");
        begin writeln("#2 line.");
        begin writeln("#3 line.");
        begin writeln("#4 line.");
        begin writeln("#5 line.");
    }
    writeln("DONE!");
    """
    return None

if __name__ == '__main__':
    ex_sync()

import testcase
# contains the general testing method, which allows us to gather output
import os.path

def test_sync():
    out = testcase.runpy(os.path.realpath(__file__))
    # The first time this test is run, the output may contain a notice that a
    # temporary file has been created. The important part is that the expected
    # output follows it (enabling the test to work for all runs, as the
    # temporary-file message won't occur on the second run). But that means we
    # can't use out.startswith.
    # Ensure the output starts and ends with the correct statements.
    startLoc = out.find('Starting!\n')
    assert startLoc >= 0
    # Ensure it contains all of the remaining lines.
    for i in xrange(1, 6):
        lineLoc = out.find('#' + str(i) + ' line.\n')
        assert lineLoc >= 0
        assert lineLoc >= startLoc
    assert out.endswith('DONE!\n')
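# A minimal sketch of the ordering property checked above: the five "#N line."
# outputs of the Chapel sync block may interleave in any order, but all must
# appear after "Starting!" and before the trailing "DONE!". The sample string
# below is made up for illustration and is not produced by the test itself.
sample = 'Starting!\n#2 line.\n#1 line.\n#3 line.\n#5 line.\n#4 line.\nDONE!\n'
assert sample.find('Starting!\n') >= 0
for i in xrange(1, 6):
    assert sample.find('#' + str(i) + ' line.\n') >= sample.find('Starting!\n')
assert sample.endswith('DONE!\n')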
specificcardinality_builder.go | package tokens

import "errors"

type specificCardinalityBuilder struct {
	amount *uint
	rnge   Range
}

func createSpecificCardinalityBuilder() SpecificCardinalityBuilder {
	out := specificCardinalityBuilder{
		amount: nil,
		rnge:   nil,
	}

	return &out
}

// Create initializes the builder
func (app *specificCardinalityBuilder) Create() SpecificCardinalityBuilder {
	return createSpecificCardinalityBuilder()
}

// WithAmount adds an amount to the builder
func (app *specificCardinalityBuilder) WithAmount(amount uint) SpecificCardinalityBuilder {
	app.amount = &amount
	return app
}

// WithRange adds a range to the builder
func (app *specificCardinalityBuilder) WithRange(rnge Range) SpecificCardinalityBuilder {
	app.rnge = rnge
	return app
}

// Now builds a new SpecificCardinality instance
func (app *specificCardinalityBuilder) Now() (SpecificCardinality, error) {
	if app.amount != nil {
		return createSpecificCardinalityWithAmount(app.amount), nil
	}

	if app.rnge != nil {
		return createSpecificCardinalityWithRange(app.rnge), nil
	}

	return nil, errors.New("the SpecificCardinality instance is invalid")
}
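// Illustrative usage of the builder above (assumes a Range value named rng
// obtained elsewhere; not part of the original file):
//
//	builder := createSpecificCardinalityBuilder()
//	exact, err := builder.WithAmount(3).Now()             // cardinality of exactly 3
//	ranged, err := builder.Create().WithRange(rng).Now()  // cardinality within rng
//	// Now() returns an error if neither an amount nor a range was set.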
utils.py | # Copyright 2013 dotCloud inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import tarfile
import tempfile
from distutils.version import StrictVersion
import requests
import six
from .. import errors
DEFAULT_HTTP_HOST = "127.0.0.1"
DEFAULT_UNIX_SOCKET = "http+unix://var/run/docker.sock"
def mkbuildcontext(dockerfile):
f = tempfile.NamedTemporaryFile()
t = tarfile.open(mode='w', fileobj=f)
if isinstance(dockerfile, io.StringIO):
dfinfo = tarfile.TarInfo('Dockerfile')
if six.PY3:
raise TypeError('Please use io.BytesIO to create in-memory '
'Dockerfiles with Python 3')
else:
dfinfo.size = len(dockerfile.getvalue())
elif isinstance(dockerfile, io.BytesIO):
dfinfo = tarfile.TarInfo('Dockerfile')
dfinfo.size = len(dockerfile.getvalue())
else:
dfinfo = t.gettarinfo(fileobj=dockerfile, arcname='Dockerfile')
t.addfile(dfinfo, dockerfile)
t.close()
f.seek(0)
return f
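# Illustrative: on Python 3 an in-memory Dockerfile must be a BytesIO, e.g.
#   ctx = mkbuildcontext(io.BytesIO(b"FROM busybox\nCMD echo hi\n"))
# ctx is a seekable temporary file holding a tar archive with a single
# 'Dockerfile' member, ready to be streamed to the Docker build API.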
def tar(path):
f = tempfile.NamedTemporaryFile()
t = tarfile.open(mode='w', fileobj=f)
t.add(path, arcname='.')
t.close()
f.seek(0)
return f
def compare_version(v1, v2):
"""Compare docker versions
>>> v1 = '1.9'
>>> v2 = '1.10'
>>> compare_version(v1, v2)
1
>>> compare_version(v2, v1)
-1
>>> compare_version(v2, v2)
0
"""
s1 = StrictVersion(v1)
s2 = StrictVersion(v2)
if s1 == s2:
return 0
elif s1 > s2:
return -1
else:
return 1
def ping(url):
try:
res = requests.get(url)
except Exception:
return False
else:
return res.status_code < 400
def _convert_port_binding(binding):
    result = {'HostIp': '', 'HostPort': ''}
    if isinstance(binding, tuple):
        if len(binding) == 2:
            result['HostPort'] = binding[1]
            result['HostIp'] = binding[0]
        elif isinstance(binding[0], six.string_types):
            result['HostIp'] = binding[0]
        else:
            result['HostPort'] = binding[0]
    elif isinstance(binding, dict):
        if 'HostPort' in binding:
            result['HostPort'] = binding['HostPort']
            if 'HostIp' in binding:
                result['HostIp'] = binding['HostIp']
        else:
            raise ValueError(binding)
    else:
        result['HostPort'] = binding

    if result['HostPort'] is None:
        result['HostPort'] = ''
    else:
        result['HostPort'] = str(result['HostPort'])

    return result
def convert_port_bindings(port_bindings):
result = {}
for k, v in six.iteritems(port_bindings):
key = str(k)
if '/' not in key:
key = key + '/tcp'
if isinstance(v, list):
result[key] = [_convert_port_binding(binding) for binding in v]
else:
result[key] = [_convert_port_binding(v)]
return result
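# Illustrative mapping (values are made-up examples):
#   convert_port_bindings({1111: ('127.0.0.1', 4567)})
#   -> {'1111/tcp': [{'HostIp': '127.0.0.1', 'HostPort': '4567'}]}
#   convert_port_bindings({'2222/udp': 3333})
#   -> {'2222/udp': [{'HostIp': '', 'HostPort': '3333'}]}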
def convert_volume_binds(binds):
result = []
for k, v in binds.items():
if isinstance(v, dict):
result.append('%s:%s:%s' % (
k, v['bind'], 'ro' if v.get('ro', False) else 'rw'
))
else:
result.append('%s:%s:rw' % (k, v))
return result
def parse_repository_tag(repo):
column_index = repo.rfind(':')
if column_index < 0:
return repo, None
tag = repo[column_index + 1:]
slash_index = tag.find('/')
if slash_index < 0:
return repo[:column_index], tag
return repo, None
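# Illustrative results:
#   parse_repository_tag('user/repo:latest')    -> ('user/repo', 'latest')
#   parse_repository_tag('user/repo')           -> ('user/repo', None)
#   parse_repository_tag('localhost:5000/repo') -> ('localhost:5000/repo', None)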
# Based on utils.go:ParseHost http://tinyurl.com/nkahcfh
# fd:// protocol unsupported (for obvious reasons)
# Added support for http and https
# Protocol translation: tcp -> http, unix -> http+unix
def parse_host(addr):
proto = "http+unix"
host = DEFAULT_HTTP_HOST
port = None
if not addr or addr.strip() == 'unix://':
return DEFAULT_UNIX_SOCKET
addr = addr.strip()
if addr.startswith('http://'):
addr = addr.replace('http://', 'tcp://')
if addr.startswith('http+unix://'):
addr = addr.replace('http+unix://', 'unix://')
if addr == 'tcp://':
raise errors.DockerException("Invalid bind address format: %s" % addr)
elif addr.startswith('unix://'):
addr = addr[7:]
elif addr.startswith('tcp://'):
proto = "http"
addr = addr[6:]
elif addr.startswith('https://'):
proto = "https"
addr = addr[8:]
elif addr.startswith('fd://'):
raise errors.DockerException("fd protocol is not implemented")
else:
if "://" in addr:
raise errors.DockerException(
"Invalid bind address protocol: %s" % addr
)
proto = "http"
if proto != "http+unix" and ":" in addr:
host_parts = addr.split(':')
if len(host_parts) != 2:
raise errors.DockerException(
"Invalid bind address format: %s" % addr
)
if host_parts[0]:
host = host_parts[0]
try:
port = int(host_parts[1])
except Exception:
raise errors.DockerException(
"Invalid port: %s", addr
)
elif proto in ("http", "https") and ':' not in addr:
raise errors.DockerException("Bind address needs a port: %s" % addr)
else:
host = addr
if proto == "http+unix":
return "%s://%s" % (proto, host)
return "%s://%s:%d" % (proto, host, port) | else:
raise ValueError(binding)
else: |
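# Illustrative translations (per the protocol mapping described above):
#   parse_host('')                            -> 'http+unix://var/run/docker.sock'
#   parse_host('tcp://0.0.0.0:2375')          -> 'http://0.0.0.0:2375'
#   parse_host('unix:///var/run/docker.sock') -> 'http+unix:///var/run/docker.sock'
#   parse_host('tcp://0.0.0.0')               raises DockerException (port required)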
test_interface.py | """
test the following:
model + scene understanding + interface
"""
import os
import sys
PATH = os.path.join(os.getcwd(), '..')
sys.path.append(PATH)
import cv2
from PyQt5.QtGui import QImage, QColor, QPixmap
from PyQt5.QtWidgets import QApplication
import qtmodern.styles
import qtmodern.windows
from layout import Layout
from pyqt_utils import convert_qimg
from scene_summary import get_names, create_grid, scene_summarize
from simplify_thread_utils import FrameStore, FrameThread
from obj_avoidance import run_avoidance
class SimplifyInteface(Layout):
def __init__(self):
super().__init__()
# setup for scene understanding
self.load_lightbulb()
self.mat = create_grid(h = 240, w = 427)
self.names = get_names()
self.init_thread()
def load_lightbulb(self):
RED_PATH = os.path.join('..', 'images', 'red.jpg')
GREEN_PATH = os.path.join('..', 'images', 'green.jpg')
assert os.path.isfile(RED_PATH), '[ERROR] Path does not exist: {}'.format(RED_PATH)
assert os.path.isfile(GREEN_PATH), '[ERROR] Path does not exist: {}'.format(GREEN_PATH)
red = cv2.imread(RED_PATH)
green = cv2.imread(GREEN_PATH)
self.red_qimg = convert_qimg(red, win_width = 50, win_height = 50)
self.green_qimg = convert_qimg(green, win_width = 50, win_height = 50)
def init_thread(self):
self.f_thread = FrameThread()
self.seg_names = self.f_thread.model_config.names
self.seg_colors = self.f_thread.model_config.colors
self.f_thread.frame_signal.connect(lambda frame_store: self.update_first_layer(frame_store))
self.f_thread.frame_signal.connect(lambda frame_store: self.update_second_layer(frame_store))
self.f_thread.frame_signal.connect(lambda frame_store: self.update_third_layer(frame_store))
self.f_thread.start()
def update_first_layer(self, frame_store):
"""
update different runtime and FPS
"""
self.model_time.setText('{0:.1f} ms'.format(frame_store.model_time * 1000))
self.fps_time.setText('{0:.1f}'.format(frame_store.fps_time))
def update_second_layer(self, frame_store):
"""
update segmentation result and scene summary
"""
# update segmentation result
qimg = convert_qimg(frame_store.pred_rgb, win_width = 620, win_height = 360)
self.seg_frame.setPixmap(QPixmap.fromImage(qimg))
# update scene summary
grid_dict = scene_summarize(frame_store.pred_idx,
self.mat, self.names,
threshold = 900)
self.update_scene_summary(grid_dict)
def update_scene_summary(self, grid_dict):
for i, obj_ls in grid_dict.items():
txt = ', '.join(obj_ls)
q_label = getattr(self, 'grid_{}'.format(i + 1))
q_label.setText(txt)
def update_third_layer(self, frame_store):
    """
    update obstacle avoidance frame, object summary and lightbulb indicator
    """
    obj_tup, obj_img = run_avoidance(frame_store.d1_img, frame_store.pred_idx)
    qimg = convert_qimg(obj_img, win_width = 620, win_height = 360, is_gray = True)
    # update frame on left
    self.obj_frame.setPixmap(QPixmap.fromImage(qimg))
    # update summary on right
    if obj_tup[1] is None:
        self.obj_name.setText('NA')
        self.obj_dist.setText('NA')
        self.lightbulb.setPixmap(QPixmap.fromImage(self.green_qimg))
    else:
        obj_name = self.names[obj_tup[1] + 1]
        self.obj_name.setText(obj_name)
        self.obj_dist.setText('{} m'.format(obj_tup[2]))
        self.lightbulb.setPixmap(QPixmap.fromImage(self.red_qimg))

if __name__ == '__main__':
    app = QApplication(sys.argv)
    win = SimplifyInteface()
    qtmodern.styles.dark(app)
    win_modern = qtmodern.windows.ModernWindow(win)
    win_modern.show()
    sys.exit(app.exec_())
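# Note: frame_signal is connected to three slots in init_thread() above, so a
# single emitted FrameStore updates all three UI layers; with Qt's default
# connection type, slots for a worker-thread signal typically run on the GUI thread.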
app.controller.js | // CONTROLLER
(function(){
var app = angular.module('Japri');
app.controller("HeaderCtrl", ["$scope", "MenuService", HeaderCtrl]);
function HeaderCtrl($scope, MenuService){
var vm = this;
vm.Menu = MenuService.GetMenu(SuccessLogoutCallback);
vm.ReloadMenu = ReloadMenu;
function ReloadMenu(){
vm.Menu = MenuService.GetMenu(SuccessLogoutCallback);
}
function SuccessLogoutCallback(){
Materialize.toast("You have been logged out", 5000);
}
}
app.controller("IndexCtrl", ["$scope", "$controller", "AnnyangService", IndexCtrl]);
function IndexCtrl($scope, $controller, AnnyangService){
var vm = this;
AnnyangService.Init();
$controller("LightCtrl", {$scope: $scope});
}
app.controller("TimeCtrl", ["$scope", "SessionService", "TimeService", TimeCtrl]);
function TimeCtrl($scope, SessionService, TimeService){
var name = SessionService.GetName() || "good looking";
var vm = this;
var info = TimeService.GetAllInfo();
vm.time = info.time;
vm.date = info.date;
vm.fullTime = info.fullTime;
vm.friendlyText = info.friendlyText;
GetInfo();
function GetInfo(){
var info = TimeService.GetAllInfo(name);
vm.time = info.time;
vm.date = info.date;
vm.fullTime = info.fullTime;
vm.friendlyText = info.friendlyText;
}
// Start Interval
vm.timeInterval = setInterval(function(){
GetInfo();
$scope.$apply();
}, 1000);
$scope.$on("$destroy", function(){
clearInterval(vm.timeInterval);
});
}
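// Design note: AngularJS's $interval service wraps setInterval and triggers a
// digest automatically, e.g. (hypothetical rewrite, not part of this app):
//   vm.timeInterval = $interval(GetInfo, 1000);
//   $scope.$on("$destroy", function(){ $interval.cancel(vm.timeInterval); });
// The setInterval + $scope.$apply() pattern above achieves the same effect.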
app.controller("WeatherCtrl", ["$scope", "WeatherResource", "TimeService", WeatherCtrl]);
function WeatherCtrl($scope, WeatherResource, TimeService){
var vm = this;
vm.iconColor = "white";
vm.loading = true;
vm.data = false;
vm.interval = false;
vm.refreshTime = 1000 * 60 * 30; // 30 Minutes
// Initial
GetWeather();
vm.interval = setInterval(GetWeather, vm.refreshTime);
function GetWeather(){
vm.loading = true;
return WeatherResource.getPrediction({position: "42.0258191,-93.6965875"}).$promise.then(function(success){
vm.loading = false;
vm.data = success.data;
var time = TimeService.GetAllInfo();
vm.lastUpdated = time.fullTime;
}, function(error){
vm.Error = "Unable to retrieve weather information";
});
}
$scope.$on("$destroy", function dismiss() {
clearInterval(vm.interval);
});
}
app.controller("BibleCtrl", ["$scope", "BibleResource", BibleCtrl]);
function BibleCtrl($scope, BibleResource){
var vm = this;
vm.loading = true;
vm.data = false;
vm.interval = false;
vm.refreshTime = 1000 * 60 * 60 * 4; // 4 Hours
// Initial
GetBibleVOTD();
vm.interval = setInterval(GetBibleVOTD, vm.refreshTime);
function GetBibleVOTD(){
vm.loading = true;
return BibleResource.getVotd().$promise.then(function(success){
vm.loading = false;
vm.data = success.data;
}, function(error){
vm.data = false;
});
}
$scope.$on("$destroy", function dismiss() {
clearInterval(vm.interval);
});
}
app.controller("NewsCtrl", ["$scope", "NewsResource", NewsCtrl]);
function NewsCtrl($scope, NewsResource){
var vm = this;
vm.loading = true;
vm.data = false;
vm.interval = false;
vm.refreshTime = 1000 * 60 * 60; // 1 Hour
//Initial
GetNews();
vm.interval = setInterval(GetNews, vm.refreshTime);
function GetNews(){
vm.loading = true;
return NewsResource.getTopStories({section: "national"}).$promise.then(function(success){
vm.loading = false;
vm.data = success.data;
}, function(error){
vm.error = "Unable to retrieve news";
});
}
$scope.$on("$destroy", function dismiss() {
clearInterval(vm.interval);
});
}
app.controller("NextbusCtrl", ["$scope", "NextbusResource", "TimeService", NextbusCtrl]);
function NextbusCtrl($scope, NextbusResource, TimeService){
var default_nextbus_agent = "cyride";
var colorMap =
{
1: "red",
2: "green",
3: "blue",
4: "grey darken-1",
5: "yellow darken-2",
6: "brown",
7: "purple",
8: "cyan",
10: "pink",
23: "orange"
}; // Matching it with what Materialize has
var vm = this;
vm.loading = true;
vm.interval = false;
vm.routes = false;
vm.stops = false;
vm.predictions = false;
vm.route = "6S"; // Default value route
vm.stop = "1092"; // Default value stop: Howe Hall
vm.busColor = colorMap[StripNonDigit(vm.route)];
vm.refreshTime = 1000 * 30; // 30 Seconds
vm.isArray = angular.isArray;
// Initial
if(vm.route && vm.stop){
Init();
}
function Init(){
GetNextbusRoute();
GetNextbusRouteConfig();
GetPredictions();
}
// Functions
vm.ChangeNextbusRoute = ChangeNextbusRoute;
vm.ChangeNextbusStop = ChangeNextbusStop;
function StripNonDigit(str){
return str.replace(/\D/g,'');
}
function GetNextbusRoute(){
vm.loading = true;
NextbusResource.getRoute({agentId: default_nextbus_agent}).$promise.then(function(success){
vm.routes = success.data.body.route;
vm.loading = false;
}, function(error){
vm.error = "Unable to retrieve Nextbus routes";
});
}
function GetNextbusRouteConfig(){
vm.loading = true;
NextbusResource.getRouteConfig({agentId: default_nextbus_agent, routeTag: vm.route}).$promise.then(function(success){
vm.stops = success.data.body.route.stop;
vm.loading = false;
}, function(error){
vm.error = "Unable to retrieve Nextbus stops";
});
}
function ChangeNextbusRoute(){
vm.stops = false;
vm.stop = "";
vm.predictions = false;
if(vm.route != ""){
vm.busColor = colorMap[StripNonDigit(vm.route)];
GetNextbusRouteConfig();
}
}
function ChangeNextbusStop(){
vm.predictions = false;
if(vm.stop != ""){
GetPredictions();
}
}
function GetPredictions(){
if(vm.route && vm.stop){
vm.loading = true;
NextbusResource.getPrediction({agentId: default_nextbus_agent, routeTag: vm.route, stopTag: vm.stop}).$promise.then(function(success){
var time = TimeService.GetAllInfo();
vm.lastUpdated = time.fullTime;
vm.predictions = success.data.body.predictions;
vm.loading = false;
}, function(error){
vm.error = "Unable to retrieve Nextbus predictions";
});
}
}
vm.interval = setInterval(GetPredictions, vm.refreshTime);
$scope.$on("$destroy", function dismiss() {
clearInterval(vm.interval);
});
}
app.controller("LoginCtrl", ["$scope", "AuthService", "AuthResource", LoginCtrl]);
function LoginCtrl($scope, AuthService, AuthResource){
var vm = this;
// Login form's Model
vm.model = {};
vm.Login = Login;
function Login(dashboard){
var model = (dashboard) ? false : vm.model;
return AuthService.Login(model, function(success){
Materialize.toast("Welcome back!", 5000);
delete vm.error;
}, function(error){
if(error.data.msg){
Materialize.toast(error.data.msg, 5000);
} else{
vm.error = "Unable to log in now. Please try again later.";
}
});
}
}
app.controller("UserCtrl", ["UserResource", "NFCResource", UserCtrl]);
function UserCtrl(UserResource, NFCResource){
var vm = this;
vm.usersTab = false;
vm.signupTab = true;
vm.model = {};
vm.respond = {};
vm.Signup = Signup;
vm.EditUser = EditUser;
vm.DeleteUser = DeleteUser;
LoadUsers();
LoadTags();
function EditUser(user){
return UserResource.edit({email: user.email,model: user.EditFormModel}).$promise.then(function(success){
user.name = success.user.name;
user.permission = success.user.permission;
user.tag = success.user.tag;
user.isEditing = false;
Materialize.toast("Successfully edited user", 5000);
}, function(error){
Materialize.toast(error.data.msg, 5000);
});
}
function DeleteUser(user){
return UserResource.delete({email: user.email}).$promise.then(function(success){
user.isDeleting = false;
// "delete user" on a function parameter is a no-op; remove the user from the list instead
vm.users.splice(vm.users.indexOf(user), 1);
Materialize.toast("Successfully deleted user", 5000);
}, function(error){
Materialize.toast(error.data.msg, 5000);
});
}
function LoadUsers(){
UserResource.query().$promise.then(function(success){
vm.users = success;
angular.forEach(vm.users, function(value, key){
value.isEditing = false;
value.isDeleting = false;
if(value.tag){
NFCResource.get({tagId: value.tag}).$promise.then(function(success){
value.tag = success.data;
}, function(error){
Materialize.toast("Unable to retrieve Tag UID", 5000);
});
}
});
}, function(error){
Materialize.toast("Unable to retrieve users", 5000);
});
}
function LoadTags(){
NFCResource.query().$promise.then(function(success){
vm.Tags = success;
}, function(error){
});
}
function Signup(){
return UserResource.save({}, vm.model).$promise.then(function(res){
vm.respond.success = true;
vm.respond.message = "User has successfully been created";
}, function (err){
if(err.data.msg){
vm.respond.success = false;
vm.respond.message = err.data.msg;
}else{
vm.respond.success = false;
vm.respond.message = "User cannot be created. Please try again later";
}
});
}
}
app.controller("CamCtrl", ["$rootScope", "$scope", "$location", "SessionService", "SocketService", "CameraResource", CamCtrl]);
function CamCtrl($rootScope, $scope, $location, SessionService, SocketService, CameraResource){
var vm = this;
vm.IsDeveloper = SessionService.IsDeveloper();
vm.ToggleStreaming = ToggleStreaming;
vm.DevToggleStreaming = DevToggleStreaming;
vm.loadingLiveStream = true;
vm.isStreaming = true;
vm.isServerStreaming = false;
function ToggleStreaming(){
if(vm.isStreaming){
SocketService.off("liveReply");
} else{
SocketService.on("liveReply", function(data){
vm.liveStreamUri = "data:image/jpg;base64," + data;
});
}
vm.isStreaming = !vm.isStreaming;
}
function DevToggleStreaming(){
if(vm.isServerStreaming){
return CameraResource.save({action: "stop"}).$promise.then(function(success){
Materialize.toast("LiveStream has been stopped", 5000);
}, function(err){
Materialize.toast("Unable to turn off LiveStream", 5000);
});
} else{
return CameraResource.save({action: "start"}).$promise.then(function(success){
vm.imgLoaded = false;
Materialize.toast("LiveStream has been started", 5000);
}, function(err){
Materialize.toast("Unable to turn on LiveStream", 5000);
});
}
}
SocketService.emit("getCamStatus", {query:SessionService.GetToken()});
SocketService.on("liveReply", function(data){
vm.liveStreamUri = "data:image/jpg;base64," + data;
});
SocketService.on("liveStreamStatus", function(data){
vm.loadingLiveStream = false;
vm.isServerStreaming = data;
});
// Always have this in Controller that uses SocketService, to disconnect user from Server's Socket.IO on leaving the page that has this controller
$scope.$on("$destroy", function dismiss() {
SocketService.off("liveReply");
SocketService.off("liveStreamStatus");
});
}
app.controller("NFCCtrl", ["$scope", "SessionService", "SocketService", "NFCResource", NFCCtrl]);
function NFCCtrl($scope, SessionService, SocketService, NFCResource){
var vm = this;
vm.loadingNfc = true;
vm.detectedTags = [];
vm.isNFCOn = false;
vm.isReading = false;
vm.isWriting = false;
vm.isPolling = false;
vm.pollType = false;
vm.OpenWriteTab = OpenWriteTab;
vm.OpenReadTab = OpenReadTab;
vm.RegisterTag = RegisterTag;
vm.StopNFC = StopNFC;
vm.Poll = Poll;
vm.ReadTag = ReadTag;
vm.WriteTag = WriteTag;
SocketService.emit("getNFCStatus", {"query": SessionService.GetToken()}, function(){
});
SocketService.on("nfcStatus", function(data){
vm.loadingNfc = false;
vm.isNFCOn = data;
});
SocketService.on("nfcPollData", function(data){
if(data){
NFCResource.tag({tagUid: data.uid}).$promise.then(function(success){
vm.detectedTags.unshift({date: new Date(), tag: data.uid, registered: success.found});
}, function(error){
vm.detectedTags.unshift({date: new Date(), tag: data.uid, error: true});
});
}
});
function RegisterTag(uid){
return NFCResource.register({tagUid: uid}).$promise.then(function(success){
vm.registerTagInput = "";
Materialize.toast("NFC Tag with UID: "+uid+" has successfully been registered", 5000);
}, function(error){
Materialize.toast("NFC Tag with UID: "+uid+" cannot be registered", 5000);
});
}
function OpenWriteTab(){
vm.isWriting = true;
vm.isPolling = false;
vm.isReading = false;
}
function OpenReadTab(){
vm.isReading = true;
vm.isWriting = false;
vm.isPolling = false;
}
function StopNFC(){
return NFCResource.save({action: "stop"}).$promise.then(function(success){
Materialize.toast("NFC Reader has been stopped", 5000);
vm.isReading = false;
vm.isWriting = false;
vm.isPolling = true;
vm.pollType = false;
}, function(error){
Materialize.toast("Unable to turn off NFC Reader", 5000);
vm.isReading = false;
vm.isWriting = false;
vm.isPolling = true;
});
}
function Poll(type){
return NFCResource.save({action: "poll", type: type}).$promise.then(function(success){
vm.pollType = (type == "authorizeDoorOpener") ? "Authenticating door" : "Polling";
Materialize.toast("NFC Reader starts polling", 5000);
}, function(error){
Materialize.toast("Unable to start NFC Reader", 5000);
});
}
function ReadTag(){
vm.IsWaiting = true;
return NFCResource.save({action: "read"}).$promise.then(function(success){
Materialize.toast("NFC Reader is waiting for an NFC Tag", 5000);
vm.IsWaiting = false;
}, function(error){
if(error.data.timeout){
Materialize.toast("Timeout: NFC Reader does not detect an NFC Tag", 5000);
} else{
Materialize.toast("There is an error in reading your NFC Tag", 5000);
}
vm.IsWaiting = false;
});
}
function WriteTag(message){
vm.IsWaiting = true;
return NFCResource.save({action: "write", data: message}).$promise.then(function(success){
Materialize.toast("NFC Reader is waiting for an NFC Tag", 5000);
vm.IsWaiting = false;
}, function(error){
if(error.data.timeout){
Materialize.toast("Timeout: NFC Reader does not detect an NFC Tag", 5000);
} else{
Materialize.toast("There is an error in writing your NFC Tag", 5000);
}
vm.IsWaiting = false;
});
}
// Always have this in Controller that uses SocketService, to disconnect user from Server's Socket.IO on leaving the page that has this controller
$scope.$on("$destroy", function dismiss() {
SocketService.off("nfcStatus");
SocketService.off("nfcPollData");
});
}
app.controller("DoorCtrl", ["$rootScope", "$scope", "SessionService", "SocketService", "ServoResource", "AnnyangService", DoorCtrl]);
function DoorCtrl($rootScope, $scope, SessionService, SocketService, ServoResource, AnnyangService){
var vm = this;
vm.loadingServoStatus = true;
vm.isBusy = true;
vm.OpenDoor = OpenDoor;
function OpenDoor(){
return ServoResource.save({action: "open-door"}).$promise.then(function(success){
Materialize.toast("Door has been opened", 5000);
}, function (error){
Materialize.toast("Door cannot be opened", 5000);
});
}
SocketService.emit("getDoorStatus", {"query": SessionService.GetToken()}, function(){
});
SocketService.on("doorStatus", function(data){
vm.loadingServoStatus = false;
vm.isBusy = data;
});
AnnyangService.AddCommand("open the door", function(){
OpenDoor();
});
// Always have this in Controller that uses SocketService, to disconnect user from Server's Socket.IO on leaving the page that has this controller
$scope.$on("$destroy", function dismiss() {
SocketService.off("doorStatus");
});
}
app.controller("PirSensorCtrl", ["$scope", "SessionService", "SocketService", "SensorResource", PirSensorCtrl]);
function PirSensorCtrl($scope, SessionService, SocketService, SensorResource){
var maxDataLength = 1000;
var vm = this;
vm.loadingPIR = true;
vm.isPIROn = false;
vm.PIRReading = false;
vm.TogglePIR = TogglePIR;
SocketService.emit("getPIRStatus", {query:SessionService.GetToken()});
SocketService.on("sendPirStatus", function(data){
vm.loadingPIR = false;
vm.isPIROn = data;
if(!vm.isPIROn){
vm.PIRReading = false;
}
});
SocketService.on("sendPirReading", function(data){
vm.PIRReading = data
});
function TogglePIR(){
if(vm.isPIROn){
return SensorResource.togglePirSensor({action:"off"}).$promise.then(function(success){
Materialize.toast("PIR Sensor has been turned off", 5000);
}, function(error){
Materialize.toast("Unable to turn off PIR Sensor", 5000);
})
} else{
return SensorResource.togglePirSensor({action:"on"}).$promise.then(function(success){
Materialize.toast("PIR Sensor has been turned on", 5000);
}, function(error){
Materialize.toast("Unable to turn on PIR Sensor", 5000);
})
}
}
}
app.controller("LightSensorCtrl", ["$scope", "SessionService", "SocketService", "SensorResource", LightSensorCtrl]);
function LightSensorCtrl($scope, SessionService, SocketService, SensorResource){
var vm = this;
vm.loadingLightSensor = true;
vm.isLightSensorOn = false;
vm.LightSensorReading = false;
vm.LightSensorReadingFormatted = "";
vm.ToggleLightSensor = ToggleLightSensor;
SocketService.emit("getLightSensorStatus", {query:SessionService.GetToken()});
SocketService.on("sendLightSensorStatus", function(data){
vm.loadingLightSensor = false;
vm.isLightSensorOn = data;
// Reset if it's off
if(!vm.isLightSensorOn){
vm.LightSensorReading = 0;
}
});
SocketService.on("sendLightSensorReading", function(data){
vm.LightSensorReading = data.raw;
vm.LightSensorReadingFormatted = data.friendlyData;
});
function ToggleLightSensor(){
if(vm.isLightSensorOn){
return SensorResource.toggleLightSensor({action:"off"}).$promise.then(function(success){
Materialize.toast("LightSensor has been turned off", 5000);
}, function(error){
Materialize.toast("Unable to turn off LightSensor", 5000);
});
} else{
return SensorResource.toggleLightSensor({action:"on"}).$promise.then(function(success){
Materialize.toast("LightSensor has been turned on", 5000);
}, function(error){
Materialize.toast("Unable to turn on LightSensor", 5000);
});
}
}
// Always have this in Controller that uses SocketService, to disconnect user from Server's Socket.IO on leaving the page that has this controller
$scope.$on("$destroy", function dismiss() {
SocketService.off("sendLightSensorStatus");
SocketService.off("sendLightSensorReading");
});
}
app.controller("SensorCtrl", ["$scope", "$controller", SensorCtrl]);
function SensorCtrl($scope, $controller){
var vm = this;
vm.p = $controller("PirSensorCtrl", {$scope: $scope});
vm.l = $controller("LightSensorCtrl", {$scope: $scope});
}
app.controller("MilightCtrl", ["$scope", "SessionService", "SocketService", "LightResource", "AnnyangService", MilightCtrl]);
function MilightCtrl($scope, SessionService, SocketService, LightResource, AnnyangService){
var vm = this;
vm.loadingMilightStatus = true;
vm.isLightOn = false;
vm.wheelColor;
vm.ChangeMilightColor = ChangeMilightColor;
vm.ChangeMilightWhite = ChangeMilightWhite;
vm.ChangeBrightness = ChangeBrightness;
vm.TurnMilightOff = TurnMilightOff;
function ChangeMilightColor(hex){
var color = hex || vm.wheelColor;
// Since this is a color wheel, multiple api requests will spam notifications
// therefore I'm just assuming it will be successful.
LightResource.changeColor({type: "milight", color: color});
}
function ChangeMilightWhite(){
return LightResource.toggle({type: "milight", action: "on"}).$promise.then(function(success){
Materialize.toast("Milight has been turned on", 5000);
}, function(error){
Materialize.toast("Unable to turn on Milight", 5000);
});
}
function TurnMilightOff(){
return LightResource.toggle({type: "milight", action: "off"}).$promise.then(function(success){
Materialize.toast("Milight has been turned off", 5000);
}, function(error){
Materialize.toast("Unable to turn off Milight", 5000);
});
}
function ChangeBrightness(){
// Since this is a color wheel, multiple api requests will spam notifications
// therefore I'm just assuming it will be successful.
LightResource.changeBrightness({type: "milight", brightness: vm.milightBrightness});
}
SocketService.emit("getMilightStatus", {"query": SessionService.GetToken()}, function(){
});
SocketService.emit("getLightStripStatus", {"query": SessionService.GetToken()}, function(){
});
SocketService.on("milightStatus", function(data){
vm.loadingMilightStatus = false;
vm.isLightOn = data;
});
SocketService.on("milightBrightness", function(data){
vm.milightBrightness = data;
});
// Always have this in Controller that uses SocketService, to disconnect user from Server's Socket.IO on leaving the page that has this controller
$scope.$on("$destroy", function dismiss() {
SocketService.off("milightStatus");
SocketService.off("milightBrightness");
});
}
app.controller("LightStripCtrl", ["$scope", "SessionService", "SocketService", "LightResource", "AnnyangService", LightStripCtrl]);
function LightStripCtrl($scope, SessionService, SocketService, LightResource, AnnyangService){
var vm = this;
vm.loadingLightStripStatus = true;
vm.isLightStripOn = false;
vm.wheelColor;
vm.ToggleLightStrip = ToggleLightStrip;
vm.Rainbow = Rainbow;
vm.Iterate = Iterate;
vm.Beam = Beam;
vm.SetLightStripColor = SetLightStripColor;
vm.Blink = Blink;
vm.SetLightStripBrightness = SetLightStripBrightness;
vm.StopLightStrip = StopLightStrip;
// LightStrips
function ToggleLightStrip(){
if(vm.isLightStripOn){
StopLightStrip();
} else{
SetLightStripColor("#ffffff");
}
}
function Rainbow(){
return LightResource.lightStripAction({action: "rainbow"}).$promise.then(function(success){
Materialize.toast("LightStrip Rainbow mode!", 5000);
}, function(error){
Materialize.toast("Unable to turn rainbow", 5000);
});
}
function Iterate(){
var delay = vm.lightStripDelay || 500;
var direction = (vm.lightStripDirection === undefined) ? true : vm.lightStripDirection; // "|| true" would force direction to always be true
return LightResource.lightStripAction({action: "iterate", delay: delay, color: vm.lightStripWheelColor, direction: direction}).$promise.then(function(success){
Materialize.toast("LightStrip Iterate mode!", 5000);
}, function(error){
Materialize.toast("Unable to iterate LightStrip", 5000);
});
}
function SetLightStripColor(hex){
var color = hex || vm.lightStripWheelColor;
LightResource.lightStripAction({action: "set-color", color: color});
}
function Blink(){
var delay = vm.lightStripDelay || 1000;
return LightResource.lightStripAction({action: "blink", color: vm.lightStripWheelColor, delay: delay}).$promise.then(function(success){
Materialize.toast("LightStrip has blink!", 5000);
}, function(error){
Materialize.toast("Unable to blink LightStrip", 5000);
});
}
function Beam(){
return LightResource.lightStripAction({action: "beam", color: vm.lightStripWheelColor}).$promise.then(function(success){
Materialize.toast("LightStrip is beaming!", 5000);
}, function(error){
Materialize.toast("Unable to beam LightStrip", 5000);
});
}
function SetLightStripBrightness(){
LightResource.lightStripAction({action: "set-brightness", brightness: vm.lightStripBrightness});
}
function StopLightStrip(){
return LightResource.lightStripAction({action: "stop"}).$promise.then(function(success){
Materialize.toast("LightStrip has been stopped!", 5000);
}, function(error){
Materialize.toast("Unable to stop LightStrip", 5000);
});
}
SocketService.on("lightStripStatus", function(data){
vm.isLightStripOn = data;
vm.loadingLightStripStatus = false;
});
SocketService.on("lightStripBrightness", function(data){
vm.lightStripBrightness = data;
});
// Always have this in Controller that uses SocketService, to disconnect user from Server's Socket.IO on leaving the page that has this controller
$scope.$on("$destroy", function dismiss() {
SocketService.off("lightStripStatus");
SocketService.off("lightStripBrightness");
});
}
app.controller("LightCtrl", ["$scope", "$controller", "AnnyangService", LightCtrl]);
function LightCtrl($scope, $controller, AnnyangService){
var vm = this;
vm.m = $controller("MilightCtrl", {$scope: $scope});
vm.l = $controller("LightStripCtrl", {$scope: $scope});
AnnyangService.AddCommand("turn off the lights", function(){
vm.m.TurnMilightOff();
vm.l.StopLightStrip();
});
AnnyangService.AddCommand("turn on the lights", function(){
vm.m.ChangeMilightWhite();
});
AnnyangService.AddCommand("i feel blue", function(){
var color = "#3374db";
vm.m.ChangeMilightColor(color);
vm.l.SetLightStripColor(color);
});
AnnyangService.AddCommand("paint my love", function(){
var color = "#db33ac";
vm.m.ChangeMilightColor(color);
vm.l.SetLightStripColor(color);
});
AnnyangService.AddCommand("where is the bed", function(){
vm.m.TurnMilightOff();
vm.l.SetLightStripColor("#ffffff");
});
AnnyangService.AddCommand("party mode", function(){
vm.m.TurnMilightOff();
vm.l.Rainbow();
});
}
app.controller("ChatCtrl", ["$rootScope", "$scope", "SessionService", "SocketService", "NotificationService", ChatCtrl]);
function ChatCtrl($rootScope, $scope, SessionService, SocketService, NotificationService){
var vm = this;
vm.Users = [];
vm.Chats = [];
vm.SendMessage = SendMessage;
SocketService.on("connectedClients", ListUsers);
SocketService.emit("getChatClients", {"query": SessionService.GetToken()}, function(data){
ListUsers(data);
});
function ListUsers(data){
vm.Users = data;
}
function SendMessage(){
vm.Chats.push({me: true, message: vm.chat});
SocketService.emit("serverGetChat", {"query": SessionService.GetToken(), from: SessionService.GetName() || SessionService.GetEmail(), message: vm.chat});
delete vm.chat;
}
SocketService.on("serverSendChat", function(data){
vm.Chats.push(data);
});
// Always have this in Controller that uses SocketService, to disconnect user from Server's Socket.IO on leaving the page that has this controller
$scope.$on("$destroy", function() {
SocketService.off("connectedClients");
SocketService.off("serverSendChat");
});
}
})();
priority_queue.go | package queue
import (
"container/heap"
"math"
)
var _ Interface = &priorityQueue{}
func NewPriorityQueue(capacity int, niceness func(Item) int) *priorityQueue {
return &priorityQueue{
niceness: niceness,
heapSlice: make([]*item, 0, int(math.Max(1, float64(capacity)))),
autoscale: capacity > 0,
}
}
type priorityQueue struct {
// Similar to the nice value in Unix: low values are
// high priority; high values are "nicer" and let
// others cut in line.
niceness func(Item) int
heapSlice heapSlice
autoscale bool
}
type item struct {
value Item // The value of the item; arbitrary.
priority int // The priority of the item in the queue.
// The index is needed by update and is maintained by the heap.Interface methods.
index int // The index of the item in the heap.
}
func (pq *priorityQueue) Peek() Item {
return pq.heapSlice[0].value
}
func (pq *priorityQueue) Enqueue(val Item) {
n := len(pq.heapSlice)
pq.heapSlice = append(pq.heapSlice, &item{
value: val,
priority: pq.niceness(val),
index: n,
})
heap.Fix(&pq.heapSlice, n)
}
func (pq *priorityQueue) Dequeue() Item {
val, _ := heap.Pop(&pq.heapSlice).(*item)
return val.value
}
func (pq *priorityQueue) Len() int {
return pq.heapSlice.Len()
}
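// Illustrative usage (assumes a concrete Item implementation named task with a
// nice field; lower niceness values dequeue first; not part of the original file):
//
//	pq := NewPriorityQueue(16, func(it Item) int { return it.(task).nice })
//	pq.Enqueue(task{nice: 5})
//	pq.Enqueue(task{nice: 1})
//	next := pq.Dequeue() // the task with nice == 1 comes out first
//	// Note: Peek and Dequeue panic on an empty queue; check Len() first.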
////////////// Type over which the heap algorithm performs /////////
type heapSlice []*item
var heapSliceTypeAssertion heapSlice = []*item{}
var _ heap.Interface = &heapSliceTypeAssertion
func (hs *heapSlice) Push(x interface{}) {
n := len(*hs)
item := x.(*item)
item.index = n
*hs = append(*hs, item)
}
func (hs *heapSlice) Pop() interface{} {
old := *hs
n := len(old)
item := old[n-1]
old[n-1] = nil // avoid memory leak
item.index = -1 // for safety
*hs = old[0 : n-1]
return item
}
func (hs *heapSlice) update(item *item) {
heap.Fix(hs, item.index)
}
////////////// sort.Interface //////////////////////////
func (hs heapSlice) Len() int { return len(hs) }
func (hs heapSlice) Less(i, j int) bool {
return hs[i].priority < hs[j].priority
}
func (hs heapSlice) Swap(i, j int) {
hs[i], hs[j] = hs[j], hs[i]
hs[i].index = i
hs[j].index = j
}
wall.py | from vkwave.types.responses import *
from ._category import Category
from ._utils import get_params
class Wall(Category):
async def check_copyright_link(
self, link: str, return_raw_response: bool = False,
) -> typing.Union[dict, BaseBoolResponse]:
"""
:param link:
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("checkCopyrightLink", params)
if return_raw_response:
return raw_result
result = BaseBoolResponse(**raw_result)
return result
async def close_comments(
self, owner_id: int, post_id: int, return_raw_response: bool = False,
) -> typing.Union[dict, BaseBoolResponse]:
"""
:param owner_id:
:param post_id:
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("closeComments", params)
if return_raw_response:
return raw_result
result = BaseBoolResponse(**raw_result)
return result
async def create_comment(
self,
post_id: int,
return_raw_response: bool = False,
owner_id: typing.Optional[int] = None,
from_group: typing.Optional[int] = None,
message: typing.Optional[str] = None,
reply_to_comment: typing.Optional[int] = None,
attachments: typing.Optional[typing.List[str]] = None,
sticker_id: typing.Optional[int] = None,
guid: typing.Optional[str] = None,
) -> typing.Union[dict, WallCreateCommentResponse]:
"""
:param owner_id: - User ID or community ID. Use a negative value to designate a community ID.
:param post_id: - Post ID.
:param from_group: - Group ID.
:param message: - (Required if 'attachments' is not set.) Text of the comment.
:param reply_to_comment: - ID of the comment being replied to.
:param attachments: - (Required if 'message' is not set.) List of media objects attached to the comment, in the following format: "<owner_id>_<media_id>,<owner_id>_<media_id>", '' — Type of media object: 'photo' — photo, 'video' — video, 'audio' — audio, 'doc' — document, '<owner_id>' — ID of the media owner. '<media_id>' — Media ID. For example: "photo100172_166443618,photo66748_265827614"
:param sticker_id: - Sticker ID.
:param guid: - Unique identifier to avoid repeated comments.
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("createComment", params)
if return_raw_response:
return raw_result
result = WallCreateCommentResponse(**raw_result)
return result
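# Illustrative call (assumes an initialized API context, constructed the same
# way as for other vkwave categories; identifiers here are made up):
#
#   wall = Wall(api_context)
#   resp = await wall.create_comment(post_id=42, owner_id=-12345,
#                                    message="Nice post!")
#   # resp is a WallCreateCommentResponse unless return_raw_response=True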
async def delete(
self,
return_raw_response: bool = False,
owner_id: typing.Optional[int] = None,
post_id: typing.Optional[int] = None,
) -> typing.Union[dict, BaseOkResponse]:
"""
:param owner_id: - User ID or community ID. Use a negative value to designate a community ID.
:param post_id: - ID of the post to be deleted.
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("delete", params)
if return_raw_response:
return raw_result
result = BaseOkResponse(**raw_result)
return result
async def delete_comment(
self,
comment_id: int,
return_raw_response: bool = False,
owner_id: typing.Optional[int] = None,
) -> typing.Union[dict, BaseOkResponse]:
"""
:param owner_id: - User ID or community ID. Use a negative value to designate a community ID.
:param comment_id: - Comment ID.
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("deleteComment", params)
if return_raw_response:
return raw_result
result = BaseOkResponse(**raw_result)
return result
async def edit(
self,
post_id: int,
return_raw_response: bool = False,
owner_id: typing.Optional[int] = None,
friends_only: typing.Optional[bool] = None,
message: typing.Optional[str] = None,
attachments: typing.Optional[typing.List[str]] = None,
services: typing.Optional[str] = None,
signed: typing.Optional[bool] = None,
publish_date: typing.Optional[int] = None,
lat: typing.Optional[int] = None,
long: typing.Optional[int] = None,
place_id: typing.Optional[int] = None,
mark_as_ads: typing.Optional[bool] = None,
close_comments: typing.Optional[bool] = None,
poster_bkg_id: typing.Optional[int] = None,
poster_bkg_owner_id: typing.Optional[int] = None,
poster_bkg_access_hash: typing.Optional[str] = None,
copyright: typing.Optional[str] = None,
) -> typing.Union[dict, WallEditResponse]:
"""
:param owner_id: - User ID or community ID. Use a negative value to designate a community ID.
:param post_id:
:param friends_only:
:param message: - (Required if 'attachments' is not set.) Text of the post.
:param attachments: - (Required if 'message' is not set.) List of objects attached to the post, in the following format: "<owner_id>_<media_id>,<owner_id>_<media_id>", '' — Type of media attachment: 'photo' — photo, 'video' — video, 'audio' — audio, 'doc' — document, '<owner_id>' — ID of the media application owner. '<media_id>' — Media application ID. Example: "photo100172_166443618,photo66748_265827614", May contain a link to an external page to include in the post. Example: "photo66748_265827614,http://habrahabr.ru", "NOTE: If more than one link is being attached, an error is thrown."
:param services:
:param signed:
:param publish_date:
:param lat:
:param long:
:param place_id:
:param mark_as_ads:
:param close_comments:
:param poster_bkg_id:
:param poster_bkg_owner_id:
:param poster_bkg_access_hash:
:param copyright:
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("edit", params)
if return_raw_response:
return raw_result
result = WallEditResponse(**raw_result)
return result
async def edit_ads_stealth(
self,
post_id: int,
return_raw_response: bool = False,
owner_id: typing.Optional[int] = None,
message: typing.Optional[str] = None,
attachments: typing.Optional[typing.List[str]] = None,
signed: typing.Optional[BaseBoolInt] = None,
lat: typing.Optional[int] = None,
long: typing.Optional[int] = None,
place_id: typing.Optional[int] = None,
link_button: typing.Optional[str] = None,
link_title: typing.Optional[str] = None,
link_image: typing.Optional[str] = None,
link_video: typing.Optional[str] = None,
) -> typing.Union[dict, BaseOkResponse]:
"""
:param owner_id: - User ID or community ID. Use a negative value to designate a community ID.
:param post_id: - Post ID. Used for publishing of scheduled and suggested posts.
:param message: - (Required if 'attachments' is not set.) Text of the post.
:param attachments: - (Required if 'message' is not set.) List of objects attached to the post, in the following format: "<owner_id>_<media_id>,<owner_id>_<media_id>", '' — Type of media attachment: 'photo' — photo, 'video' — video, 'audio' — audio, 'doc' — document, 'page' — wiki-page, 'note' — note, 'poll' — poll, 'album' — photo album, '<owner_id>' — ID of the media application owner. '<media_id>' — Media application ID. Example: "photo100172_166443618,photo66748_265827614", May contain a link to an external page to include in the post. Example: "photo66748_265827614,http://habrahabr.ru", "NOTE: If more than one link is being attached, an error will be thrown."
:param signed: - Only for posts in communities with 'from_group' set to '1': '1' — post will be signed with the name of the posting user, '0' — post will not be signed (default)
:param lat: - Geographical latitude of a check-in, in degrees (from -90 to 90).
:param long: - Geographical longitude of a check-in, in degrees (from -180 to 180).
:param place_id: - ID of the location where the user was tagged.
:param link_button: - Link button ID
:param link_title: - Link title
:param link_image: - Link image url
:param link_video: - Link video ID in format "<owner_id>_<media_id>"
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("editAdsStealth", params)
if return_raw_response:
return raw_result
result = BaseOkResponse(**raw_result)
return result
async def edit_comment(
self,
comment_id: int,
return_raw_response: bool = False,
owner_id: typing.Optional[int] = None,
message: typing.Optional[str] = None,
attachments: typing.Optional[typing.List[str]] = None,
) -> typing.Union[dict, BaseOkResponse]:
"""
:param owner_id: - User ID or community ID. Use a negative value to designate a community ID.
:param comment_id: - Comment ID.
:param message: - New comment text.
:param attachments: - List of objects attached to the comment, in the following format: , "<owner_id>_<media_id>,<owner_id>_<media_id>", '' — Type of media attachment: 'photo' — photo, 'video' — video, 'audio' — audio, 'doc' — document, '<owner_id>' — ID of the media attachment owner. '<media_id>' — Media attachment ID. For example: "photo100172_166443618,photo66748_265827614"
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("editComment", params)
if return_raw_response:
return raw_result
result = BaseOkResponse(**raw_result)
return result
async def get(
self,
return_raw_response: bool = False,
owner_id: typing.Optional[int] = None,
domain: typing.Optional[str] = None,
offset: typing.Optional[int] = None,
count: typing.Optional[int] = None,
filter: typing.Optional[str] = None,
extended: typing.Optional[BaseBoolInt] = None,
fields: typing.Optional[typing.List[BaseUserGroupFields]] = None,
) -> typing.Union[dict, WallGetResponse, WallGetExtendedResponse]:
"""
:param owner_id: - ID of the user or community that owns the wall. By default, current user ID. Use a negative value to designate a community ID.
:param domain: - User or community short address.
:param offset: - Offset needed to return a specific subset of posts.
:param count: - Number of posts to return (maximum 100).
:param filter: - Filter to apply: 'owner' — posts by the wall owner, 'others' — posts by someone else, 'all' — posts by the wall owner and others (default), 'postponed' — timed posts (only available for calls with an 'access_token'), 'suggests' — suggested posts on a community wall
:param extended: - '1' — to return 'wall', 'profiles', and 'groups' fields, '0' — to return no additional fields (default)
:param fields:
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("get", params)
if return_raw_response:
return raw_result
result = (
WallGetResponse(**raw_result)
if not extended
else WallGetExtendedResponse(**raw_result)
)
return result
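# Illustrative: the 'extended' flag selects the response model, e.g.
#   plain = await wall.get(owner_id=-1, count=10)              # WallGetResponse
#   rich  = await wall.get(owner_id=-1, count=10, extended=1)  # WallGetExtendedResponse
# (wall is a hypothetical Wall instance, as in the create_comment example above.)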
async def get_by_id(
self,
posts: typing.List[str],
return_raw_response: bool = False,
extended: typing.Optional[BaseBoolInt] = None,
copy_history_depth: typing.Optional[int] = None,
fields: typing.Optional[typing.List[BaseUserGroupFields]] = None,
) -> typing.Union[dict, WallGetByIdLegacyResponse, WallGetByIdExtendedResponse]:
"""
:param posts: - User or community IDs and post IDs, separated by underscores. Use a negative value to designate a community ID. Example: "93388_21539,93388_20904,2943_4276,-1_1"
:param extended: - '1' — to return user and community objects needed to display posts, '0' — no additional fields are returned (default)
:param copy_history_depth: - Sets the number of parent elements to include in the array 'copy_history' that is returned if the post is a repost from another wall.
:param fields:
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("getById", params)
if return_raw_response:
return raw_result
result = (
WallGetByIdLegacyResponse(**raw_result)
if not extended
else WallGetByIdExtendedResponse(**raw_result)
)
return result
async def get_comment(
self,
comment_id: int,
return_raw_response: bool = False,
owner_id: typing.Optional[int] = None,
extended: typing.Optional[BaseBoolInt] = None,
fields: typing.Optional[typing.List[BaseUserGroupFields]] = None,
) -> typing.Union[dict, WallGetCommentResponse, WallGetCommentExtendedResponse]:
"""
:param owner_id: - User ID or community ID. Use a negative value to designate a community ID.
:param comment_id: - Comment ID.
:param extended:
:param fields:
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("getComment", params)
if return_raw_response:
return raw_result
result = (
WallGetCommentResponse(**raw_result)
if not extended
else WallGetCommentExtendedResponse(**raw_result)
)
return result
async def get_comments(
self,
return_raw_response: bool = False,
owner_id: typing.Optional[int] = None,
post_id: typing.Optional[int] = None,
need_likes: typing.Optional[BaseBoolInt] = None,
start_comment_id: typing.Optional[int] = None,
offset: typing.Optional[int] = None,
count: typing.Optional[int] = None,
sort: typing.Optional[str] = None,
preview_length: typing.Optional[int] = None,
extended: typing.Optional[BaseBoolInt] = None,
fields: typing.Optional[typing.List[BaseUserGroupFields]] = None,
comment_id: typing.Optional[int] = None,
thread_items_count: typing.Optional[int] = None,
) -> typing.Union[dict, WallGetCommentsResponse, WallGetCommentsExtendedResponse]:
"""
:param owner_id: - User ID or community ID. Use a negative value to designate a community ID.
:param post_id: - Post ID.
:param need_likes: - '1' — to return the 'likes' field, '0' — not to return the 'likes' field (default)
:param start_comment_id:
:param offset: - Offset needed to return a specific subset of comments.
:param count: - Number of comments to return (maximum 100).
:param sort: - Sort order: 'asc' — chronological, 'desc' — reverse chronological
:param preview_length: - Number of characters at which to truncate comments when previewed. By default, '90'. Specify '0' if you do not want to truncate comments.
:param extended:
:param fields:
:param comment_id: - Comment ID.
:param thread_items_count: - Count items in threads.
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("getComments", params)
if return_raw_response:
return raw_result
result = (
WallGetCommentsResponse(**raw_result)
if not extended
else WallGetCommentsExtendedResponse(**raw_result)
)
return result
async def get_reposts(
self,
return_raw_response: bool = False,
owner_id: typing.Optional[int] = None,
post_id: typing.Optional[int] = None,
offset: typing.Optional[int] = None,
count: typing.Optional[int] = None,
) -> typing.Union[dict, WallGetRepostsResponse]:
"""
:param owner_id: - User ID or community ID. By default, current user ID. Use a negative value to designate a community ID.
:param post_id: - Post ID.
:param offset: - Offset needed to return a specific subset of reposts.
:param count: - Number of reposts to return.
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("getReposts", params)
if return_raw_response:
return raw_result
result = WallGetRepostsResponse(**raw_result)
return result
async def open_comments(
self, owner_id: int, post_id: int, return_raw_response: bool = False,
) -> typing.Union[dict, BaseBoolResponse]:
"""
:param owner_id:
:param post_id:
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("openComments", params)
if return_raw_response:
return raw_result
result = BaseBoolResponse(**raw_result)
return result
async def pin(
self,
post_id: int,
return_raw_response: bool = False,
owner_id: typing.Optional[int] = None,
) -> typing.Union[dict, BaseOkResponse]:
"""
:param owner_id: - ID of the user or community that owns the wall. By default, current user ID. Use a negative value to designate a community ID.
:param post_id: - Post ID.
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("pin", params)
if return_raw_response:
return raw_result
result = BaseOkResponse(**raw_result)
return result
async def post(
self,
return_raw_response: bool = False,
owner_id: typing.Optional[int] = None,
friends_only: typing.Optional[BaseBoolInt] = None,
from_group: typing.Optional[BaseBoolInt] = None,
message: typing.Optional[str] = None,
attachments: typing.Optional[typing.List[str]] = None,
services: typing.Optional[str] = None,
signed: typing.Optional[BaseBoolInt] = None,
publish_date: typing.Optional[int] = None,
lat: typing.Optional[int] = None,
long: typing.Optional[int] = None,
place_id: typing.Optional[int] = None,
post_id: typing.Optional[int] = None,
guid: typing.Optional[str] = None,
mark_as_ads: typing.Optional[bool] = None,
close_comments: typing.Optional[bool] = None,
mute_notifications: typing.Optional[bool] = None,
copyright: typing.Optional[str] = None,
) -> typing.Union[dict, WallPostResponse]:
"""
:param owner_id: - User ID or community ID. Use a negative value to designate a community ID.
:param friends_only: - '1' — post will be available to friends only, '0' — post will be available to all users (default)
:param from_group: - For a community: '1' — post will be published by the community, '0' — post will be published by the user (default)
:param message: - (Required if 'attachments' is not set.) Text of the post.
:param attachments: - (Required if 'message' is not set.) List of objects attached to the post, in the following format: "<owner_id>_<media_id>,<owner_id>_<media_id>", '' — Type of media attachment: 'photo' — photo, 'video' — video, 'audio' — audio, 'doc' — document, 'page' — wiki-page, 'note' — note, 'poll' — poll, 'album' — photo album, '<owner_id>' — ID of the media application owner. '<media_id>' — Media application ID. Example: "photo100172_166443618,photo66748_265827614", May contain a link to an external page to include in the post. Example: "photo66748_265827614,http://habrahabr.ru", "NOTE: If more than one link is being attached, an error will be thrown."
:param services: - List of services or websites the update will be exported to, if the user has so requested. Sample values: 'twitter', 'facebook'.
:param signed: - Only for posts in communities with 'from_group' set to '1': '1' — post will be signed with the name of the posting user, '0' — post will not be signed (default)
:param publish_date: - Publication date (in Unix time). If used, posting will be delayed until the set time.
:param lat: - Geographical latitude of a check-in, in degrees (from -90 to 90).
:param long: - Geographical longitude of a check-in, in degrees (from -180 to 180).
:param place_id: - ID of the location where the user was tagged.
:param post_id: - Post ID. Used for publishing of scheduled and suggested posts.
:param guid:
:param mark_as_ads:
:param close_comments:
:param mute_notifications:
:param copyright:
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("post", params)
if return_raw_response:
return raw_result
result = WallPostResponse(**raw_result)
return result
async def post_ads_stealth(
self,
owner_id: int,
return_raw_response: bool = False,
message: typing.Optional[str] = None,
attachments: typing.Optional[typing.List[str]] = None,
signed: typing.Optional[BaseBoolInt] = None,
lat: typing.Optional[int] = None,
long: typing.Optional[int] = None,
place_id: typing.Optional[int] = None,
guid: typing.Optional[str] = None,
link_button: typing.Optional[str] = None,
link_title: typing.Optional[str] = None,
link_image: typing.Optional[str] = None,
link_video: typing.Optional[str] = None,
) -> typing.Union[dict, WallPostAdsStealthResponse]:
"""
:param owner_id: - User ID or community ID. Use a negative value to designate a community ID.
:param message: - (Required if 'attachments' is not set.) Text of the post.
:param attachments: - (Required if 'message' is not set.) List of objects attached to the post, in the following format: "<owner_id>_<media_id>,<owner_id>_<media_id>", '<type>' — Type of media attachment: 'photo' — photo, 'video' — video, 'audio' — audio, 'doc' — document, 'page' — wiki-page, 'note' — note, 'poll' — poll, 'album' — photo album, '<owner_id>' — ID of the media application owner. '<media_id>' — Media application ID. Example: "photo100172_166443618,photo66748_265827614". May contain a link to an external page to include in the post. Example: "photo66748_265827614,http://habrahabr.ru", "NOTE: If more than one link is being attached, an error will be thrown."
:param signed: - Only for posts in communities with 'from_group' set to '1': '1' — post will be signed with the name of the posting user, '0' — post will not be signed (default)
:param lat: - Geographical latitude of a check-in, in degrees (from -90 to 90).
:param long: - Geographical longitude of a check-in, in degrees (from -180 to 180).
:param place_id: - ID of the location where the user was tagged.
:param guid: - Unique identifier to avoid duplication the same post.
:param link_button: - Link button ID
:param link_title: - Link title
:param link_image: - Link image url
:param link_video: - Link video ID in format "<owner_id>_<media_id>"
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("postAdsStealth", params)
if return_raw_response:
return raw_result
result = WallPostAdsStealthResponse(**raw_result)
return result
async def report_comment(
self,
owner_id: int,
comment_id: int,
return_raw_response: bool = False,
reason: typing.Optional[int] = None,
) -> typing.Union[dict, BaseOkResponse]:
"""
:param owner_id: - ID of the user or community that owns the wall.
:param comment_id: - Comment ID.
:param reason: - Reason for the complaint: '0' – spam, '1' – child pornography, '2' – extremism, '3' – violence, '4' – drug propaganda, '5' – adult material, '6' – insult, abuse
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("reportComment", params)
if return_raw_response:
return raw_result
result = BaseOkResponse(**raw_result)
return result
async def report_post(
self,
owner_id: int,
post_id: int,
return_raw_response: bool = False,
reason: typing.Optional[int] = None,
) -> typing.Union[dict, BaseOkResponse]:
"""
:param owner_id: - ID of the user or community that owns the wall.
:param post_id: - Post ID.
:param reason: - Reason for the complaint: '0' – spam, '1' – child pornography, '2' – extremism, '3' – violence, '4' – drug propaganda, '5' – adult material, '6' – insult, abuse
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("reportPost", params)
if return_raw_response:
return raw_result
result = BaseOkResponse(**raw_result)
return result
async def repost(
self,
object: str,
return_raw_response: bool = False,
message: typing.Optional[str] = None,
group_id: typing.Optional[int] = None,
mark_as_ads: typing.Optional[bool] = None,
mute_notifications: typing.Optional[bool] = None,
) -> typing.Union[dict, WallRepostResponse]:
"""
:param object: - ID of the object to be reposted on the wall. Example: "wall66748_3675"
:param message: - Comment to be added along with the reposted object.
:param group_id: - Target community ID when reposting to a community.
:param mark_as_ads:
:param mute_notifications:
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("repost", params)
if return_raw_response:
return raw_result
result = WallRepostResponse(**raw_result)
return result
async def restore(
self,
return_raw_response: bool = False,
owner_id: typing.Optional[int] = None,
post_id: typing.Optional[int] = None,
) -> typing.Union[dict, BaseOkResponse]:
"""
:param owner_id: - User ID or community ID from whose wall the post was deleted. Use a negative value to designate a community ID.
:param post_id: - ID of the post to be restored.
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("restore", params)
if return_raw_response:
return raw_result
result = BaseOkResponse(**raw_result)
return result
async def restore_comment(
self,
comment_id: int,
return_raw_response: bool = False,
owner_id: typing.Optional[int] = None,
) -> typing.Union[dict, BaseOkResponse]:
"""
:param owner_id: - User ID or community ID. Use a negative value to designate a community ID.
:param comment_id: - Comment ID.
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("restoreComment", params)
if return_raw_response:
return raw_result
result = BaseOkResponse(**raw_result)
return result
async def search(
self,
return_raw_response: bool = False,
owner_id: typing.Optional[int] = None,
domain: typing.Optional[str] = None,
query: typing.Optional[str] = None,
owners_only: typing.Optional[BaseBoolInt] = None,
count: typing.Optional[int] = None,
offset: typing.Optional[int] = None,
extended: typing.Optional[BaseBoolInt] = None,
fields: typing.Optional[typing.List[BaseUserGroupFields]] = None,
) -> typing.Union[dict, WallSearchResponse, WallSearchExtendedResponse]:
"""
:param owner_id: - user or community id. "Remember that for a community 'owner_id' must be negative."
:param domain: - user or community screen name.
:param query: - search query string.
:param owners_only: - '1' – returns only page owner's posts.
:param count: - count of posts to return.
:param offset: - Offset needed to return a specific subset of posts.
:param extended: - show extended post info.
:param fields:
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("search", params)
if return_raw_response:
return raw_result
result = (
WallSearchResponse(**raw_result)
if not extended
else WallSearchExtendedResponse(**raw_result)
)
return result
async def unpin(
self,
post_id: int,
return_raw_response: bool = False,
owner_id: typing.Optional[int] = None,
) -> typing.Union[dict, BaseOkResponse]:
"""
:param owner_id: - ID of the user or community that owns the wall. By default, current user ID. Use a negative value to designate a community ID.
:param post_id: - Post ID.
:param return_raw_response: - return result as dict
:return:
"""
params = get_params(locals())
raw_result = await self.api_request("unpin", params)
if return_raw_response:
return raw_result
result = BaseOkResponse(**raw_result)
return result
| d: typing.Op |
mempool_persist.py | #!/usr/bin/env python3
# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool persistence.
By default, bitcoind will dump mempool on shutdown and
then reload it on startup. This can be overridden with
the -persistmempool=0 command line option.
Test is as follows:
- start node0, node1 and node2. node1 has -persistmempool=0
- create 5 transactions on node2 to its own address. Note that these
are not sent to node0 or node1 addresses because we don't want
them to be saved in the wallet.
- check that node0 and node1 have 5 transactions in their mempools
- shutdown all nodes.
- startup node0. Verify that it still has 5 transactions
in its mempool. Shutdown node0. This tests that by default the
mempool is persistent.
- startup node1. Verify that its mempool is empty. Shutdown node1.
This tests that with -persistmempool=0, the mempool is not
dumped to disk when the node is shut down.
- Restart node0 with -persistmempool=0. Verify that its mempool is
empty. Shutdown node0. This tests that with -persistmempool=0,
the mempool is not loaded from disk on start up.
- Restart node0 with -persistmempool. Verify that it has 5
transactions in its mempool. This tests that -persistmempool=0
does not overwrite a previously valid mempool stored on disk.
- Remove node0 mempool.dat and verify savemempool RPC recreates it
and verify that node1 can load it and has 5 transactions in its
mempool.
- Verify that savemempool throws when the RPC is called if
node1 can't write to disk.
"""
from decimal import Decimal
import os
import time
from test_framework.p2p import P2PTxInvStore
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than_or_equal,
assert_raises_rpc_error,
connect_nodes,
disconnect_nodes,
)
class MempoolPersistTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 3
self.extra_args = [[], ["-persistmempool=0"], []]
def skip_test_if_missing_module(self):
|
def run_test(self):
self.log.debug("Send 5 transactions from node2 (to its own address)")
tx_creation_time_lower = int(time.time())
for _ in range(5):
last_txid = self.nodes[2].sendtoaddress(self.nodes[2].getnewaddress(), Decimal("10"))
node2_balance = self.nodes[2].getbalance()
self.sync_all()
tx_creation_time_higher = int(time.time())
self.log.debug("Verify that node0 and node1 have 5 transactions in their mempools")
assert_equal(len(self.nodes[0].getrawmempool()), 5)
assert_equal(len(self.nodes[1].getrawmempool()), 5)
self.log.debug("Prioritize a transaction on node0")
fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
assert_equal(fees['base'], fees['modified'])
self.nodes[0].prioritisetransaction(txid=last_txid, fee_delta=1000)
fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])
tx_creation_time = self.nodes[0].getmempoolentry(txid=last_txid)['time']
assert_greater_than_or_equal(tx_creation_time, tx_creation_time_lower)
assert_greater_than_or_equal(tx_creation_time_higher, tx_creation_time)
# disconnect nodes & make a txn that remains in the unbroadcast set.
disconnect_nodes(self.nodes[0], 1)
assert(len(self.nodes[0].getpeerinfo()) == 0)
assert(len(self.nodes[0].p2ps) == 0)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), Decimal("12"))
connect_nodes(self.nodes[0], 2)
self.log.debug("Stop-start the nodes. Verify that node0 has the transactions in its mempool and node1 does not. Verify that node2 calculates its balance correctly after loading wallet transactions.")
self.stop_nodes()
# Give this node a head-start, so we can be "extra-sure" that it didn't load anything later
# Also don't store the mempool, to keep the datadir clean
self.start_node(1, extra_args=["-persistmempool=0"])
self.start_node(0)
self.start_node(2)
assert self.nodes[0].getmempoolinfo()["loaded"] # start_node is blocking on the mempool being loaded
assert self.nodes[2].getmempoolinfo()["loaded"]
assert_equal(len(self.nodes[0].getrawmempool()), 6)
assert_equal(len(self.nodes[2].getrawmempool()), 5)
# The others have loaded their mempool. If node_1 loaded anything, we'd probably notice by now:
assert_equal(len(self.nodes[1].getrawmempool()), 0)
self.log.debug('Verify prioritization is loaded correctly')
fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])
self.log.debug('Verify time is loaded correctly')
assert_equal(tx_creation_time, self.nodes[0].getmempoolentry(txid=last_txid)['time'])
# Verify accounting of mempool transactions after restart is correct
self.nodes[2].syncwithvalidationinterfacequeue() # Flush mempool to wallet
assert_equal(node2_balance, self.nodes[2].getbalance())
# start node0 with wallet disabled so wallet transactions don't get resubmitted
self.log.debug("Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.")
self.stop_nodes()
self.start_node(0, extra_args=["-persistmempool=0", "-disablewallet"])
assert self.nodes[0].getmempoolinfo()["loaded"]
assert_equal(len(self.nodes[0].getrawmempool()), 0)
self.log.debug("Stop-start node0. Verify that it has the transactions in its mempool.")
self.stop_nodes()
self.start_node(0)
assert self.nodes[0].getmempoolinfo()["loaded"]
assert_equal(len(self.nodes[0].getrawmempool()), 6)
mempooldat0 = os.path.join(self.nodes[0].datadir, self.chain, 'mempool.dat')
mempooldat1 = os.path.join(self.nodes[1].datadir, self.chain, 'mempool.dat')
self.log.debug("Remove the mempool.dat file. Verify that savemempool to disk via RPC re-creates it")
os.remove(mempooldat0)
self.nodes[0].savemempool()
assert os.path.isfile(mempooldat0)
self.log.debug("Stop nodes, make node1 use mempool.dat from node0. Verify it has 6 transactions")
os.rename(mempooldat0, mempooldat1)
self.stop_nodes()
self.start_node(1, extra_args=[])
assert self.nodes[1].getmempoolinfo()["loaded"]
assert_equal(len(self.nodes[1].getrawmempool()), 6)
self.log.debug("Prevent bitcoind from writing mempool.dat to disk. Verify that `savemempool` fails")
# to test the exception we are creating a tmp folder called mempool.dat.new
# which is an implementation detail that could change and break this test
mempooldotnew1 = mempooldat1 + '.new'
os.mkdir(mempooldotnew1)
assert_raises_rpc_error(-1, "Unable to dump mempool to disk", self.nodes[1].savemempool)
os.rmdir(mempooldotnew1)
self.test_persist_unbroadcast()
def test_persist_unbroadcast(self):
node0 = self.nodes[0]
self.start_node(0)
# clear out mempool
node0.generate(1)
# ensure node0 doesn't have any connections
# make a transaction that will remain in the unbroadcast set
assert(len(node0.getpeerinfo()) == 0)
assert(len(node0.p2ps) == 0)
node0.sendtoaddress(self.nodes[1].getnewaddress(), Decimal("12"))
# shutdown, then startup with wallet disabled
self.stop_nodes()
self.start_node(0, extra_args=["-disablewallet"])
# check that txn gets broadcast due to unbroadcast logic
conn = node0.add_p2p_connection(P2PTxInvStore())
node0.mockscheduler(16*60) # 15 min + 1 for buffer
self.wait_until(lambda: len(conn.get_invs()) == 1)
if __name__ == '__main__':
MempoolPersistTest().main()
| self.skip_if_no_wallet() |
p2p_timeouts.py | #!/usr/bin/env python3
# Copyright (c) 2016-2018 The Test Coin Super Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test various net timeouts.
- Create three testcoinsuperd nodes:
no_verack_node - we never send a verack in response to their version
no_version_node - we never send a version (only a ping)
no_send_node - we never send any P2P message.
- Start all three nodes
- Wait 1 second
- Assert that we're connected
- Send a ping to no_verack_node and no_version_node
- Wait 30 seconds
- Assert that we're still connected
- Send a ping to no_verack_node and no_version_node
- Wait 31 seconds
- Assert that we're no longer connected (timeout to receive version/verack is 60 seconds)
"""
from time import sleep
from test_framework.messages import msg_ping
from test_framework.mininode import P2PInterface
from test_framework.test_framework import TestCoinSuperTestFramework
class TestP2PConn(P2PInterface):
|
class TimeoutsTest(TestCoinSuperTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def run_test(self):
# Setup the p2p connections
no_verack_node = self.nodes[0].add_p2p_connection(TestP2PConn())
no_version_node = self.nodes[0].add_p2p_connection(TestP2PConn(), send_version=False, wait_for_verack=False)
no_send_node = self.nodes[0].add_p2p_connection(TestP2PConn(), send_version=False, wait_for_verack=False)
sleep(1)
assert no_verack_node.is_connected
assert no_version_node.is_connected
assert no_send_node.is_connected
no_verack_node.send_message(msg_ping())
no_version_node.send_message(msg_ping())
sleep(30)
assert "version" in no_verack_node.last_message
assert no_verack_node.is_connected
assert no_version_node.is_connected
assert no_send_node.is_connected
no_verack_node.send_message(msg_ping())
no_version_node.send_message(msg_ping())
sleep(31)
assert not no_verack_node.is_connected
assert not no_version_node.is_connected
assert not no_send_node.is_connected
if __name__ == '__main__':
TimeoutsTest().main()
| def on_version(self, message):
# Don't send a verack in response
pass |
tracer-provider.ts | import { IncomingHttpHeaders } from 'http';
import { JaegerExporter } from '@opentelemetry/exporter-jaeger';
import { NodeTracerProvider } from '@opentelemetry/node';
import { TracerShim } from '@opentelemetry/shim-opentracing';
import {
BatchSpanProcessor,
SimpleSpanProcessor,
} from '@opentelemetry/tracing';
import {
FORMAT_HTTP_HEADERS,
FORMAT_TEXT_MAP,
globalTracer, | initGlobalTracer,
Span,
SpanContext,
Tracer,
} from 'opentracing';
import { TracingData, TracingModuleOptions } from '../interfaces';
export class TracerProvider {
private static instance: TracerProvider;
private tracer: Tracer;
private constructor(options: TracingModuleOptions) {
process.env.OTEL_LOG_LEVEL = 'ERROR';
process.env.OTEL_NO_PATCH_MODULES = '*';
this.tracer = TracerProvider.initialize(options);
}
private static initialize(options: TracingModuleOptions): Tracer {
const { exporterConfig, isSimpleSpanProcessor } = options;
const { serviceName } = exporterConfig;
const tracerProvider = new NodeTracerProvider();
const exporter = new JaegerExporter(exporterConfig);
const spanProcessor = isSimpleSpanProcessor
? new SimpleSpanProcessor(exporter)
: new BatchSpanProcessor(exporter);
tracerProvider.addSpanProcessor(spanProcessor);
tracerProvider.register();
const tracer = tracerProvider.getTracer(serviceName);
initGlobalTracer(new TracerShim(tracer));
return globalTracer();
}
static getInstance(options?: TracingModuleOptions): TracerProvider {
if (!this.instance && options) {
this.instance = new TracerProvider(options);
}
return this.instance;
}
getTracer(): Tracer {
return this.tracer;
}
extractHeaders(headers: IncomingHttpHeaders): SpanContext {
const context = this.tracer.extract(FORMAT_HTTP_HEADERS, headers);
return context || undefined;
}
extractTracing(tracingData: TracingData): SpanContext {
const carrier = { traceparent: tracingData && tracingData.carrier };
const context = this.tracer.extract(FORMAT_TEXT_MAP, carrier);
return context || undefined;
}
getCarrier(span: Span): string {
const data: Record<string, string> = {};
this.tracer.inject(span, FORMAT_TEXT_MAP, data);
return data.traceparent;
}
} | |
updateupstream.py | """Generates an upstream.yaml from a config.yaml and a GitHub release URL
"""
import argparse
import os
from tempfile import TemporaryDirectory
import yaml
import zipfile
import gftools.packager
from gftools.builder import GFBuilder
from strictyaml import as_document
from gftools.utils import download_file
from fontTools.ttLib import TTFont
parser = argparse.ArgumentParser(description="Generate an upstream.yaml from a config.yaml and a GitHub release URL.")
parser.add_argument("url", help="URL of GitHub release")
parser.add_argument("--family", help="Family name", required=False)
parser.add_argument("--config", help="Config file", default="sources/config.yaml", required=False)
def get_family_name(config):
if "familyName" in config:
return config["familyName"]
return GFBuilder(config).get_family_name()
def generate_upstream(config, url):
repo = os.environ.get("GITHUB_REPOSITORY")
if not repo:
raise ValueError("Not being run from a GitHub action?")
if "category" not in config:
config["category"] = ["SANS_SERIF"]
upstream = {
"name": get_family_name(config),
"repository_url": os.environ["GITHUB_SERVER_URL"] + "/" + repo + ".git",
"archive": url,
"branch": "main",
"category": config["category"],
"build": "",
"files": {},
"designer": "Will be filled in",
}
return upstream
def | (upstream):
print("Downloading release archive...")
upstream["files"] = {}
with TemporaryDirectory() as tmp:
archive_path = os.path.join(tmp, "archive.zip")
download_file(upstream["archive"], archive_path)
license_found = False
description_found = False
a_font = None
with zipfile.ZipFile(archive_path, "r") as zip_ref:
zip_ref.extractall(tmp)
for root, _, files in os.walk(tmp):
for file in files:
fullpath = os.path.join(root, file)
relpath = os.path.relpath(fullpath, tmp)
print(relpath)
if file == "OFL.txt":
license_found = True
upstream["files"][relpath] = file
elif file == "DESCRIPTION.en_us.html":
description_found = True
upstream["files"][relpath] = file
elif file.endswith("ttf"):
if config.get("buildVariable", True):
# Only add the file if it is the variable font
if "[" in file:
upstream["files"][relpath] = file
a_font = fullpath
else:
# Add statics
upstream["files"][relpath] = file
a_font = fullpath
if not license_found:
raise ValueError(
"No license file was found. Ensure OFL.txt is added the the release"
)
if not description_found and "Noto" not in upstream["name"]:
raise ValueError(
"No description file was found. Ensure DESCRIPTION.en_us.html is added the the release"
)
if not a_font:
raise ValueError("No font files were found. Is the release broken?")
designer = TTFont(a_font)["name"].getDebugName(9)
if designer:
upstream["designer"] = designer
if __name__ == "__main__":
args = parser.parse_args()
if args.family:
config = {"familyName": args.family}
else:
config = yaml.load(
open(args.config), Loader=yaml.FullLoader
)
if os.path.isfile("upstream.yaml"):
try:
upstream = gftools.packager._upstream_conf_from_file(
"upstream.yaml", yes=True, quiet=True
)
except Exception as e:
raise ValueError("Something went wrong parsing upstream.yaml: " + str(e))
else:
try:
upstream = as_document(
generate_upstream(config, args.url),
gftools.packager.upstream_yaml_schema,
)
except Exception as e:
raise ValueError(
"Something went wrong generating upstream.yaml (bug in updateupstream): "
+ str(e)
)
# Add archive URL
upstream["archive"] = args.url
update_file_list(upstream)
with open("upstream.yaml", "w") as upstream_yaml_file:
upstream_yaml_file.write(upstream.as_yaml())
| update_file_list |
classify_capture.py | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A demo to classify Raspberry Pi camera stream.""" | import io
import time
from edgetpu.classification.engine import ClassificationEngine
from edgetpu.utils import dataset_utils
import numpy as np
import picamera
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'--model', help='File path of Tflite model.', required=True)
parser.add_argument('--label', help='File path of label file.', required=True)
args = parser.parse_args()
labels = dataset_utils.read_label_file(args.label)
engine = ClassificationEngine(args.model)
with picamera.PiCamera() as camera:
camera.resolution = (640, 480)
camera.framerate = 30
_, height, width, _ = engine.get_input_tensor_shape()
camera.start_preview()
try:
stream = io.BytesIO()
for _ in camera.capture_continuous(
stream, format='rgb', use_video_port=True, resize=(width, height)):
stream.truncate()
stream.seek(0)
input_tensor = np.frombuffer(stream.getvalue(), dtype=np.uint8)
start_ms = time.time()
results = engine.classify_with_input_tensor(input_tensor, top_k=1)
elapsed_ms = time.time() - start_ms
if results:
camera.annotate_text = '%s %.2f\n%.2fms' % (
labels[results[0][0]], results[0][1], elapsed_ms * 1000.0)
finally:
camera.stop_preview()
if __name__ == '__main__':
main() |
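# Example invocation on a Raspberry Pi with an Edge TPU attached (both file
# paths are placeholders for a model and label file you supply yourself):
#   python3 classify_capture.py \
#       --model mobilenet_v2_1.0_224_quant_edgetpu.tflite \
#       --label imagenet_labels.txt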
import argparse |
logging.go | package ari
// Logging represents a communication path to an
// Asterisk server for working with logging resources
type Logging interface {
// Create creates a new log. The levels are a comma-separated list of
// logging levels on which this channel should operate. The name of the
// channel should be the key's ID.
Create(key *Key, levels string) (*LogHandle, error)
// Data retrieves the data for a logging channel
Data(key *Key) (*LogData, error)
// Get returns a handle to the logging channel
Get(key *Key) *LogHandle
// List the logs
List(filter *Key) ([]*Key, error)
// Rotate rotates the log
Rotate(key *Key) error
// Delete deletes the log
Delete(key *Key) error
}
// LogData represents the log data
type LogData struct {
// Key is the cluster-unique identifier for this logging channel
Key *Key `json:"key"`
// Name is the name of the logging channel
Name string `json:"channel"`
// Levels is a comma-separated list of logging levels for this channel
Levels string `json:"levels"`
// Type indicates the type of logs for this channel
Types string `json:"types"`
// Status indicates whether this logging channel is enabled
Status string `json:"status"`
}
// NewLogHandle builds a new log handle given the `Key` and `Logging` client
func NewLogHandle(key *Key, l Logging) *LogHandle |
// LogHandle provides an interface to manipulate a logging channel
type LogHandle struct {
key *Key
c Logging
}
// ID returns the ID (name) of the logging channel
func (l *LogHandle) ID() string {
return l.key.ID
}
// Key returns the Key of the logging channel
func (l *LogHandle) Key() *Key {
return l.key
}
// Data returns the data for the logging channel
func (l *LogHandle) Data() (*LogData, error) {
return l.c.Data(l.key)
}
// Rotate causes the logging channel's logfiles to be rotated
func (l *LogHandle) Rotate() error {
return l.c.Rotate(l.key)
}
// Delete removes the logging channel from Asterisk
func (l *LogHandle) Delete() error {
return l.c.Delete(l.key)
}
| {
return &LogHandle{
key: key,
c: l,
}
} |
test.py | """
python test.py --model pointMLP --msg 20220209053148-404
"""
import argparse
import os
import datetime
import torch
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
import torch.utils.data
import torch.utils.data.distributed
from torch.utils.data import DataLoader
import models as models
from utils import progress_bar, IOStream
from data import ModelNet40
import sklearn.metrics as metrics
from helper import cal_loss
import numpy as np
import torch.nn.functional as F
model_names = sorted(
name for name in models.__dict__ if callable(models.__dict__[name])
)
def parse_args():
"""Parameters"""
parser = argparse.ArgumentParser("training")
parser.add_argument(
"-c",
"--checkpoint",
type=str,
metavar="PATH",
help="path to save checkpoint (default: checkpoint)",
)
parser.add_argument("--msg", type=str, help="message after checkpoint")
parser.add_argument(
"--batch_size", type=int, default=16, help="batch size in training"
)
parser.add_argument(
"--model", default="pointMLP", help="model name [default: pointnet_cls]"
)
parser.add_argument(
"--num_classes",
default=40,
type=int,
choices=[10, 40],
help="training on ModelNet10/40",
)
parser.add_argument("--num_points", type=int, default=1024, help="Point Number")
return parser.parse_args()
def main():
|
def validate(net, testloader, criterion, device):
net.eval()
test_loss = 0
correct = 0
total = 0
test_true = []
test_pred = []
time_cost = datetime.datetime.now()
with torch.no_grad():
for batch_idx, (data, label) in enumerate(testloader):
data, label = data.to(device), label.to(device).squeeze()
data = data.permute(0, 2, 1)
logits = net(data)
loss = criterion(logits, label)
test_loss += loss.item()
preds = logits.max(dim=1)[1]
test_true.append(label.cpu().numpy())
test_pred.append(preds.detach().cpu().numpy())
total += label.size(0)
correct += preds.eq(label).sum().item()
progress_bar(
batch_idx,
len(testloader),
"Loss: %.3f | Acc: %.3f%% (%d/%d)"
% (
test_loss / (batch_idx + 1),
100.0 * correct / total,
correct,
total,
),
)
time_cost = int((datetime.datetime.now() - time_cost).total_seconds())
test_true = np.concatenate(test_true)
test_pred = np.concatenate(test_pred)
return {
"loss": float("%.3f" % (test_loss / (batch_idx + 1))),
"acc": float("%.3f" % (100.0 * metrics.accuracy_score(test_true, test_pred))),
"acc_avg": float(
"%.3f" % (100.0 * metrics.balanced_accuracy_score(test_true, test_pred))
),
"time": time_cost,
}
if __name__ == "__main__":
main()
| args = parse_args()
print(f"args: {args}")
os.environ["HDF5_USE_FILE_LOCKING"] = "FALSE"
if torch.cuda.is_available():
device = "cuda"
else:
device = "cpu"
print(f"==> Using device: {device}")
if args.msg is None:
message = str(datetime.datetime.now().strftime("-%Y%m%d%H%M%S"))
else:
message = "-" + args.msg
args.checkpoint = "checkpoints/" + args.model + message
print("==> Preparing data..")
test_loader = DataLoader(
ModelNet40(partition="test", num_points=args.num_points),
num_workers=4,
batch_size=args.batch_size,
shuffle=False,
drop_last=False,
)
# Model
print("==> Building model..")
net = models.__dict__[args.model]()
criterion = cal_loss
net = net.to(device)
checkpoint_path = os.path.join(args.checkpoint, "best_checkpoint.pth")
checkpoint = torch.load(checkpoint_path, map_location=torch.device("cpu"))
# criterion = criterion.to(device)
if device == "cuda":
net = torch.nn.DataParallel(net)
cudnn.benchmark = True
net.load_state_dict(checkpoint["net"])
test_out = validate(net, test_loader, criterion, device)
print(f"Vanilla out: {test_out}") |
util.py | # -*- coding: utf-8 -*-
###############################################################################
# Copyright (c), Forschungszentrum Jülich GmbH, IAS-1/PGI-1, Germany. #
# All rights reserved. #
# This file is part of the aiida-jutools package. #
# (AiiDA JuDFT tools) #
# #
# The code is hosted on GitHub at https://github.com/judftteam/aiida-jutools. #
# For further information on the license, see the LICENSE.txt file. #
# For further information please visit http://judft.de/. #
# #
###############################################################################
"""Tools for working with AiiDA StructureData nodes: utils."""
import typing as _typing
from aiida import orm as _orm
from aiida.tools import groups as _aiida_groups
import aiida_jutools as _jutools
def query_elemental_structure(symbol: str,
group: _orm.Group = None) -> _typing.List[_orm.StructureData]:
"""Query structures for a single chemical element.
:param symbol: chemical element symbol, case-sensitive, like 'He'
:param group: optionally, search only within this group
:return: list of results
"""
qb = _orm.QueryBuilder()
if group:
qb.append(_orm.Group, filters={'label': group.label}, tag='group')
qb.append(_orm.StructureData, with_group='group', filters={'attributes.kinds.0.name': symbol})
else:
qb.append(_orm.StructureData, filters={'attributes.kinds.0.name': symbol}) # more general
# # alternative: require extras
# qb.append(StructureData, with_group='group', filters={"extras.symbol": symbol})
# return qb.first()[0]
return qb.all(flat=True)
# # DEVNOTE:
# # alternative: require extras
# the following equivalent solution is ~3x slower (for a group of ~1e2 structures):
# return next((structure for structure in structures if structure.extras['symbol'] == symbol), None)
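# # Usage sketch (assumes a loaded AiiDA profile; the group label is illustrative):
# # from aiida import load_profile, orm
# # load_profile()
# # group = orm.load_group(label='elemental_structures')
# # helium_structures = query_elemental_structure('He', group=group)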
def l | input_structure_group: _orm.Group,
output_structure_group_label: str,
scale_factor: _orm.Float,
set_extra: bool = True,
dry_run: bool = True,
silent: bool = False) -> _orm.Group:
"""Rescale a group of structures and put them in a new or existing group.
Only input structures which do not already have a rescaled output structure in the output structure group
will be rescaled.
:param input_structure_group: group with StructureData nodes to rescale. Ignores other nodes in the group.
:param output_structure_group_label: name of group for rescaled structures. Create if not exist.
:param scale_factor: scale factor with which to scale the lattice constant of the input structure
:param set_extra: True: set extra 'scale_factor' : scale_factor.value to structures rescaled in this run.
:param dry_run: default True: perform a dry run and print what the method *would* do.
:param silent: True: do not print info messages
:return: output group of rescaled structures
"""
assert isinstance(scale_factor, _orm.Float)
would_or_will = "would" if dry_run else "will"
out_structure_grouppath = _aiida_groups.GroupPath(path=output_structure_group_label)
out_structure_group, created = out_structure_grouppath.get_or_create_group()
inp_structures = {node.uuid: node for node in input_structure_group.nodes if isinstance(node, _orm.StructureData)}
already_rescaled = {}
if dry_run or not silent:
print(40 * '-')
print(f"Task: Rescale {len(inp_structures.keys())} {_orm.StructureData.__name__} nodes in group "
f"'{input_structure_group.label}' with scale factor {scale_factor.value}.\nPerform dry run: {dry_run}.")
# try load structures
out_structures_existing = [node for node in out_structure_group.nodes if isinstance(node, _orm.StructureData)]
# now pop out the input nodes which already have been rescaled
for out_struc in out_structures_existing:
inps_from_out = query_modified_input_structure(modified_structure=out_struc, invariant_kinds=True)
if inps_from_out:
uuids = [inp.uuid for inp in inps_from_out]
for uuid in uuids:
if uuid in inp_structures:
already_rescaled[uuid] = inp_structures.pop(uuid)
# now rescale the remaining ones
if dry_run or not silent:
print(
f"I {would_or_will} rescale {len(inp_structures.keys())} {_orm.StructureData.__name__} nodes from "
f"the input group. I would add the new nodes to output group '{output_structure_group_label}'.\n"
f"{len(already_rescaled.keys())} {_orm.StructureData.__name__} of the input nodes already have been "
f"rescaled and added to this output target previously.")
if not dry_run:
for inp_structure in inp_structures.values():
out_structure = _jutools.process_functions.rescale_structure(input_structure=inp_structure,
scale_factor=scale_factor)
if set_extra:
out_structure.set_extra("scale_factor", scale_factor.value)
out_structure_group.add_nodes([out_structure])
if not dry_run and not silent:
print(
f"Created {len(inp_structures.keys())} {_orm.StructureData.__name__} nodes and added them to group "
f"'{output_structure_group_label}'.")
return out_structure_group
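# Usage sketch (group labels and scale factor are illustrative; assumes a
# loaded AiiDA profile):
# from aiida import orm
# inputs = orm.load_group(label='structures/unscaled')
# out_group = load_or_rescale_structures(
#     input_structure_group=inputs,
#     output_structure_group_label='structures/rescaled_1.02',
#     scale_factor=orm.Float(1.02),
#     dry_run=True,  # print the plan first, write nothing
# )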
def query_modified_input_structure(modified_structure: _orm.StructureData,
invariant_kinds: bool = False) -> _typing.List[_orm.StructureData]:
"""Given a structure modified via a CalcFunction, query its input structure(s).
:param modified_structure: structure modified via a single CalcFunction
:param invariant_kinds: to make the query more precise, assume that the 'kinds' attribute has not been modified.
:return: list of input structures, if any.
"""
def _filter_from_attribute(attribute: list) -> dict:
"""Unpack a complex attribute into an 'and'-query filter.
:param attribute: attribute of a node. Assumes list of dicts of simple types or list thereof.
:return: a query filter for nodes with that attribute and those values
"""
filters = {'and': []}
for i, kind in enumerate(attribute):
for key, value in kind.items():
if not isinstance(value, list):
filters['and'].append({f"attributes.kinds.{i}.{key}": attribute[i][key]})
else:
for j, val in enumerate(value):
filters['and'].append({f"attributes.kinds.{i}.{key}.{j}": attribute[i][key][j]})
return filters
if not invariant_kinds:
input_structure_filters = {}
else:
output_kinds = modified_structure.attributes['kinds']
input_structure_filters = _filter_from_attribute(attribute=output_kinds)
qb = _orm.QueryBuilder()
# qb.append(Group, filters={'label': output_structure_group.label}, tag='group')
# qb.append(StructureData, with_group='group', filters={"uuid" : modified_structure.uuid}, tag='out_struc')
qb.append(_orm.StructureData, filters={"uuid": modified_structure.uuid}, tag='out_struc')
qb.append(_orm.CalcFunctionNode, with_outgoing='out_struc', tag='calc_fun')
qb.append(_orm.StructureData, with_outgoing='calc_fun', filters=input_structure_filters)
return qb.all(flat=True)
| oad_or_rescale_structures( |
__init__.py | # Copyright (c) 2015, Ecole Polytechnique Federale de Lausanne, Blue Brain Project
# All rights reserved.
#
# This file is part of NeuroM <https://github.com/BlueBrain/NeuroM>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of
# its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
''' NeuroM, lightweight and fast
Examples:
Obtain some morphometrics
>>> ap_seg_len = fst.get('segment_lengths', nrn, neurite_type=neurom.APICAL_DENDRITE)
>>> ax_sec_len = fst.get('section_lengths', nrn, neurite_type=neurom.AXON)
'''
import numpy as _np
from . import _neuritefunc as _nrt
from . import _neuronfunc as _nrn
from ..core import NeuriteType as _ntype
from ..core import iter_neurites as _ineurites
from ..core.types import tree_type_checker as _is_type
from ..exceptions import NeuroMError
from ._core import FstNeuron
NEURITEFEATURES = {
'total_length': _nrt.total_length,
'total_length_per_neurite': _nrt.total_length_per_neurite,
'neurite_lengths': _nrt.total_length_per_neurite,
'terminal_path_lengths_per_neurite': _nrt.terminal_path_lengths_per_neurite,
'section_lengths': _nrt.section_lengths,
'section_term_lengths': _nrt.section_term_lengths,
'section_bif_lengths': _nrt.section_bif_lengths,
'neurite_volumes': _nrt.total_volume_per_neurite,
'neurite_volume_density': _nrt.neurite_volume_density,
'section_volumes': _nrt.section_volumes,
'section_areas': _nrt.section_areas,
'section_tortuosity': _nrt.section_tortuosity,
'section_path_distances': _nrt.section_path_lengths,
'number_of_sections': _nrt.number_of_sections,
'number_of_sections_per_neurite': _nrt.number_of_sections_per_neurite,
'number_of_neurites': _nrt.number_of_neurites,
'number_of_bifurcations': _nrt.number_of_bifurcations,
'number_of_forking_points': _nrt.number_of_forking_points,
'number_of_terminations': _nrt.number_of_terminations,
'section_branch_orders': _nrt.section_branch_orders,
'section_term_branch_orders': _nrt.section_term_branch_orders,
'section_bif_branch_orders': _nrt.section_bif_branch_orders,
'section_radial_distances': _nrt.section_radial_distances,
'section_bif_radial_distances': _nrt.section_bif_radial_distances,
'section_term_radial_distances': _nrt.section_term_radial_distances,
'section_end_distances': _nrt.section_end_distances,
'section_strahler_orders': _nrt.section_strahler_orders,
'local_bifurcation_angles': _nrt.local_bifurcation_angles,
'remote_bifurcation_angles': _nrt.remote_bifurcation_angles,
'partition': _nrt.bifurcation_partitions,
'partition_asymmetry': _nrt.partition_asymmetries,
'partition_pairs': _nrt.partition_pairs,
'number_of_segments': _nrt.number_of_segments,
'segment_lengths': _nrt.segment_lengths,
'segment_volumes': _nrt.segment_volumes,
'segment_radii': _nrt.segment_radii,
'segment_midpoints': _nrt.segment_midpoints,
'segment_taper_rates': _nrt.segment_taper_rates,
'segment_radial_distances': _nrt.segment_radial_distances,
'segment_meander_angles': _nrt.segment_meander_angles,
'principal_direction_extents': _nrt.principal_direction_extents,
'total_area_per_neurite': _nrt.total_area_per_neurite,
}
NEURONFEATURES = {
'soma_radii': _nrn.soma_radii,
'soma_surface_areas': _nrn.soma_surface_areas,
'trunk_origin_radii': _nrn.trunk_origin_radii,
'trunk_origin_azimuths': _nrn.trunk_origin_azimuths,
'trunk_origin_elevations': _nrn.trunk_origin_elevations,
'trunk_section_lengths': _nrn.trunk_section_lengths,
'trunk_angles': _nrn.trunk_angles,
'trunk_vectors': _nrn.trunk_vectors,
'sholl_frequency': _nrn.sholl_frequency,
}
def register_neurite_feature(name, func):
'''Register a feature to be applied to neurites
Parameters:
name: name of the feature, used for access via get() function.
func: single parameter function of a neurite.
'''
if name in NEURITEFEATURES:
raise NeuroMError('Attempt to hide registered feature %s' % name)
def _fun(neurites, neurite_type=_ntype.all):
'''Wrap neurite function from outer scope and map into list'''
return list(func(n) for n in _ineurites(neurites, filt=_is_type(neurite_type)))
NEURONFEATURES[name] = _fun
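# Usage sketch (the feature name and callable are illustrative, not shipped features):
#
# def max_x(neurite):
#     '''toy scalar per neurite; any single-argument callable works here'''
#     return neurite.points[:, 0].max()  # assumes the fst Neurite.points array
#
# register_neurite_feature('max_x', max_x)
# values = get('max_x', nrn)  # `nrn` as in the module docstring example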
def get(feature, obj, **kwargs):
'''Obtain a feature from a set of morphology objects
Parameters:
feature(string): feature to extract
obj: a neuron, population or neurite tree
**kwargs: parameters to forward to underlying worker functions
Returns:
features as a 1D or 2D numpy array.
'''
feature = (NEURITEFEATURES[feature] if feature in NEURITEFEATURES
else NEURONFEATURES[feature])
return _np.array(list(feature(obj, **kwargs)))
_INDENT = ' ' * 4
def _indent(string, count):
'''indent `string` by `count` * INDENT'''
indent = _INDENT * count
ret = indent + string.replace('\n', '\n' + indent)
return ret.rstrip()
def _get_doc():
|
get.__doc__ += _indent('\nFeatures:\n', 1) + _indent(_get_doc(), 2) # pylint: disable=no-member
| '''Get a description of all the known available features'''
def get_docstring(func):
'''extract doctstring, if possible'''
docstring = ':\n'
if func.__doc__:
docstring += _indent(func.__doc__, 2)
return docstring
ret = ['\nNeurite features (neurite, neuron, neuron population):']
ret.extend(_INDENT + '- ' + feature + get_docstring(func)
for feature, func in sorted(NEURITEFEATURES.items()))
ret.append('\nNeuron features (neuron, neuron population):')
ret.extend(_INDENT + '- ' + feature + get_docstring(func)
for feature, func in sorted(NEURONFEATURES.items()))
return '\n'.join(ret) |
integration_test.go | package test
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"math/rand"
"net/url"
"os"
"path"
"path/filepath"
"regexp"
"runtime"
"sort"
"strconv"
"strings"
"testing"
"time"
"cloud.google.com/go/storage"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/dynamodb"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/aws/aws-sdk-go/service/sts"
terraws "github.com/gruntwork-io/terratest/modules/aws"
"github.com/gruntwork-io/terratest/modules/git"
"github.com/hashicorp/go-multierror"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"google.golang.org/api/iterator"
"github.com/gruntwork-io/terragrunt/aws_helper"
"github.com/gruntwork-io/terragrunt/cli"
"github.com/gruntwork-io/terragrunt/codegen"
"github.com/gruntwork-io/terragrunt/config"
terragruntDynamoDb "github.com/gruntwork-io/terragrunt/dynamodb"
"github.com/gruntwork-io/terragrunt/errors"
"github.com/gruntwork-io/terragrunt/options"
"github.com/gruntwork-io/terragrunt/remote"
"github.com/gruntwork-io/terragrunt/shell"
"github.com/gruntwork-io/terragrunt/util"
)
// hard-code this to match the test fixture for now
const (
TERRAFORM_REMOTE_STATE_S3_REGION = "us-west-2"
TERRAFORM_REMOTE_STATE_GCP_REGION = "eu"
TEST_FIXTURE_PATH = "fixture/"
TEST_FIXTURE_CODEGEN_PATH = "fixture-codegen"
TEST_FIXTURE_GCS_PATH = "fixture-gcs/"
TEST_FIXTURE_GCS_BYO_BUCKET_PATH = "fixture-gcs-byo-bucket/"
TEST_FIXTURE_STACK = "fixture-stack/"
TEST_FIXTURE_GRAPH_DEPENDENCIES = "fixture-graph-dependencies"
TEST_FIXTURE_OUTPUT_ALL = "fixture-output-all"
TEST_FIXTURE_STDOUT = "fixture-download/stdout-test"
TEST_FIXTURE_EXTRA_ARGS_PATH = "fixture-extra-args/"
TEST_FIXTURE_ENV_VARS_BLOCK_PATH = "fixture-env-vars-block/"
TEST_FIXTURE_SKIP = "fixture-skip/"
TEST_FIXTURE_CONFIG_SINGLE_JSON_PATH = "fixture-config-files/single-json-config"
TEST_FIXTURE_PREVENT_DESTROY_OVERRIDE = "fixture-prevent-destroy-override/child"
TEST_FIXTURE_PREVENT_DESTROY_NOT_SET = "fixture-prevent-destroy-not-set/child"
TEST_FIXTURE_LOCAL_PREVENT_DESTROY = "fixture-download/local-with-prevent-destroy"
TEST_FIXTURE_LOCAL_PREVENT_DESTROY_DEPENDENCIES = "fixture-download/local-with-prevent-destroy-dependencies"
TEST_FIXTURE_LOCAL_INCLUDE_PREVENT_DESTROY_DEPENDENCIES = "fixture-download/local-include-with-prevent-destroy-dependencies"
TEST_FIXTURE_NOT_EXISTING_SOURCE = "fixture-download/invalid-path"
TEST_FIXTURE_EXTERNAL_DEPENDENCIE = "fixture-external-dependencies"
TEST_FIXTURE_MISSING_DEPENDENCIE = "fixture-missing-dependencies/main"
TEST_FIXTURE_GET_OUTPUT = "fixture-get-output"
TEST_FIXTURE_HOOKS_BEFORE_ONLY_PATH = "fixture-hooks/before-only"
TEST_FIXTURE_HOOKS_ALL_PATH = "fixture-hooks/all"
TEST_FIXTURE_HOOKS_AFTER_ONLY_PATH = "fixture-hooks/after-only"
TEST_FIXTURE_HOOKS_BEFORE_AND_AFTER_PATH = "fixture-hooks/before-and-after"
TEST_FIXTURE_HOOKS_BEFORE_AND_AFTER_MERGE_PATH = "fixture-hooks/before-and-after-merge"
TEST_FIXTURE_HOOKS_SKIP_ON_ERROR_PATH = "fixture-hooks/skip-on-error"
TEST_FIXTURE_HOOKS_ONE_ARG_ACTION_PATH = "fixture-hooks/one-arg-action"
TEST_FIXTURE_HOOKS_EMPTY_STRING_COMMAND_PATH = "fixture-hooks/bad-arg-action/empty-string-command"
TEST_FIXTURE_HOOKS_EMPTY_COMMAND_LIST_PATH = "fixture-hooks/bad-arg-action/empty-command-list"
TEST_FIXTURE_HOOKS_INTERPOLATIONS_PATH = "fixture-hooks/interpolations"
TEST_FIXTURE_HOOKS_INIT_ONCE_NO_SOURCE_NO_BACKEND = "fixture-hooks/init-once/no-source-no-backend"
TEST_FIXTURE_HOOKS_INIT_ONCE_NO_SOURCE_WITH_BACKEND = "fixture-hooks/init-once/no-source-with-backend"
TEST_FIXTURE_HOOKS_INIT_ONCE_WITH_SOURCE_NO_BACKEND = "fixture-hooks/init-once/with-source-no-backend"
TEST_FIXTURE_HOOKS_INIT_ONCE_WITH_SOURCE_WITH_BACKEND = "fixture-hooks/init-once/with-source-with-backend"
TEST_FIXTURE_FAILED_TERRAFORM = "fixture-failure"
TEST_FIXTURE_EXIT_CODE = "fixture-exit-code"
TEST_FIXTURE_AUTO_RETRY_RERUN = "fixture-auto-retry/re-run"
TEST_FIXTURE_AUTO_RETRY_EXHAUST = "fixture-auto-retry/exhaust"
TEST_FIXTURE_AUTO_RETRY_CUSTOM_ERRORS = "fixture-auto-retry/custom-errors"
TEST_FIXTURE_AUTO_RETRY_CUSTOM_ERRORS_NOT_SET = "fixture-auto-retry/custom-errors-not-set"
TEST_FIXTURE_AUTO_RETRY_APPLY_ALL_RETRIES = "fixture-auto-retry/apply-all"
TEST_FIXTURE_AUTO_RETRY_CONFIGURABLE_RETRIES = "fixture-auto-retry/configurable-retries"
TEST_FIXTURE_AUTO_RETRY_CONFIGURABLE_RETRIES_ERROR_1 = "fixture-auto-retry/configurable-retries-incorrect-retry-attempts"
TEST_FIXTURE_AUTO_RETRY_CONFIGURABLE_RETRIES_ERROR_2 = "fixture-auto-retry/configurable-retries-incorrect-sleep-interval"
TEST_FIXTURE_AWS_PROVIDER_PATCH = "fixture-aws-provider-patch"
TEST_FIXTURE_INPUTS = "fixture-inputs"
TEST_FIXTURE_LOCALS_ERROR_UNDEFINED_LOCAL = "fixture-locals-errors/undefined-local"
TEST_FIXTURE_LOCALS_ERROR_UNDEFINED_LOCAL_BUT_INPUT = "fixture-locals-errors/undefined-local-but-input"
TEST_FIXTURE_LOCALS_CANONICAL = "fixture-locals/canonical"
TEST_FIXTURE_LOCALS_IN_INCLUDE = "fixture-locals/local-in-include"
TEST_FIXTURE_LOCAL_RUN_ONCE = "fixture-locals/run-once"
TEST_FIXTURE_LOCAL_RUN_MULTIPLE = "fixture-locals/run-multiple"
TEST_FIXTURE_LOCALS_IN_INCLUDE_CHILD_REL_PATH = "qa/my-app"
TEST_FIXTURE_READ_CONFIG = "fixture-read-config"
TEST_FIXTURE_READ_IAM_ROLE = "fixture-read-config/iam_role_in_file"
TEST_FIXTURE_IAM_ROLES_MULTIPLE_MODULES = "fixture-read-config/iam_roles_multiple_modules"
TEST_FIXTURE_RELATIVE_INCLUDE_CMD = "fixture-relative-include-cmd"
TEST_FIXTURE_AWS_GET_CALLER_IDENTITY = "fixture-get-aws-caller-identity"
TEST_FIXTURE_GET_PLATFORM = "fixture-get-platform"
TEST_FIXTURE_GET_TERRAGRUNT_SOURCE_HCL = "fixture-get-terragrunt-source-hcl"
TEST_FIXTURE_GET_TERRAGRUNT_SOURCE_CLI = "fixture-get-terragrunt-source-cli"
TEST_FIXTURE_REGRESSIONS = "fixture-regressions"
TEST_FIXTURE_PLANFILE_ORDER = "fixture-planfile-order-test"
TEST_FIXTURE_DIRS_PATH = "fixture-dirs"
TEST_FIXTURE_PARALLELISM = "fixture-parallelism"
TEST_FIXTURE_SOPS = "fixture-sops"
TEST_FIXTURE_DESTROY_WARNING = "fixture-destroy-warning"
TEST_FIXTURE_INCLUDE_PARENT = "fixture-include-parent"
TERRAFORM_BINARY = "terraform"
TERRAFORM_FOLDER = ".terraform"
TERRAFORM_STATE = "terraform.tfstate"
TERRAFORM_STATE_BACKUP = "terraform.tfstate.backup"
TERRAGRUNT_CACHE = ".terragrunt-cache"
qaMyAppRelPath = "qa/my-app"
)
func init() {
rand.Seed(time.Now().UnixNano())
}
func TestTerragruntInitHookNoSourceNoBackend(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_HOOKS_INIT_ONCE_NO_SOURCE_NO_BACKEND)
tmpEnvPath := copyEnvironment(t, "fixture-hooks/init-once")
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_HOOKS_INIT_ONCE_NO_SOURCE_NO_BACKEND)
var (
stdout bytes.Buffer
stderr bytes.Buffer
)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr)
output := stderr.String()
if err != nil {
t.Errorf("Did not expect to get error: %s", err.Error())
}
assert.Equal(t, 1, strings.Count(output, "AFTER_INIT_ONLY_ONCE"), "Hooks on init command executed more than once")
// With no source, `init-from-module` should not execute
assert.NotContains(t, output, "AFTER_INIT_FROM_MODULE_ONLY_ONCE", "Hooks on init-from-module command executed when no source was specified")
}
func TestTerragruntInitHookNoSourceWithBackend(t *testing.T) {
t.Parallel()
s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
cleanupTerraformFolder(t, TEST_FIXTURE_HOOKS_INIT_ONCE_NO_SOURCE_WITH_BACKEND)
tmpEnvPath := copyEnvironment(t, "fixture-hooks/init-once")
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_HOOKS_INIT_ONCE_NO_SOURCE_WITH_BACKEND)
defer deleteS3Bucket(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)
rootTerragruntConfigPath := util.JoinPath(rootPath, config.DefaultTerragruntConfigPath)
copyTerragruntConfigAndFillPlaceholders(t, rootTerragruntConfigPath, rootTerragruntConfigPath, s3BucketName, "not-used", TERRAFORM_REMOTE_STATE_S3_REGION)
var (
stdout bytes.Buffer
stderr bytes.Buffer
)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr)
output := stderr.String()
if err != nil {
t.Errorf("Did not expect to get error: %s", err.Error())
}
assert.Equal(t, 1, strings.Count(output, "AFTER_INIT_ONLY_ONCE"), "Hooks on init command executed more than once")
// With no source, `init-from-module` should not execute
assert.NotContains(t, output, "AFTER_INIT_FROM_MODULE_ONLY_ONCE", "Hooks on init-from-module command executed when no source was specified")
}
func TestTerragruntInitHookWithSourceNoBackend(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_HOOKS_INIT_ONCE_WITH_SOURCE_NO_BACKEND)
tmpEnvPath := copyEnvironment(t, "fixture-hooks/init-once")
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_HOOKS_INIT_ONCE_WITH_SOURCE_NO_BACKEND)
var (
stdout bytes.Buffer
stderr bytes.Buffer
)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s --terragrunt-log-level debug", rootPath), &stdout, &stderr)
logBufferContentsLineByLine(t, stdout, "apply stdout")
logBufferContentsLineByLine(t, stderr, "apply stderr")
output := stderr.String()
if err != nil {
t.Errorf("Did not expect to get error: %s", err.Error())
}
assert.Equal(t, 1, strings.Count(output, "AFTER_INIT_ONLY_ONCE\n"), "Hooks on init command executed more than once")
assert.Equal(t, 1, strings.Count(output, "AFTER_INIT_FROM_MODULE_ONLY_ONCE\n"), "Hooks on init-from-module command executed more than once")
}
func TestTerragruntInitHookWithSourceWithBackend(t *testing.T) {
t.Parallel()
s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
cleanupTerraformFolder(t, TEST_FIXTURE_HOOKS_INIT_ONCE_WITH_SOURCE_WITH_BACKEND)
tmpEnvPath := copyEnvironment(t, "fixture-hooks/init-once")
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_HOOKS_INIT_ONCE_WITH_SOURCE_WITH_BACKEND)
defer deleteS3Bucket(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)
rootTerragruntConfigPath := util.JoinPath(rootPath, config.DefaultTerragruntConfigPath)
copyTerragruntConfigAndFillPlaceholders(t, rootTerragruntConfigPath, rootTerragruntConfigPath, s3BucketName, "not-used", TERRAFORM_REMOTE_STATE_S3_REGION)
var (
stdout bytes.Buffer
stderr bytes.Buffer
)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr)
output := stderr.String()
if err != nil {
t.Errorf("Did not expect to get error: %s", err.Error())
}
// `init` hook should execute only once
assert.Equal(t, 1, strings.Count(output, "AFTER_INIT_ONLY_ONCE"), "Hooks on init command executed more than once")
// `init-from-module` hook should execute only once
assert.Equal(t, 1, strings.Count(output, "AFTER_INIT_FROM_MODULE_ONLY_ONCE"), "Hooks on init-from-module command executed more than once")
}
func TestTerragruntHookRunAllApply(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_HOOKS_ALL_PATH)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_HOOKS_ALL_PATH)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_HOOKS_ALL_PATH)
beforeOnlyPath := util.JoinPath(rootPath, "before-only")
afterOnlyPath := util.JoinPath(rootPath, "after-only")
runTerragrunt(t, fmt.Sprintf("terragrunt run-all apply -auto-approve --terragrunt-log-level debug --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
_, beforeErr := ioutil.ReadFile(beforeOnlyPath + "/file.out")
assert.NoError(t, beforeErr)
_, afterErr := ioutil.ReadFile(afterOnlyPath + "/file.out")
assert.NoError(t, afterErr)
}
func TestTerragruntHookApplyAll(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_HOOKS_ALL_PATH)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_HOOKS_ALL_PATH)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_HOOKS_ALL_PATH)
beforeOnlyPath := util.JoinPath(rootPath, "before-only")
afterOnlyPath := util.JoinPath(rootPath, "after-only")
runTerragrunt(t, fmt.Sprintf("terragrunt apply-all -auto-approve --terragrunt-log-level debug --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
_, beforeErr := ioutil.ReadFile(beforeOnlyPath + "/file.out")
assert.NoError(t, beforeErr)
_, afterErr := ioutil.ReadFile(afterOnlyPath + "/file.out")
assert.NoError(t, afterErr)
}
func TestTerragruntBeforeHook(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_HOOKS_BEFORE_ONLY_PATH)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_HOOKS_BEFORE_ONLY_PATH)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_HOOKS_BEFORE_ONLY_PATH)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
_, exception := ioutil.ReadFile(rootPath + "/file.out")
assert.NoError(t, exception)
}
func TestTerragruntHookWorkingDir(t *testing.T) {
t.Parallel()
fixturePath := "fixture-hooks/working_dir"
cleanupTerraformFolder(t, fixturePath)
tmpEnvPath := copyEnvironment(t, fixturePath)
rootPath := util.JoinPath(tmpEnvPath, fixturePath)
runTerragrunt(t, fmt.Sprintf("terragrunt validate --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
}
func TestTerragruntAfterHook(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_HOOKS_AFTER_ONLY_PATH)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_HOOKS_AFTER_ONLY_PATH)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_HOOKS_AFTER_ONLY_PATH)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
_, exception := ioutil.ReadFile(rootPath + "/file.out")
assert.NoError(t, exception)
}
func TestTerragruntBeforeAndAfterHook(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_HOOKS_BEFORE_AND_AFTER_PATH)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_HOOKS_BEFORE_AND_AFTER_PATH)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_HOOKS_BEFORE_AND_AFTER_PATH)
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr)
_, beforeException := ioutil.ReadFile(rootPath + "/before.out")
_, afterException := ioutil.ReadFile(rootPath + "/after.out")
output := stderr.String()
if err != nil {
t.Errorf("Did not expect to get error: %s", err.Error())
}
assert.Equal(t, 0, strings.Count(output, "BEFORE_TERRAGRUNT_READ_CONFIG"), "terragrunt-read-config before_hook should not be triggered")
t.Logf("output: %s", output)
assert.Equal(t, 1, strings.Count(output, "AFTER_TERRAGRUNT_READ_CONFIG"), "Hooks on terragrunt-read-config command executed more than once")
assert.NoError(t, beforeException)
assert.NoError(t, afterException)
}
func TestTerragruntBeforeAndAfterMergeHook(t *testing.T) {
t.Parallel()
childPath := util.JoinPath(TEST_FIXTURE_HOOKS_BEFORE_AND_AFTER_MERGE_PATH, qaMyAppRelPath)
cleanupTerraformFolder(t, childPath)
s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
t.Logf("bucketName: %s", s3BucketName)
defer deleteS3Bucket(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)
tmpTerragruntConfigPath := createTmpTerragruntConfigWithParentAndChild(t, TEST_FIXTURE_HOOKS_BEFORE_AND_AFTER_MERGE_PATH, qaMyAppRelPath, s3BucketName, config.DefaultTerragruntConfigPath, config.DefaultTerragruntConfigPath)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-config %s --terragrunt-working-dir %s", tmpTerragruntConfigPath, childPath))
_, beforeException := ioutil.ReadFile(childPath + "/before.out")
_, beforeChildException := ioutil.ReadFile(childPath + "/before-child.out")
_, beforeOverriddenParentException := ioutil.ReadFile(childPath + "/before-parent.out")
_, afterException := ioutil.ReadFile(childPath + "/after.out")
_, afterParentException := ioutil.ReadFile(childPath + "/after-parent.out")
assert.NoError(t, beforeException)
assert.NoError(t, beforeChildException)
assert.NoError(t, afterException)
assert.NoError(t, afterParentException)
// Expect a PathError: the parent's before hook is overridden in the child, so before-parent.out should never be created.
assert.Error(t, beforeOverriddenParentException)
}
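// TestTerragruntSkipOnError exercises hook error handling: once a hook fails, later
// hooks should be skipped unless they opt in to running anyway (the fixture is assumed
// to use the hook-level run_on_error setting for this). The *_SHOULD_DISPLAY and
// *_NODISPLAY markers asserted below encode which hooks must and must not run.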
func TestTerragruntSkipOnError(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_HOOKS_SKIP_ON_ERROR_PATH)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_HOOKS_SKIP_ON_ERROR_PATH)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_HOOKS_SKIP_ON_ERROR_PATH)
var (
stdout bytes.Buffer
stderr bytes.Buffer
)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr)
output := stderr.String()
if err != nil {
assert.Contains(t, output, "BEFORE_SHOULD_DISPLAY")
assert.NotContains(t, output, "BEFORE_NODISPLAY")
assert.Contains(t, output, "AFTER_SHOULD_DISPLAY")
assert.NotContains(t, output, "AFTER_NODISPLAY")
} else {
t.Error("Expected NO terragrunt execution due to previous errors but it did run.")
}
}
func TestTerragruntBeforeOneArgAction(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_HOOKS_ONE_ARG_ACTION_PATH)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_HOOKS_ONE_ARG_ACTION_PATH)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_HOOKS_ONE_ARG_ACTION_PATH)
var (
stdout bytes.Buffer
stderr bytes.Buffer
)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s --terragrunt-log-level debug", rootPath), &stdout, &stderr)
output := stderr.String()
if err != nil {
t.Error("Expected successful execution of terragrunt with 1 before hook execution.")
} else {
assert.Contains(t, output, "Running command: date")
}
}
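// TestTerragruntEmptyStringCommandHook asserts that a hook whose execute list contains
// only an empty string is rejected with a validation error instead of being run.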
func TestTerragruntEmptyStringCommandHook(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_HOOKS_EMPTY_STRING_COMMAND_PATH)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_HOOKS_EMPTY_STRING_COMMAND_PATH)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_HOOKS_EMPTY_STRING_COMMAND_PATH)
var (
stdout bytes.Buffer
stderr bytes.Buffer
)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr)
if err != nil {
assert.Contains(t, err.Error(), "Need at least one non-empty argument in 'execute'.")
} else {
t.Error("Expected an Error with message: 'Need at least one argument'")
}
}
func TestTerragruntEmptyCommandListHook(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_HOOKS_EMPTY_COMMAND_LIST_PATH)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_HOOKS_EMPTY_COMMAND_LIST_PATH)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_HOOKS_EMPTY_COMMAND_LIST_PATH)
var (
stdout bytes.Buffer
stderr bytes.Buffer
)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr)
if err != nil {
assert.Contains(t, err.Error(), "Need at least one non-empty argument in 'execute'.")
} else {
t.Error("Expected an Error with message: 'Need at least one argument'")
}
}
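// TestTerragruntHookInterpolation verifies that expressions inside hook arguments are
// resolved. The fixture presumably echoes something like get_env("HOME", "HelloWorld"),
// which is why the assertion below falls back to "HelloWorld" when HOME is unset.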
func TestTerragruntHookInterpolation(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_HOOKS_INTERPOLATIONS_PATH)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_HOOKS_INTERPOLATIONS_PATH)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_HOOKS_INTERPOLATIONS_PATH)
var (
stdout bytes.Buffer
stderr bytes.Buffer
)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr)
erroutput := stderr.String()
homePath := os.Getenv("HOME")
if homePath == "" {
homePath = "HelloWorld"
}
if err != nil {
t.Errorf("Did not expect to get error: %s", err.Error())
}
assert.Contains(t, erroutput, homePath)
}
func TestTerragruntWorksWithLocalTerraformVersion(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_PATH)
s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
lockTableName := fmt.Sprintf("terragrunt-test-locks-%s", strings.ToLower(uniqueId()))
defer deleteS3Bucket(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)
defer cleanupTableForTest(t, lockTableName, TERRAFORM_REMOTE_STATE_S3_REGION)
tmpTerragruntConfigPath := createTmpTerragruntConfig(t, TEST_FIXTURE_PATH, s3BucketName, lockTableName, config.DefaultTerragruntConfigPath)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-config %s --terragrunt-working-dir %s", tmpTerragruntConfigPath, TEST_FIXTURE_PATH))
var expectedS3Tags = map[string]string{
"owner": "terragrunt integration test",
"name": "Terraform state storage"}
validateS3BucketExistsAndIsTagged(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName, expectedS3Tags)
var expectedDynamoDBTableTags = map[string]string{
"owner": "terragrunt integration test",
"name": "Terraform lock table"}
validateDynamoDBTableExistsAndIsTagged(t, TERRAFORM_REMOTE_STATE_S3_REGION, lockTableName, expectedDynamoDBTableTags)
}
// Regression test to ensure that `accesslogging_bucket_name` and `accesslogging_target_prefix` are taken into account:
// the access-logging target is set to a new S3 bucket, distinct from the state bucket itself,
// and the log objects are prefixed with the `accesslogging_target_prefix` value.
func TestTerragruntSetsAccessLoggingForTfStateS3BucketToADifferentBucketWithGivenTargetPrefix(t *testing.T) {
t.Parallel()
examplePath := filepath.Join(TEST_FIXTURE_REGRESSIONS, "accesslogging-bucket/with-target-prefix-input")
cleanupTerraformFolder(t, examplePath)
s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
s3BucketLogsName := fmt.Sprintf("%s-tf-state-logs", s3BucketName)
s3BucketLogsTargetPrefix := "logs/"
lockTableName := fmt.Sprintf("terragrunt-test-locks-%s", strings.ToLower(uniqueId()))
defer deleteS3Bucket(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)
defer cleanupTableForTest(t, lockTableName, TERRAFORM_REMOTE_STATE_S3_REGION)
tmpTerragruntConfigPath := createTmpTerragruntConfig(
t,
examplePath,
s3BucketName,
lockTableName,
"remote_terragrunt.hcl",
)
runTerragrunt(t, fmt.Sprintf("terragrunt validate --terragrunt-non-interactive --terragrunt-config %s --terragrunt-working-dir %s", tmpTerragruntConfigPath, examplePath))
targetLoggingBucket := terraws.GetS3BucketLoggingTarget(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)
targetLoggingBucketPrefix := terraws.GetS3BucketLoggingTargetPrefix(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)
assert.Equal(t, s3BucketLogsName, targetLoggingBucket)
assert.Equal(t, s3BucketLogsTargetPrefix, targetLoggingBucketPrefix)
}
// Regression test to ensure that `accesslogging_bucket_name` is taken into account
// and that, when no `accesslogging_target_prefix` is provided, the default value is used for the target prefix.
func TestTerragruntSetsAccessLoggingForTfStateS3BucketToADifferentBucketWithDefaultTargetPrefix(t *testing.T) {
t.Parallel()
examplePath := filepath.Join(TEST_FIXTURE_REGRESSIONS, "accesslogging-bucket/no-target-prefix-input")
cleanupTerraformFolder(t, examplePath)
s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
s3BucketLogsName := fmt.Sprintf("%s-tf-state-logs", s3BucketName)
lockTableName := fmt.Sprintf("terragrunt-test-locks-%s", strings.ToLower(uniqueId()))
defer deleteS3Bucket(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)
defer cleanupTableForTest(t, lockTableName, TERRAFORM_REMOTE_STATE_S3_REGION)
tmpTerragruntConfigPath := createTmpTerragruntConfig(
t,
examplePath,
s3BucketName,
lockTableName,
"remote_terragrunt.hcl",
)
runTerragrunt(t, fmt.Sprintf("terragrunt validate --terragrunt-non-interactive --terragrunt-config %s --terragrunt-working-dir %s", tmpTerragruntConfigPath, examplePath))
targetLoggingBucket := terraws.GetS3BucketLoggingTarget(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)
targetLoggingBucketPrefix := terraws.GetS3BucketLoggingTargetPrefix(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)
assert.Equal(t, s3BucketLogsName, targetLoggingBucket)
assert.Equal(t, remote.DefaultS3BucketAccessLoggingTargetPrefix, targetLoggingBucketPrefix)
}
func TestTerragruntWorksWithGCSBackend(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GCS_PATH)
// We need a project to create the bucket in, so we pull one from the recommended environment variable.
project := os.Getenv("GOOGLE_CLOUD_PROJECT")
gcsBucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
defer deleteGCSBucket(t, gcsBucketName)
tmpTerragruntGCSConfigPath := createTmpTerragruntGCSConfig(t, TEST_FIXTURE_GCS_PATH, project, TERRAFORM_REMOTE_STATE_GCP_REGION, gcsBucketName, config.DefaultTerragruntConfigPath)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-config %s --terragrunt-working-dir %s", tmpTerragruntGCSConfigPath, TEST_FIXTURE_GCS_PATH))
var expectedGCSLabels = map[string]string{
"owner": "terragrunt_test",
"name": "terraform_state_storage"}
validateGCSBucketExistsAndIsLabeled(t, TERRAFORM_REMOTE_STATE_GCP_REGION, gcsBucketName, expectedGCSLabels)
}
func TestTerragruntWorksWithExistingGCSBucket(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GCS_BYO_BUCKET_PATH)
// We need a project to create the bucket in, so we pull one from the recommended environment variable.
project := os.Getenv("GOOGLE_CLOUD_PROJECT")
gcsBucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
defer deleteGCSBucket(t, gcsBucketName)
// Manually create the GCS bucket outside the default US location to verify that Terragrunt works correctly with an existing bucket.
location := TERRAFORM_REMOTE_STATE_GCP_REGION
createGCSBucket(t, project, location, gcsBucketName)
tmpTerragruntGCSConfigPath := createTmpTerragruntGCSConfig(t, TEST_FIXTURE_GCS_BYO_BUCKET_PATH, project, TERRAFORM_REMOTE_STATE_GCP_REGION, gcsBucketName, config.DefaultTerragruntConfigPath)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-config %s --terragrunt-working-dir %s", tmpTerragruntGCSConfigPath, TEST_FIXTURE_GCS_BYO_BUCKET_PATH))
validateGCSBucketExistsAndIsLabeled(t, location, gcsBucketName, nil)
}
func TestTerragruntWorksWithSingleJsonConfig(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_CONFIG_SINGLE_JSON_PATH)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_CONFIG_SINGLE_JSON_PATH)
rootTerragruntConfigPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_CONFIG_SINGLE_JSON_PATH)
runTerragrunt(t, fmt.Sprintf("terragrunt plan --terragrunt-non-interactive --terragrunt-working-dir %s", rootTerragruntConfigPath))
}
func TestTerragruntReportsTerraformErrorsWithPlanAll(t *testing.T) {
cleanupTerraformFolder(t, TEST_FIXTURE_FAILED_TERRAFORM)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_FAILED_TERRAFORM)
rootTerragruntConfigPath := util.JoinPath(tmpEnvPath, "fixture-failure")
cmd := fmt.Sprintf("terragrunt plan-all --terragrunt-non-interactive --terragrunt-working-dir %s", rootTerragruntConfigPath)
var (
stdout bytes.Buffer
stderr bytes.Buffer
)
// Call runTerragruntCommand directly because this command contains failures (which causes runTerragruntRedirectOutput to abort) but we don't care.
if err := runTerragruntCommand(t, cmd, &stdout, &stderr); err == nil {
t.Fatalf("Failed to properly fail command: %v. The terraform should be bad", cmd)
}
output := stdout.String()
errOutput := stderr.String()
fmt.Printf("STDERR is %s.\nSTDOUT is %s\n", errOutput, output)
assert.True(t, strings.Contains(errOutput, "missingvar1") || strings.Contains(output, "missingvar1"))
assert.True(t, strings.Contains(errOutput, "missingvar2") || strings.Contains(output, "missingvar2"))
}
func TestTerragruntGraphDependenciesCommand(t *testing.T) {
t.Parallel()
// This test doesn't even run plan; it exits right after the stack is created.
s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GRAPH_DEPENDENCIES)
rootTerragruntConfigPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_GRAPH_DEPENDENCIES, config.DefaultTerragruntConfigPath)
copyTerragruntConfigAndFillPlaceholders(t, rootTerragruntConfigPath, rootTerragruntConfigPath, s3BucketName, "not-used", "not-used")
environmentPath := fmt.Sprintf("%s/%s/root", tmpEnvPath, TEST_FIXTURE_GRAPH_DEPENDENCIES)
var (
stdout bytes.Buffer
stderr bytes.Buffer
)
runTerragruntRedirectOutput(t, fmt.Sprintf("terragrunt graph-dependencies --terragrunt-working-dir %s", environmentPath), &stdout, &stderr)
output := stdout.String()
assert.True(t, strings.Contains(output, strings.TrimSpace(`
digraph {
"backend-app" ;
"backend-app" -> "mysql";
"backend-app" -> "redis";
"backend-app" -> "vpc";
"frontend-app" ;
"frontend-app" -> "backend-app";
"frontend-app" -> "vpc";
"mysql" ;
"mysql" -> "vpc";
"redis" ;
"redis" -> "vpc";
"vpc" ;
}
`)))
}
func TestTerragruntRunAllCommand(t *testing.T) {
t.Parallel()
s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
defer deleteS3Bucket(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_OUTPUT_ALL)
rootTerragruntConfigPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_OUTPUT_ALL, config.DefaultTerragruntConfigPath)
copyTerragruntConfigAndFillPlaceholders(t, rootTerragruntConfigPath, rootTerragruntConfigPath, s3BucketName, "not-used", "not-used")
environmentPath := fmt.Sprintf("%s/%s/env1", tmpEnvPath, TEST_FIXTURE_OUTPUT_ALL)
runTerragrunt(t, fmt.Sprintf("terragrunt run-all init --terragrunt-non-interactive --terragrunt-working-dir %s", environmentPath))
}
func TestTerragruntOutputAllCommand(t *testing.T) {
t.Parallel()
s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
defer deleteS3Bucket(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_OUTPUT_ALL)
rootTerragruntConfigPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_OUTPUT_ALL, config.DefaultTerragruntConfigPath)
copyTerragruntConfigAndFillPlaceholders(t, rootTerragruntConfigPath, rootTerragruntConfigPath, s3BucketName, "not-used", "not-used")
environmentPath := fmt.Sprintf("%s/%s/env1", tmpEnvPath, TEST_FIXTURE_OUTPUT_ALL)
runTerragrunt(t, fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-working-dir %s", environmentPath))
var (
stdout bytes.Buffer
stderr bytes.Buffer
)
runTerragruntRedirectOutput(t, fmt.Sprintf("terragrunt output-all --terragrunt-non-interactive --terragrunt-working-dir %s", environmentPath), &stdout, &stderr)
output := stdout.String()
assert.True(t, strings.Contains(output, "app1 output"))
assert.True(t, strings.Contains(output, "app2 output"))
assert.True(t, strings.Contains(output, "app3 output"))
assert.True(t, (strings.Index(output, "app3 output") < strings.Index(output, "app1 output")) && (strings.Index(output, "app1 output") < strings.Index(output, "app2 output")))
}
func TestTerragruntValidateAllCommand(t *testing.T) {
t.Parallel()
s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
defer deleteS3Bucket(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_OUTPUT_ALL)
rootTerragruntConfigPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_OUTPUT_ALL, config.DefaultTerragruntConfigPath)
copyTerragruntConfigAndFillPlaceholders(t, rootTerragruntConfigPath, rootTerragruntConfigPath, s3BucketName, "not-used", "not-used")
environmentPath := fmt.Sprintf("%s/%s/env1", tmpEnvPath, TEST_FIXTURE_OUTPUT_ALL)
runTerragrunt(t, fmt.Sprintf("terragrunt validate-all --terragrunt-non-interactive --terragrunt-working-dir %s", environmentPath))
}
// Check that Terragrunt does not pollute stdout with anything
func TestTerragruntStdOut(t *testing.T) {
t.Parallel()
var (
stdout bytes.Buffer
stderr bytes.Buffer
)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", TEST_FIXTURE_STDOUT))
runTerragruntRedirectOutput(t, fmt.Sprintf("terragrunt output foo --terragrunt-non-interactive --terragrunt-working-dir %s", TEST_FIXTURE_STDOUT), &stdout, &stderr)
output := stdout.String()
assert.Equal(t, "\"foo\"\n", output)
}
func TestTerragruntOutputAllCommandSpecificVariableIgnoreDependencyErrors(t *testing.T) {
t.Parallel()
s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
defer deleteS3Bucket(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_OUTPUT_ALL)
rootTerragruntConfigPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_OUTPUT_ALL, config.DefaultTerragruntConfigPath)
copyTerragruntConfigAndFillPlaceholders(t, rootTerragruntConfigPath, rootTerragruntConfigPath, s3BucketName, "not-used", "not-used")
environmentPath := fmt.Sprintf("%s/%s/env1", tmpEnvPath, TEST_FIXTURE_OUTPUT_ALL)
runTerragrunt(t, fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-working-dir %s", environmentPath))
var (
stdout bytes.Buffer
stderr bytes.Buffer
)
// Call runTerragruntCommand directly because this command contains failures (which causes runTerragruntRedirectOutput to abort) but we don't care.
runTerragruntCommand(t, fmt.Sprintf("terragrunt output-all app2_text --terragrunt-ignore-dependency-errors --terragrunt-non-interactive --terragrunt-working-dir %s", environmentPath), &stdout, &stderr)
output := stdout.String()
logBufferContentsLineByLine(t, stdout, "output-all stdout")
logBufferContentsLineByLine(t, stderr, "output-all stderr")
// Without --terragrunt-ignore-dependency-errors, app2 never runs because its dependencies have "errors" since they don't have the output "app2_text".
assert.True(t, strings.Contains(output, "app2 output"))
}
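// testRemoteFixtureParallelism stamps out numberOfModules copies of the parallelism
// fixture template, applies them all with the given --terragrunt-parallelism value, and
// returns the combined output-all stdout, the apply-all start time (unix seconds), and
// any error. Callers can compare per-module timestamps in the output against the start
// time to infer how many modules actually ran concurrently.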
func testRemoteFixtureParallelism(t *testing.T, parallelism int, numberOfModules int, timeToDeployEachModule time.Duration) (string, int, error) {
s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
// copy the template `numberOfModules` times into the app
tmpEnvPath, err := ioutil.TempDir("", "terragrunt-test")
if err != nil {
t.Fatalf("Failed to create temp dir due to error: %v", err)
}
for i := 0; i < numberOfModules; i++ {
err := util.CopyFolderContents(TEST_FIXTURE_PARALLELISM, tmpEnvPath, ".terragrunt-test", nil)
if err != nil {
return "", 0, err
}
err = os.Rename(
path.Join(tmpEnvPath, "template"),
path.Join(tmpEnvPath, "app"+strconv.Itoa(i)))
if err != nil {
return "", 0, err
}
}
rootTerragruntConfigPath := util.JoinPath(tmpEnvPath, config.DefaultTerragruntConfigPath)
copyTerragruntConfigAndFillPlaceholders(t, rootTerragruntConfigPath, rootTerragruntConfigPath, s3BucketName, "not-used", "not-used")
environmentPath := tmpEnvPath
// forces plugin download & initialization (no parallelism control)
runTerragrunt(t, fmt.Sprintf("terragrunt plan-all --terragrunt-non-interactive --terragrunt-working-dir %s -var sleep_seconds=%d", environmentPath, timeToDeployEachModule/time.Second))
// apply all with parallelism set
// NOTE: we can't run just apply-all and not plan-all because the time to initialize the plugins skews the results of the test
testStart := int(time.Now().Unix())
t.Logf("apply-all start time = %d, %s", testStart, time.Now().Format(time.RFC3339))
runTerragrunt(t, fmt.Sprintf("terragrunt apply-all --terragrunt-parallelism %d --terragrunt-non-interactive --terragrunt-working-dir %s -var sleep_seconds=%d", parallelism, environmentPath, timeToDeployEachModule/time.Second))
var (
stdout bytes.Buffer
stderr bytes.Buffer
)
// Call runTerragruntCommand directly because this command contains failures (which causes runTerragruntRedirectOutput to abort) but we don't care.
err = runTerragruntCommand(t, fmt.Sprintf("terragrunt output-all --terragrunt-non-interactive --terragrunt-working-dir %s", environmentPath), &stdout, &stderr)
if err != nil {
return "", 0, err
}
return stdout.String(), testStart, nil
}
func TestTerragruntStackCommands(t *testing.T) {
t.Parallel()
s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
lockTableName := fmt.Sprintf("terragrunt-test-locks-%s", strings.ToLower(uniqueId()))
defer deleteS3Bucket(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)
defer cleanupTableForTest(t, lockTableName, TERRAFORM_REMOTE_STATE_S3_REGION)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_STACK)
rootTerragruntConfigPath := util.JoinPath(tmpEnvPath, "fixture-stack", config.DefaultTerragruntConfigPath)
copyTerragruntConfigAndFillPlaceholders(t, rootTerragruntConfigPath, rootTerragruntConfigPath, s3BucketName, lockTableName, "not-used")
mgmtEnvironmentPath := fmt.Sprintf("%s/fixture-stack/mgmt", tmpEnvPath)
stageEnvironmentPath := fmt.Sprintf("%s/fixture-stack/stage", tmpEnvPath)
runTerragrunt(t, fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-working-dir %s", mgmtEnvironmentPath))
runTerragrunt(t, fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-working-dir %s", stageEnvironmentPath))
runTerragrunt(t, fmt.Sprintf("terragrunt output-all --terragrunt-non-interactive --terragrunt-working-dir %s", mgmtEnvironmentPath))
runTerragrunt(t, fmt.Sprintf("terragrunt output-all --terragrunt-non-interactive --terragrunt-working-dir %s", stageEnvironmentPath))
runTerragrunt(t, fmt.Sprintf("terragrunt destroy-all --terragrunt-non-interactive --terragrunt-working-dir %s", stageEnvironmentPath))
runTerragrunt(t, fmt.Sprintf("terragrunt destroy-all --terragrunt-non-interactive --terragrunt-working-dir %s", mgmtEnvironmentPath))
}
func TestTerragruntStackCommandsWithPlanFile(t *testing.T) {
t.Parallel()
disjointEnvironmentPath := "fixture-stack/disjoint"
cleanupTerraformFolder(t, disjointEnvironmentPath)
runTerragrunt(t, fmt.Sprintf("terragrunt plan-all -out=plan.tfplan --terragrunt-log-level info --terragrunt-non-interactive --terragrunt-working-dir %s", disjointEnvironmentPath))
runTerragrunt(t, fmt.Sprintf("terragrunt apply-all plan.tfplan --terragrunt-log-level info --terragrunt-non-interactive --terragrunt-working-dir %s", disjointEnvironmentPath))
}
func TestInvalidSource(t *testing.T) {
t.Parallel()
generateTestCase := TEST_FIXTURE_NOT_EXISTING_SOURCE
cleanupTerraformFolder(t, generateTestCase)
cleanupTerragruntFolder(t, generateTestCase)
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt init --terragrunt-working-dir %s", generateTestCase), &stdout, &stderr)
require.Error(t, err)
_, ok := errors.Unwrap(err).(cli.WorkingDirNotFound)
assert.True(t, ok)
}
// Run terragrunt plan -detailed-exitcode on a folder with some uncreated resources and make sure that you get an exit
// code of "2", which means there are changes to apply.
func TestExitCode(t *testing.T) {
t.Parallel()
rootPath := copyEnvironment(t, TEST_FIXTURE_EXIT_CODE)
modulePath := util.JoinPath(rootPath, TEST_FIXTURE_EXIT_CODE)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt plan -detailed-exitcode --terragrunt-non-interactive --terragrunt-working-dir %s", modulePath), os.Stdout, os.Stderr)
exitCode, exitCodeErr := shell.GetExitCode(err)
assert.Nil(t, exitCodeErr)
assert.Equal(t, 2, exitCode)
}
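// TestAutoRetryBasicRerun uses a fixture whose first apply fails with a transient,
// retryable error; with auto-retry enabled (the default), the rerun should succeed
// and the output should contain "Apply complete!".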
func TestAutoRetryBasicRerun(t *testing.T) {
t.Parallel()
out := new(bytes.Buffer)
rootPath := copyEnvironment(t, TEST_FIXTURE_AUTO_RETRY_RERUN)
modulePath := util.JoinPath(rootPath, TEST_FIXTURE_AUTO_RETRY_RERUN)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", modulePath), out, os.Stderr)
assert.Nil(t, err)
assert.Contains(t, out.String(), "Apply complete!")
}
func TestAutoRetrySkip(t *testing.T) {
t.Parallel()
out := new(bytes.Buffer)
rootPath := copyEnvironment(t, TEST_FIXTURE_AUTO_RETRY_RERUN)
modulePath := util.JoinPath(rootPath, TEST_FIXTURE_AUTO_RETRY_RERUN)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-no-auto-retry --terragrunt-non-interactive --terragrunt-working-dir %s", modulePath), out, os.Stderr)
assert.NotNil(t, err)
assert.NotContains(t, out.String(), "Apply complete!")
}
func TestPlanfileOrder(t *testing.T) {
t.Parallel()
rootPath := copyEnvironment(t, TEST_FIXTURE_PLANFILE_ORDER)
modulePath := util.JoinPath(rootPath, TEST_FIXTURE_PLANFILE_ORDER)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt plan --terragrunt-working-dir %s", modulePath), os.Stdout, os.Stderr)
assert.Nil(t, err)
err = runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-working-dir %s", modulePath), os.Stdout, os.Stderr)
assert.Nil(t, err)
}
func TestAutoRetryExhaustRetries(t *testing.T) {
t.Parallel()
out := new(bytes.Buffer)
rootPath := copyEnvironment(t, TEST_FIXTURE_AUTO_RETRY_EXHAUST)
modulePath := util.JoinPath(rootPath, TEST_FIXTURE_AUTO_RETRY_EXHAUST)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", modulePath), out, os.Stderr)
assert.NotNil(t, err)
assert.Contains(t, out.String(), "Failed to load backend")
assert.NotContains(t, out.String(), "Apply complete!")
}
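// TestAutoRetryCustomRetryableErrors checks that user-defined retryable error patterns
// are honored. The fixture presumably declares something along the lines of:
//
//	retryable_errors = [
//	  "(?s).*My own little error.*",
//	]
//
// so the first failing run is retried and the apply eventually completes.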
func TestAutoRetryCustomRetryableErrors(t *testing.T) {
t.Parallel()
out := new(bytes.Buffer)
rootPath := copyEnvironment(t, TEST_FIXTURE_AUTO_RETRY_CUSTOM_ERRORS)
modulePath := util.JoinPath(rootPath, TEST_FIXTURE_AUTO_RETRY_CUSTOM_ERRORS)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply --auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", modulePath), out, os.Stderr)
assert.Nil(t, err)
assert.Contains(t, out.String(), "My own little error")
assert.Contains(t, out.String(), "Apply complete!")
}
func TestAutoRetryCustomRetryableErrorsFailsWhenRetryableErrorsNotSet(t *testing.T) {
t.Parallel()
out := new(bytes.Buffer)
rootPath := copyEnvironment(t, TEST_FIXTURE_AUTO_RETRY_CUSTOM_ERRORS_NOT_SET)
modulePath := util.JoinPath(rootPath, TEST_FIXTURE_AUTO_RETRY_CUSTOM_ERRORS_NOT_SET)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply --auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", modulePath), out, os.Stderr)
assert.NotNil(t, err)
assert.Contains(t, out.String(), "My own little error")
assert.NotContains(t, out.String(), "Apply complete!")
}
func TestAutoRetryFlagWithRecoverableError(t *testing.T) {
t.Parallel()
out := new(bytes.Buffer)
rootPath := copyEnvironment(t, TEST_FIXTURE_AUTO_RETRY_RERUN)
modulePath := util.JoinPath(rootPath, TEST_FIXTURE_AUTO_RETRY_RERUN)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-no-auto-retry --terragrunt-non-interactive --terragrunt-working-dir %s", modulePath), out, os.Stderr)
assert.NotNil(t, err)
assert.NotContains(t, out.String(), "Apply complete!")
}
func TestAutoRetryEnvVarWithRecoverableError(t *testing.T) {
os.Setenv("TERRAGRUNT_AUTO_RETRY", "false")
defer os.Unsetenv("TERRAGRUNT_AUTO_RETRY")
out := new(bytes.Buffer)
rootPath := copyEnvironment(t, TEST_FIXTURE_AUTO_RETRY_RERUN)
modulePath := util.JoinPath(rootPath, TEST_FIXTURE_AUTO_RETRY_RERUN)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", modulePath), out, os.Stderr)
assert.NotNil(t, err)
assert.NotContains(t, out.String(), "Apply complete!")
}
func TestAutoRetryApplyAllDependentModuleRetries(t *testing.T) {
t.Parallel()
out := new(bytes.Buffer)
rootPath := copyEnvironment(t, TEST_FIXTURE_AUTO_RETRY_APPLY_ALL_RETRIES)
modulePath := util.JoinPath(rootPath, TEST_FIXTURE_AUTO_RETRY_APPLY_ALL_RETRIES)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply-all -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", modulePath), out, os.Stderr)
assert.Nil(t, err)
s := out.String()
assert.Contains(t, s, "app1 output")
assert.Contains(t, s, "app2 output")
assert.Contains(t, s, "app3 output")
assert.Contains(t, s, "Apply complete!")
}
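// TestAutoRetryConfigurableRetries verifies the retry knobs. The fixture is assumed to
// set something like:
//
//	retry_max_attempts       = 5
//	retry_sleep_interval_sec = 0
//
// which is why the test expects exactly four "Sleeping 0s before retrying." messages:
// five attempts imply four sleeps.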
func TestAutoRetryConfigurableRetries(t *testing.T) {
t.Parallel()
stdout := new(bytes.Buffer)
stderr := new(bytes.Buffer)
rootPath := copyEnvironment(t, TEST_FIXTURE_AUTO_RETRY_CONFIGURABLE_RETRIES)
modulePath := util.JoinPath(rootPath, TEST_FIXTURE_AUTO_RETRY_CONFIGURABLE_RETRIES)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", modulePath), stdout, stderr)
sleeps := regexp.MustCompile("Sleeping 0s before retrying.").FindAllStringIndex(stderr.String(), -1)
assert.Nil(t, err)
assert.Equal(t, 4, len(sleeps)) // 5 retries, so 4 sleeps
assert.Contains(t, stdout.String(), "Apply complete!")
}
func TestAutoRetryConfigurableRetriesErrors(t *testing.T) {
t.Parallel()
testCases := []struct {
fixture string
errorMessage string
}{
{TEST_FIXTURE_AUTO_RETRY_CONFIGURABLE_RETRIES_ERROR_1, "Cannot have less than 1 max retry"},
{TEST_FIXTURE_AUTO_RETRY_CONFIGURABLE_RETRIES_ERROR_2, "Cannot sleep for less than 0 seconds"},
}
for _, tc := range testCases {
tc := tc
t.Run(tc.fixture, func(t *testing.T) {
t.Parallel()
stdout := new(bytes.Buffer)
stderr := new(bytes.Buffer)
rootPath := copyEnvironment(t, tc.fixture)
modulePath := util.JoinPath(rootPath, tc.fixture)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", modulePath), stdout, stderr)
assert.NotNil(t, err)
assert.NotContains(t, stdout.String(), "Apply complete!")
assert.Contains(t, err.Error(), tc.errorMessage)
})
}
}
func TestAwsProviderPatch(t *testing.T) {
t.Parallel()
stderr := new(bytes.Buffer)
rootPath := copyEnvironment(t, TEST_FIXTURE_AWS_PROVIDER_PATCH)
modulePath := util.JoinPath(rootPath, TEST_FIXTURE_AWS_PROVIDER_PATCH)
mainTFFile := filepath.Join(modulePath, "main.tf")
// fill in branch so we can test against updates to the test case file
mainContents, err := util.ReadFileAsString(mainTFFile)
require.NoError(t, err)
branchName := git.GetCurrentBranchName(t)
// https://www.terraform.io/docs/language/modules/sources.html#modules-in-package-sub-directories
// https://github.com/gruntwork-io/terragrunt/issues/1778
branchName = url.QueryEscape(branchName)
mainContents = strings.Replace(mainContents, "__BRANCH_NAME__", branchName, -1)
require.NoError(t, ioutil.WriteFile(mainTFFile, []byte(mainContents), 0444))
assert.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt aws-provider-patch --terragrunt-override-attr region=\"eu-west-1\" --terragrunt-override-attr allowed_account_ids=[\"00000000000\"] --terragrunt-working-dir %s --terragrunt-log-level debug", modulePath), os.Stdout, stderr),
)
t.Log(stderr.String())
assert.Regexp(t, "Patching AWS provider in .+test/fixture-aws-provider-patch/example-module/main.tf", stderr.String())
// Make sure the resulting terraform code is still valid
assert.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt validate --terragrunt-working-dir %s", modulePath), os.Stdout, os.Stderr),
)
}
// This tests terragrunt properly passes through terraform commands and any number of specified args
func TestTerraformCommandCliArgs(t *testing.T) {
t.Parallel()
testCases := []struct {
command []string
expected string
}{
{
[]string{"version"},
"terraform version",
},
{
[]string{"version", "foo"},
"terraform version foo",
},
{
[]string{"version", "foo", "bar", "baz"},
"terraform version foo bar baz",
},
{
[]string{"version", "foo", "bar", "baz", "foobar"},
"terraform version foo bar baz foobar",
},
}
for _, testCase := range testCases {
cmd := fmt.Sprintf("terragrunt %s --terragrunt-non-interactive --terragrunt-log-level debug --terragrunt-working-dir %s", strings.Join(testCase.command, " "), TEST_FIXTURE_EXTRA_ARGS_PATH)
var (
stdout bytes.Buffer
stderr bytes.Buffer
)
runTerragruntRedirectOutput(t, cmd, &stdout, &stderr)
output := stdout.String()
errOutput := stderr.String()
assert.True(t, strings.Contains(errOutput, testCase.expected) || strings.Contains(output, testCase.expected))
}
}
// This tests terragrunt properly passes through terraform commands with sub commands
// and any number of specified args
func TestTerraformSubcommandCliArgs(t *testing.T) {
t.Parallel()
testCases := []struct {
command []string
expected string
}{
{
[]string{"force-unlock"},
"terraform force-unlock",
},
{
[]string{"force-unlock", "foo"},
"terraform force-unlock foo",
},
{
[]string{"force-unlock", "foo", "bar", "baz"},
"terraform force-unlock foo bar baz",
},
{
[]string{"force-unlock", "foo", "bar", "baz", "foobar"},
"terraform force-unlock foo bar baz foobar",
},
}
for _, testCase := range testCases {
cmd := fmt.Sprintf("terragrunt %s --terragrunt-non-interactive --terragrunt-log-level debug --terragrunt-working-dir %s", strings.Join(testCase.command, " "), TEST_FIXTURE_EXTRA_ARGS_PATH)
var (
stdout bytes.Buffer
stderr bytes.Buffer
)
// Call runTerragruntCommand directly because this command contains failures (which causes runTerragruntRedirectOutput to abort) but we don't care.
if err := runTerragruntCommand(t, cmd, &stdout, &stderr); err == nil {
t.Fatalf("Failed to properly fail command: %v.", cmd)
}
output := stdout.String()
errOutput := stderr.String()
assert.True(t, strings.Contains(errOutput, testCase.expected) || strings.Contains(output, testCase.expected))
}
}
func TestPreventDestroyOverride(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_PREVENT_DESTROY_OVERRIDE)
assert.NoError(t, runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-working-dir %s", TEST_FIXTURE_PREVENT_DESTROY_OVERRIDE), os.Stdout, os.Stderr))
assert.NoError(t, runTerragruntCommand(t, fmt.Sprintf("terragrunt destroy -auto-approve --terragrunt-working-dir %s", TEST_FIXTURE_PREVENT_DESTROY_OVERRIDE), os.Stdout, os.Stderr))
}
func TestPreventDestroyNotSet(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_PREVENT_DESTROY_NOT_SET)
assert.NoError(t, runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-working-dir %s", TEST_FIXTURE_PREVENT_DESTROY_NOT_SET), os.Stdout, os.Stderr))
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt destroy -auto-approve --terragrunt-working-dir %s", TEST_FIXTURE_PREVENT_DESTROY_NOT_SET), os.Stdout, os.Stderr)
if assert.Error(t, err) {
underlying := errors.Unwrap(err)
assert.IsType(t, cli.ModuleIsProtected{}, underlying)
}
}
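// TestPreventDestroy asserts that a module with prevent_destroy = true in its
// terragrunt.hcl applies normally but refuses destroy, surfacing a
// cli.ModuleIsProtected error.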
func TestPreventDestroy(t *testing.T) {
t.Parallel()
tmpEnvPath := copyEnvironment(t, "fixture-download")
fixtureRoot := util.JoinPath(tmpEnvPath, TEST_FIXTURE_LOCAL_PREVENT_DESTROY)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", fixtureRoot))
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt destroy -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", fixtureRoot), os.Stdout, os.Stderr)
if assert.Error(t, err) {
underlying := errors.Unwrap(err)
assert.IsType(t, cli.ModuleIsProtected{}, underlying)
}
}
func TestPreventDestroyApply(t *testing.T) {
t.Parallel()
tmpEnvPath := copyEnvironment(t, "fixture-download")
fixtureRoot := util.JoinPath(tmpEnvPath, TEST_FIXTURE_LOCAL_PREVENT_DESTROY)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", fixtureRoot))
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -destroy -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", fixtureRoot), os.Stdout, os.Stderr)
if assert.Error(t, err) {
underlying := errors.Unwrap(err)
assert.IsType(t, cli.ModuleIsProtected{}, underlying)
}
}
func TestPreventDestroyDependencies(t *testing.T) {
t.Parallel()
// Populate module paths.
moduleNames := []string{
"module-a",
"module-b",
"module-c",
"module-d",
"module-e",
}
modulePaths := make(map[string]string, len(moduleNames))
for _, moduleName := range moduleNames {
modulePaths[moduleName] = util.JoinPath(TEST_FIXTURE_LOCAL_PREVENT_DESTROY_DEPENDENCIES, moduleName)
}
// Cleanup all modules directories.
cleanupTerraformFolder(t, TEST_FIXTURE_LOCAL_PREVENT_DESTROY_DEPENDENCIES)
for _, modulePath := range modulePaths {
cleanupTerraformFolder(t, modulePath)
}
var (
applyAllStdout bytes.Buffer
applyAllStderr bytes.Buffer
)
// Apply and destroy all modules.
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-working-dir %s", TEST_FIXTURE_LOCAL_PREVENT_DESTROY_DEPENDENCIES), &applyAllStdout, &applyAllStderr)
logBufferContentsLineByLine(t, applyAllStdout, "apply-all stdout")
logBufferContentsLineByLine(t, applyAllStderr, "apply-all stderr")
if err != nil {
t.Fatalf("apply-all in TestPreventDestroyDependencies failed with error: %v. Full std", err)
}
var (
destroyAllStdout bytes.Buffer
destroyAllStderr bytes.Buffer
)
err = runTerragruntCommand(t, fmt.Sprintf("terragrunt destroy-all --terragrunt-non-interactive --terragrunt-working-dir %s", TEST_FIXTURE_LOCAL_PREVENT_DESTROY_DEPENDENCIES), &destroyAllStdout, &destroyAllStderr)
logBufferContentsLineByLine(t, destroyAllStdout, "destroy-all stdout")
logBufferContentsLineByLine(t, destroyAllStderr, "destroy-all stderr")
if assert.Error(t, err) {
underlying := errors.Unwrap(err)
assert.IsType(t, &multierror.Error{}, underlying)
}
// Check that modules C, D and E were deleted and modules A and B weren't.
for moduleName, modulePath := range modulePaths {
var (
showStdout bytes.Buffer
showStderr bytes.Buffer
)
err = runTerragruntCommand(t, fmt.Sprintf("terragrunt show --terragrunt-non-interactive --terragrunt-working-dir %s", modulePath), &showStdout, &showStderr)
logBufferContentsLineByLine(t, showStdout, fmt.Sprintf("show stdout for %s", modulePath))
logBufferContentsLineByLine(t, showStderr, fmt.Sprintf("show stderr for %s", modulePath))
assert.NoError(t, err)
output := showStdout.String()
switch moduleName {
case "module-a":
assert.Contains(t, output, "Hello, Module A")
case "module-b":
assert.Contains(t, output, "Hello, Module B")
case "module-c":
assert.NotContains(t, output, "Hello, Module C")
case "module-d":
assert.NotContains(t, output, "Hello, Module D")
case "module-e":
assert.NotContains(t, output, "Hello, Module E")
}
}
}
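// validateInputs asserts that each Terraform input type (bool, lists, maps, number,
// object, string, and an env-var-backed default) round-trips through
// `terraform output -json` with the expected value. JSON unmarshalling in Go decodes
// every number as float64, hence the 42.0-style expectations.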
func validateInputs(t *testing.T, outputs map[string]TerraformOutput) {
assert.Equal(t, outputs["bool"].Value, true)
assert.Equal(t, outputs["list_bool"].Value, []interface{}{true, false})
assert.Equal(t, outputs["list_number"].Value, []interface{}{1.0, 2.0, 3.0})
assert.Equal(t, outputs["list_string"].Value, []interface{}{"a", "b", "c"})
assert.Equal(t, outputs["map_bool"].Value, map[string]interface{}{"foo": true, "bar": false, "baz": true})
assert.Equal(t, outputs["map_number"].Value, map[string]interface{}{"foo": 42.0, "bar": 12345.0})
assert.Equal(t, outputs["map_string"].Value, map[string]interface{}{"foo": "bar"})
assert.Equal(t, outputs["number"].Value, 42.0)
assert.Equal(t, outputs["object"].Value, map[string]interface{}{"list": []interface{}{1.0, 2.0, 3.0}, "map": map[string]interface{}{"foo": "bar"}, "num": 42.0, "str": "string"})
assert.Equal(t, outputs["string"].Value, "string")
assert.Equal(t, outputs["from_env"].Value, "default")
}
func TestInputsPassedThroughCorrectly(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_INPUTS)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_INPUTS)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_INPUTS)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr)
require.NoError(t, err)
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
validateInputs(t, outputs)
}
func TestNoAutoInit(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_REGRESSIONS)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_REGRESSIONS)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_REGRESSIONS, "skip-init")
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply --terragrunt-no-auto-init --terragrunt-log-level debug --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr)
logBufferContentsLineByLine(t, stdout, "no force apply stdout")
logBufferContentsLineByLine(t, stderr, "no force apply stderr")
require.Error(t, err)
require.Contains(t, stderr.String(), "This module is not yet installed.")
}
func TestLocalsParsing(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_LOCALS_CANONICAL)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", TEST_FIXTURE_LOCALS_CANONICAL))
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", TEST_FIXTURE_LOCALS_CANONICAL), &stdout, &stderr)
require.NoError(t, err)
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
assert.Equal(t, outputs["data"].Value, "Hello world\n")
assert.Equal(t, outputs["answer"].Value, float64(42))
}
func TestLocalsInInclude(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_LOCALS_IN_INCLUDE)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_LOCALS_IN_INCLUDE)
childPath := filepath.Join(tmpEnvPath, TEST_FIXTURE_LOCALS_IN_INCLUDE, TEST_FIXTURE_LOCALS_IN_INCLUDE_CHILD_REL_PATH)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve -no-color --terragrunt-non-interactive --terragrunt-working-dir %s", childPath))
// Check the outputs of the dir functions referenced in locals to make sure they return what is expected
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", childPath), &stdout, &stderr),
)
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
assert.Equal(
t,
filepath.Join(tmpEnvPath, TEST_FIXTURE_LOCALS_IN_INCLUDE),
outputs["parent_terragrunt_dir"].Value,
)
assert.Equal(
t,
childPath,
outputs["terragrunt_dir"].Value,
)
assert.Equal(
t,
"apply",
outputs["terraform_command"].Value,
)
assert.Equal(
t,
"[\"apply\",\"-auto-approve\",\"-no-color\"]",
outputs["terraform_cli_args"].Value,
)
}
func TestUndefinedLocalsReferenceBreaks(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_LOCALS_ERROR_UNDEFINED_LOCAL)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", TEST_FIXTURE_LOCALS_ERROR_UNDEFINED_LOCAL), os.Stdout, os.Stderr)
assert.Error(t, err)
}
func TestUndefinedLocalsReferenceToInputsBreaks(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_LOCALS_ERROR_UNDEFINED_LOCAL_BUT_INPUT)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", TEST_FIXTURE_LOCALS_ERROR_UNDEFINED_LOCAL_BUT_INPUT), os.Stdout, os.Stderr)
assert.Error(t, err)
}
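// TerraformOutput mirrors a single entry in the JSON emitted by `terraform output -json`:
// each output carries its sensitivity flag, its type, and its value.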
type TerraformOutput struct {
Sensitive bool
Type interface{}
Value interface{}
}
func TestPreventDestroyDependenciesIncludedConfig(t *testing.T) {
t.Parallel()
// Populate module paths.
moduleNames := []string{
"module-a",
"module-b",
"module-c",
}
modulePaths := make(map[string]string, len(moduleNames))
for _, moduleName := range moduleNames {
modulePaths[moduleName] = util.JoinPath(TEST_FIXTURE_LOCAL_INCLUDE_PREVENT_DESTROY_DEPENDENCIES, moduleName)
}
// Cleanup all modules directories.
cleanupTerraformFolder(t, TEST_FIXTURE_LOCAL_INCLUDE_PREVENT_DESTROY_DEPENDENCIES)
for _, modulePath := range modulePaths {
cleanupTerraformFolder(t, modulePath)
}
var (
applyAllStdout bytes.Buffer
applyAllStderr bytes.Buffer
)
// Apply and destroy all modules.
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-working-dir %s", TEST_FIXTURE_LOCAL_INCLUDE_PREVENT_DESTROY_DEPENDENCIES), &applyAllStdout, &applyAllStderr)
logBufferContentsLineByLine(t, applyAllStdout, "apply-all stdout")
logBufferContentsLineByLine(t, applyAllStderr, "apply-all stderr")
if err != nil {
t.Fatalf("apply-all in TestPreventDestroyDependenciesIncludedConfig failed with error: %v. Full std", err)
}
var (
destroyAllStdout bytes.Buffer
destroyAllStderr bytes.Buffer
)
err = runTerragruntCommand(t, fmt.Sprintf("terragrunt destroy-all --terragrunt-non-interactive --terragrunt-working-dir %s", TEST_FIXTURE_LOCAL_INCLUDE_PREVENT_DESTROY_DEPENDENCIES), &destroyAllStdout, &destroyAllStderr)
logBufferContentsLineByLine(t, destroyAllStdout, "destroy-all stdout")
logBufferContentsLineByLine(t, destroyAllStderr, "destroy-all stderr")
if assert.Error(t, err) {
underlying := errors.Unwrap(err)
assert.IsType(t, &multierror.Error{}, underlying)
}
// Check that module C was destroyed and modules A and B weren't.
for moduleName, modulePath := range modulePaths {
var (
showStdout bytes.Buffer
showStderr bytes.Buffer
)
err = runTerragruntCommand(t, fmt.Sprintf("terragrunt show --terragrunt-non-interactive --terragrunt-working-dir %s", modulePath), &showStdout, &showStderr)
logBufferContentsLineByLine(t, showStdout, fmt.Sprintf("show stdout for %s", modulePath))
logBufferContentsLineByLine(t, showStderr, fmt.Sprintf("show stderr for %s", modulePath))
assert.NoError(t, err)
output := showStdout.String()
switch moduleName {
case "module-a":
assert.Contains(t, output, "Hello, Module A")
case "module-b":
assert.Contains(t, output, "Hello, Module B")
case "module-c":
assert.NotContains(t, output, "Hello, Module C")
}
}
}
func TestTerragruntMissingDependenciesFail(t *testing.T) {
t.Parallel()
generateTestCase := TEST_FIXTURE_MISSING_DEPENDENCIE
cleanupTerraformFolder(t, generateTestCase)
cleanupTerragruntFolder(t, generateTestCase)
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt init --terragrunt-working-dir %s", generateTestCase), &stdout, &stderr)
require.Error(t, err)
parsedError, ok := errors.Unwrap(err).(config.DependencyDirNotFound)
assert.True(t, ok)
assert.Len(t, parsedError.Dir, 1)
assert.Contains(t, parsedError.Dir[0], "hl3-release")
}
func TestTerragruntExcludeExternalDependencies(t *testing.T) {
t.Parallel()
excludedModule := "module-a"
includedModule := "module-b"
modules := []string{
excludedModule,
includedModule,
}
cleanupTerraformFolder(t, TEST_FIXTURE_EXTERNAL_DEPENDENCIE)
for _, module := range modules {
cleanupTerraformFolder(t, util.JoinPath(TEST_FIXTURE_EXTERNAL_DEPENDENCIE, module))
}
var (
applyAllStdout bytes.Buffer
applyAllStderr bytes.Buffer
)
rootPath := copyEnvironment(t, TEST_FIXTURE_EXTERNAL_DEPENDENCIE)
modulePath := util.JoinPath(rootPath, TEST_FIXTURE_EXTERNAL_DEPENDENCIE, includedModule)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-ignore-external-dependencies --terragrunt-working-dir %s", modulePath), &applyAllStdout, &applyAllStderr)
logBufferContentsLineByLine(t, applyAllStdout, "apply-all stdout")
logBufferContentsLineByLine(t, applyAllStderr, "apply-all stderr")
applyAllStdoutString := applyAllStdout.String()
if err != nil {
t.Errorf("Did not expect to get error: %s", err.Error())
}
assert.Contains(t, applyAllStdoutString, fmt.Sprintf("Hello World, %s", includedModule))
assert.NotContains(t, applyAllStdoutString, fmt.Sprintf("Hello World, %s", excludedModule))
}
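// TestApplySkipTrue covers the skip attribute: a module whose terragrunt.hcl sets
// skip = true must be skipped with a log line saying so, and must produce no output.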
func TestApplySkipTrue(t *testing.T) {
t.Parallel()
rootPath := copyEnvironment(t, TEST_FIXTURE_SKIP)
rootPath = util.JoinPath(rootPath, TEST_FIXTURE_SKIP, "skip-true")
showStdout := bytes.Buffer{}
showStderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-log-level info --terragrunt-non-interactive --terragrunt-working-dir %s --var person=Hobbs", rootPath), &showStdout, &showStderr)
logBufferContentsLineByLine(t, showStdout, "show stdout")
logBufferContentsLineByLine(t, showStderr, "show stderr")
stdout := showStdout.String()
stderr := showStderr.String()
assert.Nil(t, err)
assert.Regexp(t, regexp.MustCompile("Skipping terragrunt module .*fixture-skip/skip-true/terragrunt.hcl due to skip = true."), stderr)
assert.NotContains(t, stdout, "hello, Hobbs")
}
func TestApplySkipFalse(t *testing.T) {
t.Parallel()
rootPath := copyEnvironment(t, TEST_FIXTURE_SKIP)
rootPath = util.JoinPath(rootPath, TEST_FIXTURE_SKIP, "skip-false")
showStdout := bytes.Buffer{}
showStderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &showStdout, &showStderr)
logBufferContentsLineByLine(t, showStdout, "show stdout")
logBufferContentsLineByLine(t, showStderr, "show stderr")
stderr := showStderr.String()
stdout := showStdout.String()
assert.Nil(t, err)
assert.Contains(t, stdout, "hello, Hobbs")
assert.NotContains(t, stderr, "Skipping terragrunt module")
}
func TestApplyAllSkipTrue(t *testing.T) {
t.Parallel()
rootPath := copyEnvironment(t, TEST_FIXTURE_SKIP)
rootPath = util.JoinPath(rootPath, TEST_FIXTURE_SKIP, "skip-true")
showStdout := bytes.Buffer{}
showStderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-working-dir %s --terragrunt-log-level info", rootPath), &showStdout, &showStderr)
logBufferContentsLineByLine(t, showStdout, "show stdout")
logBufferContentsLineByLine(t, showStderr, "show stderr")
stdout := showStdout.String()
stderr := showStderr.String()
assert.Nil(t, err)
assert.Regexp(t, regexp.MustCompile("Skipping terragrunt module .*fixture-skip/skip-true/terragrunt.hcl due to skip = true."), stderr)
assert.Contains(t, stdout, "hello, Ernie")
assert.Contains(t, stdout, "hello, Bert")
}
func TestApplyAllSkipFalse(t *testing.T) {
t.Parallel()
rootPath := copyEnvironment(t, TEST_FIXTURE_SKIP)
rootPath = util.JoinPath(rootPath, TEST_FIXTURE_SKIP, "skip-false")
showStdout := bytes.Buffer{}
showStderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &showStdout, &showStderr)
logBufferContentsLineByLine(t, showStdout, "show stdout")
logBufferContentsLineByLine(t, showStderr, "show stderr")
stdout := showStdout.String()
stderr := showStderr.String()
assert.Nil(t, err)
assert.Contains(t, stdout, "hello, Hobbs")
assert.Contains(t, stdout, "hello, Ernie")
assert.Contains(t, stdout, "hello, Bert")
assert.NotContains(t, stderr, "Skipping terragrunt module")
}
func TestTerragruntInfo(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_HOOKS_INIT_ONCE_WITH_SOURCE_NO_BACKEND)
tmpEnvPath := copyEnvironment(t, "fixture-hooks/init-once")
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_HOOKS_INIT_ONCE_WITH_SOURCE_NO_BACKEND)
showStdout := bytes.Buffer{}
showStderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt terragrunt-info --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &showStdout, &showStderr)
assert.Nil(t, err)
logBufferContentsLineByLine(t, showStdout, "show stdout")
var dat cli.TerragruntInfoGroup
errUnmarshal := json.Unmarshal(showStdout.Bytes(), &dat)
assert.Nil(t, errUnmarshal)
assert.Equal(t, dat.DownloadDir, fmt.Sprintf("%s/%s", rootPath, TERRAGRUNT_CACHE))
assert.Equal(t, dat.TerraformBinary, TERRAFORM_BINARY)
assert.Equal(t, dat.IamRole, "")
}
// Test case for yamldecode bug: https://github.com/gruntwork-io/terragrunt/issues/834
func TestYamlDecodeRegressions(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_REGRESSIONS)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_REGRESSIONS)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_REGRESSIONS, "yamldecode")
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
// Check the output of yamldecode and make sure it doesn't parse the string incorrectly
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr),
)
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
assert.Equal(t, outputs["test1"].Value, "003")
assert.Equal(t, outputs["test2"].Value, "1.00")
assert.Equal(t, outputs["test3"].Value, "0ba")
}
// We test the path with remote_state blocks by:
// - Applying all modules initially
// - Deleting the local state of the nested deep dependency
// - Running apply on the root module
// If output optimization is working, we should still get the same correct output even though the state of the upmost
// module has been destroyed.
func TestDependencyOutputOptimization(t *testing.T) {
expectOutputLogs := []string{
`Running command: terraform init -get=false prefix=\[.*fixture-get-output/nested-optimization/dep\]`,
}
dependencyOutputOptimizationTest(t, "nested-optimization", true, expectOutputLogs)
}
func TestDependencyOutputOptimizationSkipInit(t *testing.T) {
expectOutputLogs := []string{
`Detected module .*nested-optimization/dep/terragrunt.hcl is already init-ed. Retrieving outputs directly from working directory. prefix=\[.*fixture-get-output/nested-optimization/dep\]`,
}
dependencyOutputOptimizationTest(t, "nested-optimization", false, expectOutputLogs)
}
func TestDependencyOutputOptimizationNoGenerate(t *testing.T) {
expectOutputLogs := []string{
`Running command: terraform init -get=false prefix=\[.*fixture-get-output/nested-optimization-nogen/dep\]`,
}
dependencyOutputOptimizationTest(t, "nested-optimization-nogen", true, expectOutputLogs)
}
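// dependencyOutputOptimizationTest is the shared body of the optimization tests above:
// it applies the whole stack, deletes the deep dependency's local state, re-reads the
// root module's outputs, and asserts both that the value is unchanged and that the
// expected init / skip-init log lines appear on stderr.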
func dependencyOutputOptimizationTest(t *testing.T, moduleName string, forceInit bool, expectedOutputLogs []string) {
t.Parallel()
expectedOutput := `They said, "No, The answer is 42"`
generatedUniqueId := uniqueId()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := filepath.Join(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, moduleName)
rootTerragruntConfigPath := filepath.Join(rootPath, config.DefaultTerragruntConfigPath)
livePath := filepath.Join(rootPath, "live")
deepDepPath := filepath.Join(rootPath, "deepdep")
depPath := filepath.Join(rootPath, "dep")
s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(generatedUniqueId))
lockTableName := fmt.Sprintf("terragrunt-test-locks-%s", strings.ToLower(generatedUniqueId))
defer deleteS3Bucket(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)
defer cleanupTableForTest(t, lockTableName, TERRAFORM_REMOTE_STATE_S3_REGION)
copyTerragruntConfigAndFillPlaceholders(t, rootTerragruntConfigPath, rootTerragruntConfigPath, s3BucketName, lockTableName, TERRAFORM_REMOTE_STATE_S3_REGION)
runTerragrunt(t, fmt.Sprintf("terragrunt apply-all --terragrunt-log-level debug --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
// We need to bust the output cache that stores the dependency outputs so that the second run pulls the outputs.
// This is only a problem during testing, where the process is shared across terragrunt runs.
config.ClearOutputCache()
// verify expected output
stdout, _, err := runTerragruntCommandWithOutput(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-log-level debug --terragrunt-non-interactive --terragrunt-working-dir %s", livePath))
require.NoError(t, err)
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout), &outputs))
assert.Equal(t, expectedOutput, outputs["output"].Value)
// If we want to force reinit, delete the relevant .terraform directories
if forceInit {
cleanupTerraformFolder(t, depPath)
}
// Now delete the deepdep state and verify still works (note we need to bust the cache again)
config.ClearOutputCache()
require.NoError(t, os.Remove(filepath.Join(deepDepPath, "terraform.tfstate")))
reout, reerr, err := runTerragruntCommandWithOutput(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-log-level debug --terragrunt-non-interactive --terragrunt-working-dir %s", livePath))
require.NoError(t, err)
require.NoError(t, json.Unmarshal([]byte(reout), &outputs))
assert.Equal(t, expectedOutput, outputs["output"].Value)
for _, logRegexp := range expectedOutputLogs {
re, err := regexp.Compile(logRegexp)
require.NoError(t, err)
matches := re.FindAllString(reerr, -1)
assert.Greater(t, len(matches), 0)
}
}
func TestDependencyOutputOptimizationDisableTest(t *testing.T) {
t.Parallel()
expectedOutput := `They said, "No, The answer is 42"`
generatedUniqueId := uniqueId()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := filepath.Join(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "nested-optimization-disable")
rootTerragruntConfigPath := filepath.Join(rootPath, config.DefaultTerragruntConfigPath)
livePath := filepath.Join(rootPath, "live")
deepDepPath := filepath.Join(rootPath, "deepdep")
s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(generatedUniqueId))
lockTableName := fmt.Sprintf("terragrunt-test-locks-%s", strings.ToLower(generatedUniqueId))
defer deleteS3Bucket(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)
defer cleanupTableForTest(t, lockTableName, TERRAFORM_REMOTE_STATE_S3_REGION)
copyTerragruntConfigAndFillPlaceholders(t, rootTerragruntConfigPath, rootTerragruntConfigPath, s3BucketName, lockTableName, TERRAFORM_REMOTE_STATE_S3_REGION)
runTerragrunt(t, fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
// We need to bust the output cache that stores the dependency outputs so that the second run pulls the outputs.
// This is only a problem during testing, where the process is shared across terragrunt runs.
config.ClearOutputCache()
// verify expected output
stdout, _, err := runTerragruntCommandWithOutput(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", livePath))
require.NoError(t, err)
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout), &outputs))
assert.Equal(t, expectedOutput, outputs["output"].Value)
// Now delete the deepdep state and verify it no longer works, because it tries to fetch the deepdep dependency
config.ClearOutputCache()
require.NoError(t, os.Remove(filepath.Join(deepDepPath, "terraform.tfstate")))
require.NoError(t, os.RemoveAll(filepath.Join(deepDepPath, ".terraform")))
_, _, err = runTerragruntCommandWithOutput(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", livePath))
require.Error(t, err)
}
func TestDependencyOutput(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "integration")
runTerragrunt(t, fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
// verify expected output 42
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
app3Path := util.JoinPath(rootPath, "app3")
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", app3Path), &stdout, &stderr),
)
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
assert.Equal(t, int(outputs["z"].Value.(float64)), 42)
}
func TestDependencyOutputErrorBeforeApply(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := filepath.Join(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "integration")
app3Path := filepath.Join(rootPath, "app3")
showStdout := bytes.Buffer{}
showStderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt plan --terragrunt-non-interactive --terragrunt-working-dir %s", app3Path), &showStdout, &showStderr)
assert.Error(t, err)
// Verify that we fail because the dependency is not applied yet
assert.Contains(t, err.Error(), "has not been applied yet")
logBufferContentsLineByLine(t, showStdout, "show stdout")
logBufferContentsLineByLine(t, showStderr, "show stderr")
}
func TestDependencyOutputSkipOutputs(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := filepath.Join(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "integration")
emptyPath := filepath.Join(rootPath, "empty")
showStdout := bytes.Buffer{}
showStderr := bytes.Buffer{}
// Test that even if the dependency (app1) is not applied, using skip_outputs will skip pulling the outputs so there
// will be no errors.
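// A hedged sketch of the mechanism (illustrative; the path and block label are assumptions, not the literal
// fixture contents):
//
//   dependency "app1" {
//     config_path  = "../app1"
//     skip_outputs = true
//   }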
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt plan --terragrunt-non-interactive --terragrunt-working-dir %s", emptyPath), &showStdout, &showStderr)
assert.NoError(t, err)
logBufferContentsLineByLine(t, showStdout, "show stdout")
logBufferContentsLineByLine(t, showStderr, "show stderr")
}
func TestDependencyOutputSkipOutputsWithMockOutput(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := filepath.Join(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "mock-outputs")
dependent3Path := filepath.Join(rootPath, "dependent3")
showStdout := bytes.Buffer{}
showStderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", dependent3Path), &showStdout, &showStderr)
assert.NoError(t, err)
logBufferContentsLineByLine(t, showStdout, "show stdout")
logBufferContentsLineByLine(t, showStderr, "show stderr")
// verify expected output when mocks are used: The answer is 0
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", dependent3Path), &stdout, &stderr),
)
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
assert.Equal(t, outputs["truth"].Value, "The answer is 0")
// Now apply-all so that the dependency is applied, and verify it still uses the mock output
err = runTerragruntCommand(t, fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &showStdout, &showStderr)
assert.NoError(t, err)
logBufferContentsLineByLine(t, showStdout, "show stdout")
logBufferContentsLineByLine(t, showStderr, "show stderr")
// verify expected output when mocks are used: The answer is 0
stdout = bytes.Buffer{}
stderr = bytes.Buffer{}
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", dependent3Path), &stdout, &stderr),
)
outputs = map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
assert.Equal(t, outputs["truth"].Value, "The answer is 0")
}
// Test that when you have a mock_output on a dependency, the dependency will use the mock as the output instead
// of erroring out.
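//
// A minimal sketch of the mechanism (illustrative; the block label and output name are assumptions about the
// fixture under fixture-get-output/mock-outputs): mock_outputs stands in for the real outputs until the
// dependency has been applied:
//
//   dependency "dep" {
//     config_path = "../dep"
//     mock_outputs = {
//       the_answer = 0
//     }
//   }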
func TestDependencyMockOutput(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := filepath.Join(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "mock-outputs")
dependent1Path := filepath.Join(rootPath, "dependent1")
showStdout := bytes.Buffer{}
showStderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", dependent1Path), &showStdout, &showStderr)
assert.NoError(t, err)
logBufferContentsLineByLine(t, showStdout, "show stdout")
logBufferContentsLineByLine(t, showStderr, "show stderr")
// verify expected output when mocks are used: The answer is 0
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", dependent1Path), &stdout, &stderr),
)
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
assert.Equal(t, outputs["truth"].Value, "The answer is 0")
// We need to bust the output cache that stores the dependency outputs so that the second run pulls the outputs.
// This is only a problem during testing, where the process is shared across terragrunt runs.
config.ClearOutputCache()
// Now apply-all so that the dependency is applied, and verify it uses the dependency output
err = runTerragruntCommand(t, fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &showStdout, &showStderr)
assert.NoError(t, err)
logBufferContentsLineByLine(t, showStdout, "show stdout")
logBufferContentsLineByLine(t, showStderr, "show stderr")
// verify expected output now that the real dependency output is used: The answer is 42
stdout = bytes.Buffer{}
stderr = bytes.Buffer{}
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", dependent1Path), &stdout, &stderr),
)
outputs = map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
assert.Equal(t, outputs["truth"].Value, "The answer is 42")
}
// Test the default behavior when mock_outputs_merge_with_state is not set. It should behave as it did before this
// parameter was added: it will fail on any command if the parent state is not applied, because the state of the
// parent exists and already has an output, but not the newly added output.
func TestDependencyMockOutputMergeWithStateDefault(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "mock-outputs-merge-with-state", "merge-with-state-default", "live")
parentPath := filepath.Join(rootPath, "parent")
childPath := filepath.Join(rootPath, "child")
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt plan --terragrunt-non-interactive --terragrunt-working-dir %s", parentPath), &stdout, &stderr)
assert.NoError(t, err)
logBufferContentsLineByLine(t, stdout, "plan stdout")
logBufferContentsLineByLine(t, stderr, "plan stderr")
// Verify we have the default behavior if mock_outputs_merge_with_state is not set
stdout.Reset()
stderr.Reset()
err = runTerragruntCommand(t, fmt.Sprintf("terragrunt plan --terragrunt-non-interactive --terragrunt-working-dir %s", childPath), &stdout, &stderr)
assert.Error(t, err)
// Verify that we fail because the dependency is not applied yet, the new attribute is not available, and in
// this case mocked outputs are not used.
assert.Contains(t, err.Error(), "This object does not have an attribute named \"test_output2\"")
logBufferContentsLineByLine(t, stdout, "plan stdout")
logBufferContentsLineByLine(t, stderr, "plan stderr")
}
// Test when mock_outputs_merge_with_state is explicitly set to false. It should behave as it did before this
// parameter was added: it will fail on any command if the parent state is not applied, because the state of the
// parent exists and already has an output, but not the newly added output.
func TestDependencyMockOutputMergeWithStateFalse(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "mock-outputs-merge-with-state", "merge-with-state-false", "live")
parentPath := filepath.Join(rootPath, "parent")
childPath := filepath.Join(rootPath, "child")
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt plan --terragrunt-non-interactive --terragrunt-working-dir %s", parentPath), &stdout, &stderr)
assert.NoError(t, err)
logBufferContentsLineByLine(t, stdout, "plan stdout")
logBufferContentsLineByLine(t, stderr, "plan stderr")
// Verify we have the default behavior if mock_outputs_merge_with_state is set to false
stdout.Reset()
stderr.Reset()
err = runTerragruntCommand(t, fmt.Sprintf("terragrunt plan --terragrunt-non-interactive --terragrunt-working-dir %s", childPath), &stdout, &stderr)
assert.Error(t, err)
// Verify that we fail because the dependency is not applied yet, the new attribute is not available, and in
// this case mocked outputs are not used.
assert.Contains(t, err.Error(), "This object does not have an attribute named \"test_output2\"")
logBufferContentsLineByLine(t, stdout, "plan stdout")
logBufferContentsLineByLine(t, stderr, "plan stderr")
}
// Test when mock_outputs_merge_with_state is explicitly set to true.
// It will mock the newly added output from the parent as it was not already applied to the state.
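//
// A hedged sketch of the configuration this exercises (illustrative; the block label is an assumption, while the
// output name and mock value mirror the assertions below):
//
//   dependency "parent" {
//     config_path = "../parent"
//     mock_outputs = {
//       test_output2 = "fake-data2"
//     }
//     mock_outputs_merge_with_state = true
//   }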
func TestDependencyMockOutputMergeWithStateTrue(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "mock-outputs-merge-with-state", "merge-with-state-true", "live")
parentPath := filepath.Join(rootPath, "parent")
childPath := filepath.Join(rootPath, "child")
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt plan --terragrunt-non-interactive --terragrunt-working-dir %s", parentPath), &stdout, &stderr)
assert.NoError(t, err)
logBufferContentsLineByLine(t, stdout, "plan stdout")
logBufferContentsLineByLine(t, stderr, "plan stderr")
// Verify mocked outputs are used if mock_outputs_merge_with_state is set to true and some output in the parent are not applied yet.
stdout.Reset()
stderr.Reset()
err = runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", childPath), &stdout, &stderr)
assert.NoError(t, err)
logBufferContentsLineByLine(t, stdout, "apply stdout")
logBufferContentsLineByLine(t, stderr, "apply stderr")
// Now check the outputs to make sure they are as expected
stdout.Reset()
stderr.Reset()
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", childPath), &stdout, &stderr),
)
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
assert.Equal(t, outputs["test_output1_from_parent"].Value, "value1")
assert.Equal(t, outputs["test_output2_from_parent"].Value, "fake-data2")
logBufferContentsLineByLine(t, stdout, "output stdout")
logBufferContentsLineByLine(t, stderr, "output stderr")
}
// Test when mock_outputs_merge_with_state is explicitly set to true, but a command that is not allowed is used. It
// should ignore the mock output.
func TestDependencyMockOutputMergeWithStateTrueNotAllowed(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "mock-outputs-merge-with-state", "merge-with-state-true-validate-only", "live")
parentPath := filepath.Join(rootPath, "parent")
childPath := filepath.Join(rootPath, "child")
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt plan --terragrunt-non-interactive --terragrunt-working-dir %s", parentPath), &stdout, &stderr)
assert.NoError(t, err)
logBufferContentsLineByLine(t, stdout, "plan stdout")
logBufferContentsLineByLine(t, stderr, "plan stderr")
// Verify mocked outputs are used if mock_outputs_merge_with_state is set to true with an allowed command and some
// output in the parent are not applied yet.
stdout.Reset()
stderr.Reset()
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt validate --terragrunt-non-interactive --terragrunt-working-dir %s", childPath), &stdout, &stderr),
)
// ... but not when a disallowed command is used
require.Error(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", childPath), &stdout, &stderr),
)
}
// Test when mock_outputs_merge_with_state is explicitly set to true.
// Mock should not be used as the parent state was already fully applied.
func TestDependencyMockOutputMergeWithStateNoOverride(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "mock-outputs-merge-with-state", "merge-with-state-no-override", "live")
parentPath := filepath.Join(rootPath, "parent")
childPath := filepath.Join(rootPath, "child")
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt plan --terragrunt-non-interactive --terragrunt-working-dir %s", parentPath), &stdout, &stderr)
assert.NoError(t, err)
logBufferContentsLineByLine(t, stdout, "show stdout")
logBufferContentsLineByLine(t, stderr, "show stderr")
// Verify mocked outputs are not used if mock_outputs_merge_with_state is set to true and all outputs in the parent have been applied.
stdout.Reset()
stderr.Reset()
err = runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", childPath), &stdout, &stderr)
assert.NoError(t, err)
// Now check the outputs to make sure they are as expected
stdout.Reset()
stderr.Reset()
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", childPath), &stdout, &stderr),
)
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
assert.Equal(t, outputs["test_output1_from_parent"].Value, "value1")
assert.Equal(t, outputs["test_output2_from_parent"].Value, "value2")
logBufferContentsLineByLine(t, stdout, "show stdout")
logBufferContentsLineByLine(t, stderr, "show stderr")
}
// Test that when you have a mock_output on a dependency, the dependency will use the mock as the output instead
// of erroring out when running an allowed command.
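//
// The restriction is expressed via mock_outputs_allowed_terraform_commands. A hedged sketch (illustrative; the
// block label and output name are assumptions about the dependent2 fixture):
//
//   dependency "dep" {
//     config_path = "../dep"
//     mock_outputs = {
//       the_answer = 0
//     }
//     mock_outputs_allowed_terraform_commands = ["validate"]
//   }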
func TestDependencyMockOutputRestricted(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := filepath.Join(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "mock-outputs")
dependent2Path := filepath.Join(rootPath, "dependent2")
showStdout := bytes.Buffer{}
showStderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", dependent2Path), &showStdout, &showStderr)
assert.Error(t, err)
// Verify that we fail because the dependency is not applied yet
assert.Contains(t, err.Error(), "has not been applied yet")
logBufferContentsLineByLine(t, showStdout, "show stdout")
logBufferContentsLineByLine(t, showStderr, "show stderr")
// Verify we can run when using one of the allowed commands
showStdout.Reset()
showStderr.Reset()
err = runTerragruntCommand(t, fmt.Sprintf("terragrunt validate --terragrunt-non-interactive --terragrunt-working-dir %s", dependent2Path), &showStdout, &showStderr)
assert.NoError(t, err)
logBufferContentsLineByLine(t, showStdout, "show stdout")
logBufferContentsLineByLine(t, showStderr, "show stderr")
// Verify that validate-all works as well.
showStdout.Reset()
showStderr.Reset()
err = runTerragruntCommand(t, fmt.Sprintf("terragrunt validate-all --terragrunt-non-interactive --terragrunt-working-dir %s", dependent2Path), &showStdout, &showStderr)
assert.NoError(t, err)
logBufferContentsLineByLine(t, showStdout, "show stdout")
logBufferContentsLineByLine(t, showStderr, "show stderr")
showStdout.Reset()
showStderr.Reset()
err = runTerragruntCommand(t, fmt.Sprintf("terragrunt validate-all --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &showStdout, &showStderr)
assert.NoError(t, err)
logBufferContentsLineByLine(t, showStdout, "show stdout")
logBufferContentsLineByLine(t, showStderr, "show stderr")
}
func TestDependencyOutputTypeConversion(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
cleanupTerraformFolder(t, TEST_FIXTURE_INPUTS)
tmpEnvPath := copyEnvironment(t, ".")
inputsPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_INPUTS)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "type-conversion")
// First apply the inputs module
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", inputsPath))
// Then apply the outputs module
showStdout := bytes.Buffer{}
showStderr := bytes.Buffer{}
assert.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &showStdout, &showStderr),
)
logBufferContentsLineByLine(t, showStdout, "show stdout")
logBufferContentsLineByLine(t, showStderr, "show stderr")
// Now check the outputs to make sure they are as expected
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr),
)
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
assert.Equal(t, outputs["bool"].Value, true)
assert.Equal(t, outputs["list_bool"].Value, []interface{}{true, false})
assert.Equal(t, outputs["list_number"].Value, []interface{}{1.0, 2.0, 3.0})
assert.Equal(t, outputs["list_string"].Value, []interface{}{"a", "b", "c"})
assert.Equal(t, outputs["map_bool"].Value, map[string]interface{}{"foo": true, "bar": false, "baz": true})
assert.Equal(t, outputs["map_number"].Value, map[string]interface{}{"foo": 42.0, "bar": 12345.0})
assert.Equal(t, outputs["map_string"].Value, map[string]interface{}{"foo": "bar"})
assert.Equal(t, outputs["number"].Value, 42.0)
assert.Equal(t, outputs["object"].Value, map[string]interface{}{"list": []interface{}{1.0, 2.0, 3.0}, "map": map[string]interface{}{"foo": "bar"}, "num": 42.0, "str": "string"})
assert.Equal(t, outputs["string"].Value, "string")
assert.Equal(t, outputs["from_env"].Value, "default")
}
// Regression testing for https://github.com/gruntwork-io/terragrunt/issues/1102: Ordering keys from
// maps to avoid random placements when terraform file is generated.
func TestOrderedMapOutputRegressions1102(t *testing.T) {
t.Parallel()
generateTestCase := filepath.Join(TEST_FIXTURE_GET_OUTPUT, "regression-1102")
cleanupTerraformFolder(t, generateTestCase)
cleanupTerragruntFolder(t, generateTestCase)
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
command := fmt.Sprintf("terragrunt apply --terragrunt-non-interactive --terragrunt-working-dir %s", generateTestCase)
path := filepath.Join(generateTestCase, "backend.tf")
// runs terragrunt for the first time and checks the output "backend.tf" file.
require.NoError(
t,
runTerragruntCommand(t, command, &stdout, &stderr),
)
expected, err := ioutil.ReadFile(path)
require.NoError(t, err)
require.Contains(t, string(expected), "local")
// runs terragrunt again. All the outputs must be
// equal to the first run.
for i := 0; i < 20; i++ {
require.NoError(
t,
runTerragruntCommand(t, command, &stdout, &stderr),
)
actual, err := ioutil.ReadFile(path)
require.NoError(t, err)
require.Equal(t, expected, actual)
}
}
// Test that we get the expected error message about dependency cycles when there is a cycle in the dependency chain
func TestDependencyOutputCycleHandling(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
testCases := []string{
"aa",
"aba",
"abca",
"abcda",
}
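// For illustration, the shortest cycle ("aa") could be produced by a module whose dependency block points back at
// its own directory (a sketch of the idea, not the literal fixture contents):
//
//   dependency "self" {
//     config_path = "."
//   }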
for _, testCase := range testCases {
// Capture the range variable in the for loop so that the binding is consistent across parallel runs.
testCase := testCase
t.Run(testCase, func(t *testing.T) {
t.Parallel()
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "cycle", testCase)
fooPath := util.JoinPath(rootPath, "foo")
planStdout := bytes.Buffer{}
planStderr := bytes.Buffer{}
err := runTerragruntCommand(
t,
fmt.Sprintf("terragrunt plan --terragrunt-non-interactive --terragrunt-working-dir %s", fooPath),
&planStdout,
&planStderr,
)
logBufferContentsLineByLine(t, planStdout, "plan stdout")
logBufferContentsLineByLine(t, planStderr, "plan stderr")
assert.Error(t, err)
assert.True(t, strings.Contains(err.Error(), "Found a dependency cycle between modules"))
})
}
}
// Regression testing for https://github.com/gruntwork-io/terragrunt/issues/854: Referencing a dependency that is a
// subdirectory of the current config, which includes an `include` block has problems resolving the correct relative
// path.
func TestDependencyOutputRegression854(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "regression-854", "root")
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(
t,
fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath),
&stdout,
&stderr,
)
logBufferContentsLineByLine(t, stdout, "stdout")
logBufferContentsLineByLine(t, stderr, "stderr")
require.NoError(t, err)
}
// Regression testing for https://github.com/gruntwork-io/terragrunt/issues/906
func TestDependencyOutputSameOutputConcurrencyRegression(t *testing.T) {
t.Parallel()
// Use a func to isolate each test run to a single s3 bucket that is deleted afterwards. We run the test multiple
// times because the underlying error we are trying to test against is nondeterministic, and thus may not reproduce
// on the first run.
testCase := func() {
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "regression-906")
// Make sure to fill in the s3 bucket name in the config. Also ensure the bucket is deleted before the next
// loop iteration.
s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s%s", strings.ToLower(uniqueId()), strings.ToLower(uniqueId()))
defer deleteS3BucketWithRetry(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)
commonDepConfigPath := util.JoinPath(rootPath, "common-dep", "terragrunt.hcl")
copyTerragruntConfigAndFillPlaceholders(t, commonDepConfigPath, commonDepConfigPath, s3BucketName, "not-used", "not-used")
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(
t,
fmt.Sprintf("terragrunt apply-all --terragrunt-source-update --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath),
&stdout,
&stderr,
)
logBufferContentsLineByLine(t, stdout, "stdout")
logBufferContentsLineByLine(t, stderr, "stderr")
require.NoError(t, err)
}
for i := 0; i < 3; i++ {
testCase()
// We need to bust the output cache that stores the dependency outputs so that the second run pulls the outputs.
// This is only a problem during testing, where the process is shared across terragrunt runs.
config.ClearOutputCache()
}
}
// Regression testing for a bug where terragrunt output runs for dependency blocks were executed in the
// terragrunt-cache of the child, not the parent.
func TestDependencyOutputCachePathBug(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "localstate", "live")
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(
t,
fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath),
&stdout,
&stderr,
)
logBufferContentsLineByLine(t, stdout, "stdout")
logBufferContentsLineByLine(t, stderr, "stderr")
require.NoError(t, err)
}
func TestDependencyOutputWithTerragruntSource(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "regression-1124", "live")
modulePath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "regression-1124", "modules")
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(
t,
fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-working-dir %s --terragrunt-source %s", rootPath, modulePath),
&stdout,
&stderr,
)
logBufferContentsLineByLine(t, stdout, "stdout")
logBufferContentsLineByLine(t, stderr, "stderr")
require.NoError(t, err)
}
func TestDependencyOutputWithHooks(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "regression-1273")
depPathFileOut := util.JoinPath(rootPath, "dep", "file.out")
mainPath := util.JoinPath(rootPath, "main")
mainPathFileOut := util.JoinPath(mainPath, "file.out")
runTerragrunt(t, fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
// We need to bust the output cache that stores the dependency outputs so that the second run pulls the outputs.
// This is only a problem during testing, where the process is shared across terragrunt runs.
config.ClearOutputCache()
// The file should exist in the first run.
assert.True(t, util.FileExists(depPathFileOut))
assert.False(t, util.FileExists(mainPathFileOut))
// Now delete file and run just main again. It should NOT create file.out.
require.NoError(t, os.Remove(depPathFileOut))
runTerragrunt(t, fmt.Sprintf("terragrunt plan --terragrunt-non-interactive --terragrunt-working-dir %s", mainPath))
assert.False(t, util.FileExists(depPathFileOut))
assert.False(t, util.FileExists(mainPathFileOut))
}
func TestDeepDependencyOutputWithMock(t *testing.T) {
// Test that the terraform command flows through to mock output retrieval for deeper dependencies. Previously the
// terraform command was being overwritten, so by the time the deep dependency retrieval ran, it had been replaced
// with "output" instead of the original command.
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
rootPath := filepath.Join(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "nested-mocks", "live")
// Since we haven't applied anything, this should only succeed if mock outputs are used.
runTerragrunt(t, fmt.Sprintf("terragrunt validate --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
}
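// TestAWSGetCallerIdentityFunctions exercises the get_aws_account_id(), get_aws_caller_identity_arn() and
// get_aws_caller_identity_user_id() built-ins. A hedged sketch of how the fixture presumably wires them into
// outputs (the input names mirror the assertions below; the exact fixture contents may differ):
//
//   inputs = {
//     account = get_aws_account_id()
//     arn     = get_aws_caller_identity_arn()
//     user_id = get_aws_caller_identity_user_id()
//   }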
func TestAWSGetCallerIdentityFunctions(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_AWS_GET_CALLER_IDENTITY)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_AWS_GET_CALLER_IDENTITY)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_AWS_GET_CALLER_IDENTITY)
runTerragrunt(t, fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
// verify expected outputs are not empty
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr),
)
// Get values from STS
sess, err := session.NewSession()
if err != nil {
t.Fatalf("Error while creating AWS session: %v", err)
}
identity, err := sts.New(sess).GetCallerIdentity(nil)
if err != nil {
t.Fatalf("Error while getting AWS caller identity: %v", err)
}
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
assert.Equal(t, outputs["account"].Value, *identity.Account)
assert.Equal(t, outputs["arn"].Value, *identity.Arn)
assert.Equal(t, outputs["user_id"].Value, *identity.UserId)
}
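// TestGetPlatform exercises the get_platform() built-in, which returns a runtime.GOOS-style value ("linux",
// "darwin", ...). A hedged sketch of the presumed fixture wiring (the input name mirrors the assertion below):
//
//   inputs = {
//     platform = get_platform()
//   }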
func TestGetPlatform(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_GET_PLATFORM)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_PLATFORM)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_GET_PLATFORM)
runTerragrunt(t, fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
// verify expected outputs are not empty
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr),
)
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
platform, hasPlatform := outputs["platform"]
require.True(t, hasPlatform)
require.Equal(t, platform.Value, runtime.GOOS)
}
func TestDataDir(t *testing.T) {
// Cannot be run in parallel with other tests as it modifies the process's environment.
cleanupTerraformFolder(t, TEST_FIXTURE_DIRS_PATH)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_DIRS_PATH)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_DIRS_PATH)
os.Setenv("TF_DATA_DIR", util.JoinPath(tmpEnvPath, "data_dir"))
defer os.Unsetenv("TF_DATA_DIR")
var (
stdout bytes.Buffer
stderr bytes.Buffer
)
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt plan --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr)
erroutput := stderr.String()
if err != nil {
t.Errorf("Did not expect to get an error: %s", err.Error())
}
assert.Contains(t, erroutput, "Initializing provider plugins")
var (
stdout2 bytes.Buffer
stderr2 bytes.Buffer
)
err = runTerragruntCommand(t, fmt.Sprintf("terragrunt plan --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout2, &stderr2)
erroutput2 := stderr2.String()
if err != nil {
t.Errorf("Did not expect to get an error: %s", err.Error())
}
assert.NotContains(t, erroutput2, "Initializing provider plugins")
}
func TestReadTerragruntConfigWithDependency(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_READ_CONFIG)
cleanupTerraformFolder(t, TEST_FIXTURE_INPUTS)
tmpEnvPath := copyEnvironment(t, ".")
inputsPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_INPUTS)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_READ_CONFIG, "with_dependency")
// First apply the inputs module
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", inputsPath))
// Then apply the read config module
showStdout := bytes.Buffer{}
showStderr := bytes.Buffer{}
assert.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &showStdout, &showStderr),
)
logBufferContentsLineByLine(t, showStdout, "show stdout")
logBufferContentsLineByLine(t, showStderr, "show stderr")
// Now check the outputs to make sure they are as expected
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr),
)
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
assert.Equal(t, outputs["bool"].Value, true)
assert.Equal(t, outputs["list_bool"].Value, []interface{}{true, false})
assert.Equal(t, outputs["list_number"].Value, []interface{}{1.0, 2.0, 3.0})
assert.Equal(t, outputs["list_string"].Value, []interface{}{"a", "b", "c"})
assert.Equal(t, outputs["map_bool"].Value, map[string]interface{}{"foo": true, "bar": false, "baz": true})
assert.Equal(t, outputs["map_number"].Value, map[string]interface{}{"foo": 42.0, "bar": 12345.0})
assert.Equal(t, outputs["map_string"].Value, map[string]interface{}{"foo": "bar"})
assert.Equal(t, outputs["number"].Value, 42.0)
assert.Equal(t, outputs["object"].Value, map[string]interface{}{"list": []interface{}{1.0, 2.0, 3.0}, "map": map[string]interface{}{"foo": "bar"}, "num": 42.0, "str": "string"})
assert.Equal(t, outputs["string"].Value, "string")
assert.Equal(t, outputs["from_env"].Value, "default")
}
func TestReadTerragruntConfigFromDependency(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_READ_CONFIG)
tmpEnvPath := copyEnvironment(t, ".")
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_READ_CONFIG, "from_dependency")
showStdout := bytes.Buffer{}
showStderr := bytes.Buffer{}
assert.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt apply-all --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &showStdout, &showStderr),
)
logBufferContentsLineByLine(t, showStdout, "show stdout")
logBufferContentsLineByLine(t, showStderr, "show stderr")
// Now check the outputs to make sure they are as expected
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr),
)
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
assert.Equal(t, outputs["bar"].Value, "hello world")
}
func TestReadTerragruntConfigWithDefault(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_READ_CONFIG)
rootPath := util.JoinPath(TEST_FIXTURE_READ_CONFIG, "with_default")
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
// check the outputs to make sure they are as expected
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr),
)
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
assert.Equal(t, outputs["data"].Value, "default value")
}
func TestReadTerragruntConfigWithOriginalTerragruntDir(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_READ_CONFIG)
rootPath := util.JoinPath(TEST_FIXTURE_READ_CONFIG, "with_original_terragrunt_dir")
rootPathAbs, err := filepath.Abs(rootPath)
require.NoError(t, err)
fooPathAbs := filepath.Join(rootPathAbs, "foo")
depPathAbs := filepath.Join(rootPathAbs, "dep")
// Run apply on the dependency module and make sure we get the outputs we expect
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", depPathAbs))
depStdout := bytes.Buffer{}
depStderr := bytes.Buffer{}
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", depPathAbs), &depStdout, &depStderr),
)
depOutputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(depStdout.String()), &depOutputs))
assert.Equal(t, depPathAbs, depOutputs["terragrunt_dir"].Value)
assert.Equal(t, depPathAbs, depOutputs["original_terragrunt_dir"].Value)
assert.Equal(t, fooPathAbs, depOutputs["bar_terragrunt_dir"].Value)
assert.Equal(t, depPathAbs, depOutputs["bar_original_terragrunt_dir"].Value)
// Run apply on the root module and make sure we get the expected outputs
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
rootStdout := bytes.Buffer{}
rootStderr := bytes.Buffer{}
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &rootStdout, &rootStderr),
)
rootOutputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(rootStdout.String()), &rootOutputs))
assert.Equal(t, fooPathAbs, rootOutputs["terragrunt_dir"].Value)
assert.Equal(t, rootPathAbs, rootOutputs["original_terragrunt_dir"].Value)
assert.Equal(t, depPathAbs, rootOutputs["dep_terragrunt_dir"].Value)
assert.Equal(t, depPathAbs, rootOutputs["dep_original_terragrunt_dir"].Value)
assert.Equal(t, fooPathAbs, rootOutputs["dep_bar_terragrunt_dir"].Value)
assert.Equal(t, depPathAbs, rootOutputs["dep_bar_original_terragrunt_dir"].Value)
// Run 'run-all apply' and make sure all the outputs are identical in the root module and the dependency module
runTerragrunt(t, fmt.Sprintf("terragrunt run-all apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
runAllRootStdout := bytes.Buffer{}
runAllRootStderr := bytes.Buffer{}
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &runAllRootStdout, &runAllRootStderr),
)
runAllRootOutputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(runAllRootStdout.String()), &runAllRootOutputs))
runAllDepStdout := bytes.Buffer{}
runAllDepStderr := bytes.Buffer{}
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", depPathAbs), &runAllDepStdout, &runAllDepStderr),
)
runAllDepOutputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(runAllDepStdout.String()), &runAllDepOutputs))
assert.Equal(t, fooPathAbs, runAllRootOutputs["terragrunt_dir"].Value)
assert.Equal(t, rootPathAbs, runAllRootOutputs["original_terragrunt_dir"].Value)
assert.Equal(t, depPathAbs, runAllRootOutputs["dep_terragrunt_dir"].Value)
assert.Equal(t, depPathAbs, runAllRootOutputs["dep_original_terragrunt_dir"].Value)
assert.Equal(t, fooPathAbs, runAllRootOutputs["dep_bar_terragrunt_dir"].Value)
assert.Equal(t, depPathAbs, runAllRootOutputs["dep_bar_original_terragrunt_dir"].Value)
assert.Equal(t, depPathAbs, runAllDepOutputs["terragrunt_dir"].Value)
assert.Equal(t, depPathAbs, runAllDepOutputs["original_terragrunt_dir"].Value)
assert.Equal(t, fooPathAbs, runAllDepOutputs["bar_terragrunt_dir"].Value)
assert.Equal(t, depPathAbs, runAllDepOutputs["bar_original_terragrunt_dir"].Value)
}
func TestReadTerragruntConfigFull(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_READ_CONFIG)
rootPath := util.JoinPath(TEST_FIXTURE_READ_CONFIG, "full")
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
// check the outputs to make sure they are as expected
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr),
)
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
// Primitive config attributes
assert.Equal(t, outputs["terraform_binary"].Value, "terragrunt")
assert.Equal(t, outputs["terraform_version_constraint"].Value, "= 0.12.20")
assert.Equal(t, outputs["terragrunt_version_constraint"].Value, "= 0.23.18")
assert.Equal(t, outputs["download_dir"].Value, ".terragrunt-cache")
assert.Equal(t, outputs["iam_role"].Value, "TerragruntIAMRole")
assert.Equal(t, outputs["skip"].Value, "true")
assert.Equal(t, outputs["prevent_destroy"].Value, "true")
// Simple maps
localstgOut := map[string]interface{}{}
require.NoError(t, json.Unmarshal([]byte(outputs["localstg"].Value.(string)), &localstgOut))
assert.Equal(t, localstgOut, map[string]interface{}{"the_answer": float64(42)})
inputsOut := map[string]interface{}{}
require.NoError(t, json.Unmarshal([]byte(outputs["inputs"].Value.(string)), &inputsOut))
assert.Equal(t, inputsOut, map[string]interface{}{"doc": "Emmett Brown"})
// Complex blocks
depsOut := map[string]interface{}{}
require.NoError(t, json.Unmarshal([]byte(outputs["dependencies"].Value.(string)), &depsOut))
assert.Equal(
t,
depsOut,
map[string]interface{}{
"paths": []interface{}{"../../fixture"},
},
)
generateOut := map[string]interface{}{}
require.NoError(t, json.Unmarshal([]byte(outputs["generate"].Value.(string)), &generateOut))
assert.Equal(
t,
generateOut,
map[string]interface{}{
"provider": map[string]interface{}{
"path": "provider.tf",
"if_exists": "overwrite_terragrunt",
"comment_prefix": "# ",
"disable_signature": false,
"contents": `provider "aws" {
region = "us-east-1"
}
`,
},
},
)
remoteStateOut := map[string]interface{}{}
require.NoError(t, json.Unmarshal([]byte(outputs["remote_state"].Value.(string)), &remoteStateOut))
assert.Equal(
t,
remoteStateOut,
map[string]interface{}{
"backend": "local",
"disable_init": false,
"disable_dependency_optimization": false,
"generate": map[string]interface{}{"path": "backend.tf", "if_exists": "overwrite_terragrunt"},
"config": map[string]interface{}{"path": "foo.tfstate"},
},
)
terraformOut := map[string]interface{}{}
require.NoError(t, json.Unmarshal([]byte(outputs["terraformtg"].Value.(string)), &terraformOut))
assert.Equal(
t,
terraformOut,
map[string]interface{}{
"source": "./delorean",
"include_in_copy": []interface{}{"time_machine.*"},
"extra_arguments": map[string]interface{}{
"var-files": map[string]interface{}{
"name": "var-files",
"commands": []interface{}{"apply", "plan"},
"arguments": nil,
"required_var_files": []interface{}{"extra.tfvars"},
"optional_var_files": []interface{}{"optional.tfvars"},
"env_vars": map[string]interface{}{
"TF_VAR_custom_var": "I'm set in extra_arguments env_vars",
},
},
},
"before_hook": map[string]interface{}{
"before_hook_1": map[string]interface{}{
"name": "before_hook_1",
"commands": []interface{}{"apply", "plan"},
"execute": []interface{}{"touch", "before.out"},
"working_dir": nil,
"run_on_error": true,
},
},
"after_hook": map[string]interface{}{
"after_hook_1": map[string]interface{}{
"name": "after_hook_1",
"commands": []interface{}{"apply", "plan"},
"execute": []interface{}{"touch", "after.out"},
"working_dir": nil,
"run_on_error": true,
},
},
},
)
}
func logBufferContentsLineByLine(t *testing.T, out bytes.Buffer, label string) {
t.Logf("[%s] Full contents of %s:", t.Name(), label)
lines := strings.Split(out.String(), "\n")
for _, line := range lines {
t.Logf("[%s] %s", t.Name(), line)
}
}
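// The generate-block tests below exercise the if_exists strategies of generate blocks. A hedged sketch of such a
// block (illustrative; the real fixtures live under TEST_FIXTURE_CODEGEN_PATH/generate-block and may differ):
//
//   generate "backend" {
//     path      = "backend.tf"
//     if_exists = "skip" # or "overwrite" / "overwrite_terragrunt" / "error"
//     contents  = <<EOF
//   terraform {
//     backend "local" {
//       path = "foo.tfstate"
//     }
//   }
//   EOF
//   }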
func TestTerragruntGenerateBlockSkip(t *testing.T) {
t.Parallel()
generateTestCase := filepath.Join(TEST_FIXTURE_CODEGEN_PATH, "generate-block", "skip")
cleanupTerraformFolder(t, generateTestCase)
cleanupTerragruntFolder(t, generateTestCase)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", generateTestCase))
assert.False(t, fileIsInFolder(t, "foo.tfstate", generateTestCase))
}
func TestTerragruntGenerateBlockOverwrite(t *testing.T) {
t.Parallel()
generateTestCase := filepath.Join(TEST_FIXTURE_CODEGEN_PATH, "generate-block", "overwrite")
cleanupTerraformFolder(t, generateTestCase)
cleanupTerragruntFolder(t, generateTestCase)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", generateTestCase))
// If the state file was written as foo.tfstate, that means it overwrote the local backend config.
assert.True(t, fileIsInFolder(t, "foo.tfstate", generateTestCase))
assert.False(t, fileIsInFolder(t, "bar.tfstate", generateTestCase))
}
func TestTerragruntGenerateAttr(t *testing.T) {
t.Parallel()
generateTestCase := filepath.Join(TEST_FIXTURE_CODEGEN_PATH, "generate-attr")
cleanupTerraformFolder(t, generateTestCase)
cleanupTerragruntFolder(t, generateTestCase)
text := "test-terragrunt-generate-attr-hello-world"
stdout, _, err := runTerragruntCommandWithOutput(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s -var text=\"%s\"", generateTestCase, text))
require.NoError(t, err)
require.Contains(t, stdout, text)
}
func TestTerragruntGenerateBlockOverwriteTerragruntSuccess(t *testing.T) {
t.Parallel()
generateTestCase := filepath.Join(TEST_FIXTURE_CODEGEN_PATH, "generate-block", "overwrite_terragrunt")
cleanupTerraformFolder(t, generateTestCase)
cleanupTerragruntFolder(t, generateTestCase)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", generateTestCase))
// If the state file was written as foo.tfstate, that means it overwrote the local backend config.
assert.True(t, fileIsInFolder(t, "foo.tfstate", generateTestCase))
assert.False(t, fileIsInFolder(t, "bar.tfstate", generateTestCase))
}
func TestTerragruntGenerateBlockOverwriteTerragruntFail(t *testing.T) {
t.Parallel()
generateTestCase := filepath.Join(TEST_FIXTURE_CODEGEN_PATH, "generate-block", "overwrite_terragrunt_error")
cleanupTerraformFolder(t, generateTestCase)
cleanupTerragruntFolder(t, generateTestCase)
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", generateTestCase), &stdout, &stderr)
require.Error(t, err)
_, ok := errors.Unwrap(err).(codegen.GenerateFileExistsError)
assert.True(t, ok)
}
func TestTerragruntGenerateBlockNestedInherit(t *testing.T) {
t.Parallel()
generateTestCase := filepath.Join(TEST_FIXTURE_CODEGEN_PATH, "generate-block", "nested", "child_inherit")
cleanupTerraformFolder(t, generateTestCase)
cleanupTerragruntFolder(t, generateTestCase)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", generateTestCase))
// If the state file was written as foo.tfstate, that means it inherited the config
assert.True(t, fileIsInFolder(t, "foo.tfstate", generateTestCase))
assert.False(t, fileIsInFolder(t, "bar.tfstate", generateTestCase))
// Also check to make sure the child config generate block was included
assert.True(t, fileIsInFolder(t, "random_file.txt", generateTestCase))
}
func TestTerragruntGenerateBlockNestedOverwrite(t *testing.T) {
t.Parallel()
generateTestCase := filepath.Join(TEST_FIXTURE_CODEGEN_PATH, "generate-block", "nested", "child_overwrite")
cleanupTerraformFolder(t, generateTestCase)
cleanupTerragruntFolder(t, generateTestCase)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", generateTestCase))
// If the state file was written as bar.tfstate, that means it overwrote the parent config
assert.False(t, fileIsInFolder(t, "foo.tfstate", generateTestCase))
assert.True(t, fileIsInFolder(t, "bar.tfstate", generateTestCase))
// Also check to make sure the child config generate block was included
assert.True(t, fileIsInFolder(t, "random_file.txt", generateTestCase))
}
func TestTerragruntGenerateBlockDisableSignature(t *testing.T) {
t.Parallel()
generateTestCase := filepath.Join(TEST_FIXTURE_CODEGEN_PATH, "generate-block", "disable-signature")
cleanupTerraformFolder(t, generateTestCase)
cleanupTerragruntFolder(t, generateTestCase)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", generateTestCase))
// Now check the outputs to make sure they are as expected
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
require.NoError(
t,
runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", generateTestCase), &stdout, &stderr),
)
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
assert.Equal(t, outputs["text"].Value, "Hello, World!")
}
func TestTerragruntGenerateBlockSameNameFail(t *testing.T) {
t.Parallel()
generateTestCase := filepath.Join(TEST_FIXTURE_CODEGEN_PATH, "generate-block", "same_name_error")
cleanupTerraformFolder(t, generateTestCase)
cleanupTerragruntFolder(t, generateTestCase)
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt init --terragrunt-working-dir %s", generateTestCase), &stdout, &stderr)
require.Error(t, err)
parsedError, ok := errors.Unwrap(err).(config.DuplicatedGenerateBlocks)
assert.True(t, ok)
assert.True(t, len(parsedError.BlockName) == 1)
assert.Contains(t, parsedError.BlockName, "backend")
}
func TestTerragruntGenerateBlockMultipleSameNameFail(t *testing.T) {
t.Parallel()
generateTestCase := filepath.Join(TEST_FIXTURE_CODEGEN_PATH, "generate-block", "same_name_pair_error")
cleanupTerraformFolder(t, generateTestCase)
cleanupTerragruntFolder(t, generateTestCase)
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt init --terragrunt-working-dir %s", generateTestCase), &stdout, &stderr)
require.Error(t, err)
parsedError, ok := errors.Unwrap(err).(config.DuplicatedGenerateBlocks)
assert.True(t, ok)
assert.True(t, len(parsedError.BlockName) == 2)
assert.Contains(t, parsedError.BlockName, "backend")
assert.Contains(t, parsedError.BlockName, "backend2")
}
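// remote_state blocks can also generate the backend file directly via their nested generate attribute. A hedged
// sketch (illustrative; compare the structure asserted in TestReadTerragruntConfigFull above):
//
//   remote_state {
//     backend = "local"
//     generate = {
//       path      = "backend.tf"
//       if_exists = "overwrite"
//     }
//     config = {
//       path = "foo.tfstate"
//     }
//   }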
func TestTerragruntRemoteStateCodegenGeneratesBackendBlock(t *testing.T) {
t.Parallel()
generateTestCase := filepath.Join(TEST_FIXTURE_CODEGEN_PATH, "remote-state", "base")
cleanupTerraformFolder(t, generateTestCase)
cleanupTerragruntFolder(t, generateTestCase)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", generateTestCase))
// If the state file was written as foo.tfstate, that means it wrote out the local backend config.
assert.True(t, fileIsInFolder(t, "foo.tfstate", generateTestCase))
}
func TestTerragruntRemoteStateCodegenOverwrites(t *testing.T) {
t.Parallel()
generateTestCase := filepath.Join(TEST_FIXTURE_CODEGEN_PATH, "remote-state", "overwrite")
cleanupTerraformFolder(t, generateTestCase)
cleanupTerragruntFolder(t, generateTestCase)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", generateTestCase))
// If the state file was written as foo.tfstate, that means it overwrote the local backend config.
assert.True(t, fileIsInFolder(t, "foo.tfstate", generateTestCase))
assert.False(t, fileIsInFolder(t, "bar.tfstate", generateTestCase))
}
func TestTerragruntRemoteStateCodegenGeneratesBackendBlockS3(t *testing.T) {
t.Parallel()
generateTestCase := filepath.Join(TEST_FIXTURE_CODEGEN_PATH, "remote-state", "s3")
cleanupTerraformFolder(t, generateTestCase)
cleanupTerragruntFolder(t, generateTestCase)
s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
lockTableName := fmt.Sprintf("terragrunt-test-locks-%s", strings.ToLower(uniqueId()))
defer deleteS3Bucket(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)
defer cleanupTableForTest(t, lockTableName, TERRAFORM_REMOTE_STATE_S3_REGION)
tmpTerragruntConfigPath := createTmpTerragruntConfig(t, generateTestCase, s3BucketName, lockTableName, config.DefaultTerragruntConfigPath)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-config %s --terragrunt-working-dir %s", tmpTerragruntConfigPath, generateTestCase))
}
func TestTerragruntRemoteStateCodegenErrorsIfExists(t *testing.T) {
t.Parallel()
generateTestCase := filepath.Join(TEST_FIXTURE_CODEGEN_PATH, "remote-state", "error")
cleanupTerraformFolder(t, generateTestCase)
cleanupTerragruntFolder(t, generateTestCase)
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", generateTestCase), &stdout, &stderr)
require.Error(t, err)
_, ok := errors.Unwrap(err).(codegen.GenerateFileExistsError)
assert.True(t, ok)
}
func TestTerragruntRemoteStateCodegenDoesNotGenerateWithSkip(t *testing.T) {
t.Parallel()
generateTestCase := filepath.Join(TEST_FIXTURE_CODEGEN_PATH, "remote-state", "skip")
cleanupTerraformFolder(t, generateTestCase)
cleanupTerragruntFolder(t, generateTestCase)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", generateTestCase))
assert.False(t, fileIsInFolder(t, "foo.tfstate", generateTestCase))
}
func TestTerragruntValidateAllWithVersionChecks(t *testing.T) {
t.Parallel()
tmpEnvPath := copyEnvironment(t, "fixture-version-check")
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntVersionCommand(t, "v0.23.21", fmt.Sprintf("terragrunt validate-all --terragrunt-non-interactive --terragrunt-working-dir %s", tmpEnvPath), &stdout, &stderr)
logBufferContentsLineByLine(t, stdout, "stdout")
logBufferContentsLineByLine(t, stderr, "stderr")
require.NoError(t, err)
}
func TestTerragruntIncludeParentHclFile(t *testing.T) {
t.Parallel()
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_INCLUDE_PARENT)
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt run-all apply --terragrunt-modules-that-include parent.hcl --terragrunt-non-interactive --terragrunt-working-dir %s", tmpEnvPath), &stdout, &stderr)
require.NoError(t, err)
out := stderr.String()
assert.Equal(t, 1, strings.Count(out, "parent_hcl_file"))
}
func TestTerragruntVersionConstraints(t *testing.T) {
testCases := []struct {
name string
terragruntVersion string
terragruntConstraint string
shouldSucceed bool
}{
{
"version meets constraint equal",
"v0.23.18",
"terragrunt_version_constraint = \">= v0.23.18\"",
true,
},
{
"version meets constriant greater patch",
"v0.23.19",
"terragrunt_version_constraint = \">= v0.23.18\"",
true,
},
{
"version meets constriant greater major",
"v1.0.0",
"terragrunt_version_constraint = \">= v0.23.18\"",
true,
},
{
"version meets constriant less patch",
"v0.23.17",
"terragrunt_version_constraint = \">= v0.23.18\"",
false,
},
{
"version meets constriant less major",
"v0.22.18",
"terragrunt_version_constraint = \">= v0.23.18\"",
false,
},
}
for _, testCase := range testCases {
testCase := testCase
t.Run(testCase.name, func(t *testing.T) {
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_READ_CONFIG)
rootPath := filepath.Join(tmpEnvPath, TEST_FIXTURE_READ_CONFIG, "with_constraints")
tmpTerragruntConfigPath := createTmpTerragruntConfigContent(t, testCase.terragruntConstraint, config.DefaultTerragruntConfigPath)
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntVersionCommand(t, testCase.terragruntVersion, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-config %s --terragrunt-working-dir %s", tmpTerragruntConfigPath, rootPath), &stdout, &stderr)
logBufferContentsLineByLine(t, stdout, "stdout")
logBufferContentsLineByLine(t, stderr, "stderr")
if testCase.shouldSucceed {
require.NoError(t, err)
} else {
require.Error(t, err)
}
})
}
}
func TestReadTerragruntConfigIamRole(t *testing.T) {
t.Parallel()
identityArn, err := aws_helper.GetAWSIdentityArn(nil, &options.TerragruntOptions{})
assert.NoError(t, err)
cleanupTerraformFolder(t, TEST_FIXTURE_READ_IAM_ROLE)
// Execution outputs to be verified
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
// Invoke terragrunt and verify used IAM role
err = runTerragruntCommand(t, fmt.Sprintf("terragrunt init --terragrunt-working-dir %s", TEST_FIXTURE_READ_IAM_ROLE), &stdout, &stderr)
	// Since the AWS accounts referenced here don't exist, validation relies on both the success and error outputs
output := fmt.Sprintf("%v %v %v", string(stderr.Bytes()), string(stdout.Bytes()), err.Error())
// Check that output contains value defined in IAM role
assert.Equal(t, 1, strings.Count(output, "666666666666"))
// Ensure that state file wasn't created with default IAM value
assert.True(t, util.FileNotExists(util.JoinPath(TEST_FIXTURE_READ_IAM_ROLE, identityArn+".txt")))
}
func TestIamRolesLoadingFromDifferentModules(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_IAM_ROLES_MULTIPLE_MODULES)
// Execution outputs to be verified
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
// Invoke terragrunt and verify used IAM roles for each dependency
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt init --terragrunt-log-level debug --terragrunt-debugreset --terragrunt-working-dir %s", TEST_FIXTURE_IAM_ROLES_MULTIPLE_MODULES), &stdout, &stderr)
	// Combine all outputs into a single string
output := fmt.Sprintf("%v %v %v", string(stderr.Bytes()), string(stdout.Bytes()), err.Error())
component1 := ""
component2 := ""
// scan each output line and get lines for component1 and component2
for _, line := range strings.Split(output, "\n") {
if strings.Contains(line, "Assuming IAM role arn:aws:iam::component1:role/terragrunt") {
component1 = line
continue
}
if strings.Contains(line, "Assuming IAM role arn:aws:iam::component2:role/terragrunt") {
component2 = line
continue
}
}
assert.NotEmptyf(t, component1, "Missing role for component 1")
assert.NotEmptyf(t, component2, "Missing role for component 2")
assert.Contains(t, component1, "iam_roles_multiple_modules/component")
assert.Contains(t, component2, "iam_roles_multiple_modules/component2")
}
func TestTerragruntVersionConstraintsPartialParse(t *testing.T) {
fixturePath := "fixture-partial-parse/terragrunt-version-constraint"
cleanupTerragruntFolder(t, fixturePath)
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntVersionCommand(t, "0.21.23", fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", fixturePath), &stdout, &stderr)
logBufferContentsLineByLine(t, stdout, "stdout")
logBufferContentsLineByLine(t, stderr, "stderr")
assert.Error(t, err)
_, isTgVersionError := errors.Unwrap(err).(cli.InvalidTerragruntVersion)
assert.True(t, isTgVersionError)
}
func cleanupTerraformFolder(t *testing.T, templatesPath string) {
removeFile(t, util.JoinPath(templatesPath, TERRAFORM_STATE))
removeFile(t, util.JoinPath(templatesPath, TERRAFORM_STATE_BACKUP))
removeFile(t, util.JoinPath(templatesPath, terragruntDebugFile))
removeFolder(t, util.JoinPath(templatesPath, TERRAFORM_FOLDER))
}
func cleanupTerragruntFolder(t *testing.T, templatesPath string) {
removeFolder(t, util.JoinPath(templatesPath, TERRAGRUNT_CACHE))
}
func removeFile(t *testing.T, path string) {
if util.FileExists(path) {
if err := os.Remove(path); err != nil {
t.Fatalf("Error while removing %s: %v", path, err)
}
}
}
func removeFolder(t *testing.T, path string) {
if util.FileExists(path) {
if err := os.RemoveAll(path); err != nil {
t.Fatalf("Error while removing %s: %v", path, err)
}
}
}
func runTerragruntCommand(t *testing.T, command string, writer io.Writer, errwriter io.Writer) error {
return runTerragruntVersionCommand(t, "TEST", command, writer, errwriter)
}
func runTerragruntVersionCommand(t *testing.T, version string, command string, writer io.Writer, errwriter io.Writer) error {
args := strings.Split(command, " ")
fmt.Println("runTerragruntVersionCommand after split")
fmt.Println(args)
app := cli.CreateTerragruntCli(version, writer, errwriter)
return app.Run(args)
}
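// Note (editorial): the helper above splits the command string on single spaces,
// so individual arguments must not themselves contain spaces; a working directory
// path containing a space would be broken into separate arguments.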
func runTerragrunt(t *testing.T, command string) {
runTerragruntRedirectOutput(t, command, os.Stdout, os.Stderr)
}
func runTerragruntCommandWithOutput(t *testing.T, command string) (string, string, error) {
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, command, &stdout, &stderr)
logBufferContentsLineByLine(t, stdout, "stdout")
logBufferContentsLineByLine(t, stderr, "stderr")
return stdout.String(), stderr.String(), err
}
func runTerragruntRedirectOutput(t *testing.T, command string, writer io.Writer, errwriter io.Writer) {
if err := runTerragruntCommand(t, command, writer, errwriter); err != nil {
stdout := "(see log output above)"
if stdoutAsBuffer, stdoutIsBuffer := writer.(*bytes.Buffer); stdoutIsBuffer {
stdout = stdoutAsBuffer.String()
}
stderr := "(see log output above)"
if stderrAsBuffer, stderrIsBuffer := errwriter.(*bytes.Buffer); stderrIsBuffer {
stderr = stderrAsBuffer.String()
}
t.Fatalf("Failed to run Terragrunt command '%s' due to error: %s\n\nStdout: %s\n\nStderr: %s", command, errors.PrintErrorWithStackTrace(err), stdout, stderr)
}
}
func copyEnvironment(t *testing.T, environmentPath string) string {
tmpDir, err := ioutil.TempDir("", "terragrunt-test")
if err != nil {
t.Fatalf("Failed to create temp dir due to error: %v", err)
}
t.Logf("Copying %s to %s", environmentPath, tmpDir)
require.NoError(t, util.CopyFolderContents(environmentPath, util.JoinPath(tmpDir, environmentPath), ".terragrunt-test", nil))
return tmpDir
}
func copyEnvironmentToPath(t *testing.T, environmentPath, targetPath string) {
if err := os.MkdirAll(targetPath, 0777); err != nil {
t.Fatalf("Failed to create temp dir %s due to error %v", targetPath, err)
}
copyErr := util.CopyFolderContents(environmentPath, util.JoinPath(targetPath, environmentPath), ".terragrunt-test", nil)
require.NoError(t, copyErr)
}
func createTmpTerragruntConfigWithParentAndChild(t *testing.T, parentPath string, childRelPath string, s3BucketName string, parentConfigFileName string, childConfigFileName string) string {
tmpDir, err := ioutil.TempDir("", "terragrunt-parent-child-test")
if err != nil {
t.Fatalf("Failed to create temp dir due to error: %v", err)
}
childDestPath := util.JoinPath(tmpDir, childRelPath)
if err := os.MkdirAll(childDestPath, 0777); err != nil {
t.Fatalf("Failed to create temp dir %s due to error %v", childDestPath, err)
}
parentTerragruntSrcPath := util.JoinPath(parentPath, parentConfigFileName)
parentTerragruntDestPath := util.JoinPath(tmpDir, parentConfigFileName)
copyTerragruntConfigAndFillPlaceholders(t, parentTerragruntSrcPath, parentTerragruntDestPath, s3BucketName, "not-used", "not-used")
childTerragruntSrcPath := util.JoinPath(util.JoinPath(parentPath, childRelPath), childConfigFileName)
childTerragruntDestPath := util.JoinPath(childDestPath, childConfigFileName)
copyTerragruntConfigAndFillPlaceholders(t, childTerragruntSrcPath, childTerragruntDestPath, s3BucketName, "not-used", "not-used")
return childTerragruntDestPath
}
func createTmpTerragruntConfig(t *testing.T, templatesPath string, s3BucketName string, lockTableName string, configFileName string) string {
tmpFolder, err := ioutil.TempDir("", "terragrunt-test")
if err != nil {
t.Fatalf("Failed to create temp folder due to error: %v", err)
}
tmpTerragruntConfigFile := util.JoinPath(tmpFolder, configFileName)
originalTerragruntConfigPath := util.JoinPath(templatesPath, configFileName)
copyTerragruntConfigAndFillPlaceholders(t, originalTerragruntConfigPath, tmpTerragruntConfigFile, s3BucketName, lockTableName, "not-used")
return tmpTerragruntConfigFile
}
func createTmpTerragruntConfigContent(t *testing.T, contents string, configFileName string) string {
tmpFolder, err := ioutil.TempDir("", "terragrunt-test")
if err != nil {
t.Fatalf("Failed to create temp folder due to error: %v", err)
}
tmpTerragruntConfigFile := util.JoinPath(tmpFolder, configFileName)
if err := ioutil.WriteFile(tmpTerragruntConfigFile, []byte(contents), 0444); err != nil {
t.Fatalf("Error writing temp Terragrunt config to %s: %v", tmpTerragruntConfigFile, err)
}
return tmpTerragruntConfigFile
}
func createTmpTerragruntGCSConfig(t *testing.T, templatesPath string, project string, location string, gcsBucketName string, configFileName string) string {
tmpFolder, err := ioutil.TempDir("", "terragrunt-test")
if err != nil {
t.Fatalf("Failed to create temp folder due to error: %v", err)
}
tmpTerragruntConfigFile := util.JoinPath(tmpFolder, configFileName)
originalTerragruntConfigPath := util.JoinPath(templatesPath, configFileName)
copyTerragruntGCSConfigAndFillPlaceholders(t, originalTerragruntConfigPath, tmpTerragruntConfigFile, project, location, gcsBucketName)
return tmpTerragruntConfigFile
}
func copyTerragruntConfigAndFillPlaceholders(t *testing.T, configSrcPath string, configDestPath string, s3BucketName string, lockTableName string, region string) {
contents, err := util.ReadFileAsString(configSrcPath)
if err != nil {
t.Fatalf("Error reading Terragrunt config at %s: %v", configSrcPath, err)
}
contents = strings.Replace(contents, "__FILL_IN_BUCKET_NAME__", s3BucketName, -1)
contents = strings.Replace(contents, "__FILL_IN_LOCK_TABLE_NAME__", lockTableName, -1)
contents = strings.Replace(contents, "__FILL_IN_REGION__", region, -1)
contents = strings.Replace(contents, "__FILL_IN_LOGS_BUCKET_NAME__", s3BucketName+"-tf-state-logs", -1)
if err := ioutil.WriteFile(configDestPath, []byte(contents), 0444); err != nil {
t.Fatalf("Error writing temp Terragrunt config to %s: %v", configDestPath, err)
}
}
func copyTerragruntGCSConfigAndFillPlaceholders(t *testing.T, configSrcPath string, configDestPath string, project string, location string, gcsBucketName string) {
contents, err := util.ReadFileAsString(configSrcPath)
if err != nil {
t.Fatalf("Error reading Terragrunt config at %s: %v", configSrcPath, err)
}
contents = strings.Replace(contents, "__FILL_IN_PROJECT__", project, -1)
contents = strings.Replace(contents, "__FILL_IN_LOCATION__", location, -1)
contents = strings.Replace(contents, "__FILL_IN_BUCKET_NAME__", gcsBucketName, -1)
if err := ioutil.WriteFile(configDestPath, []byte(contents), 0444); err != nil {
t.Fatalf("Error writing temp Terragrunt config to %s: %v", configDestPath, err)
}
}
// Returns a unique (ish) id we can attach to resources and tfstate files so they don't conflict with each other
// Uses base 62 to generate a 6 character string that's unlikely to collide with the handful of tests we run in
// parallel. Based on code here: http://stackoverflow.com/a/9543797/483528
func uniqueId() string {
const BASE_62_CHARS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
const UNIQUE_ID_LENGTH = 6 // Should be good for 62^6 = 56+ billion combinations
var out bytes.Buffer
for i := 0; i < UNIQUE_ID_LENGTH; i++ {
out.WriteByte(BASE_62_CHARS[rand.Intn(len(BASE_62_CHARS))])
}
return out.String()
}
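// For example (editorial note), with 10 tests running in parallel the birthday-bound
// chance of any two ids colliding is roughly (10*9/2) / 62^6 = 45 / 56,800,235,584,
// i.e. well under one in a billion, so collisions are negligible in practice.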
// Check that the S3 Bucket of the given name and region exists. Terragrunt should create this bucket during the test.
// Also check if bucket got tagged properly and that public access is disabled completely.
func validateS3BucketExistsAndIsTagged(t *testing.T, awsRegion string, bucketName string, expectedTags map[string]string) {
mockOptions, err := options.NewTerragruntOptionsForTest("integration_test")
if err != nil {
t.Fatalf("Error creating mockOptions: %v", err)
}
sessionConfig := &aws_helper.AwsSessionConfig{
Region: awsRegion,
}
s3Client, err := remote.CreateS3Client(sessionConfig, mockOptions)
if err != nil {
t.Fatalf("Error creating S3 client: %v", err)
}
remoteStateConfig := remote.RemoteStateConfigS3{Bucket: bucketName, Region: awsRegion}
assert.True(t, remote.DoesS3BucketExist(s3Client, &remoteStateConfig.Bucket), "Terragrunt failed to create remote state S3 bucket %s", bucketName)
if expectedTags != nil {
assertS3Tags(expectedTags, bucketName, s3Client, t)
}
assertS3PublicAccessBlocks(t, s3Client, bucketName)
}
// Check that the DynamoDB table of the given name and region exists. Terragrunt should create this table during the test.
// Also check if table got tagged properly
func validateDynamoDBTableExistsAndIsTagged(t *testing.T, awsRegion string, tableName string, expectedTags map[string]string) {
client := createDynamoDbClientForTest(t, awsRegion)
var description, err = client.DescribeTable(&dynamodb.DescribeTableInput{TableName: aws.String(tableName)})
if err != nil {
// This is a ResourceNotFoundException in case the table does not exist
t.Fatal(err)
}
var tags, err2 = client.ListTagsOfResource(&dynamodb.ListTagsOfResourceInput{ResourceArn: description.Table.TableArn})
if err2 != nil {
t.Fatal(err2)
}
var actualTags = make(map[string]string)
for _, element := range tags.Tags {
actualTags[*element.Key] = *element.Value
}
assert.Equal(t, expectedTags, actualTags, "Did not find expected tags on dynamo table.")
}
func assertS3Tags(expectedTags map[string]string, bucketName string, client *s3.S3, t *testing.T) {
var in = s3.GetBucketTaggingInput{}
in.SetBucket(bucketName)
var tags, err2 = client.GetBucketTagging(&in)
if err2 != nil {
t.Fatal(err2)
}
var actualTags = make(map[string]string)
for _, element := range tags.TagSet {
actualTags[*element.Key] = *element.Value
}
assert.Equal(t, expectedTags, actualTags, "Did not find expected tags on s3 bucket.")
}
func assertS3PublicAccessBlocks(t *testing.T, client *s3.S3, bucketName string) {
resp, err := client.GetPublicAccessBlock(
&s3.GetPublicAccessBlockInput{Bucket: aws.String(bucketName)},
)
require.NoError(t, err)
publicAccessBlockConfig := resp.PublicAccessBlockConfiguration
assert.True(t, aws.BoolValue(publicAccessBlockConfig.BlockPublicAcls))
assert.True(t, aws.BoolValue(publicAccessBlockConfig.BlockPublicPolicy))
assert.True(t, aws.BoolValue(publicAccessBlockConfig.IgnorePublicAcls))
assert.True(t, aws.BoolValue(publicAccessBlockConfig.RestrictPublicBuckets))
}
// deleteS3BucketWithRetry will attempt to delete the specified S3 bucket, retrying up to 3 times if there are errors to
// handle eventual consistency issues.
func deleteS3BucketWithRetry(t *testing.T, awsRegion string, bucketName string) {
for i := 0; i < 3; i++ {
err := deleteS3BucketE(t, awsRegion, bucketName)
if err == nil {
return
}
t.Logf("Error deleting s3 bucket %s. Sleeping for 10 seconds before retrying.", bucketName)
time.Sleep(10 * time.Second)
}
t.Fatalf("Max retries attempting to delete s3 bucket %s in region %s", bucketName, awsRegion)
}
// Delete the specified S3 bucket to clean up after a test
func deleteS3Bucket(t *testing.T, awsRegion string, bucketName string) {
require.NoError(t, deleteS3BucketE(t, awsRegion, bucketName))
}
func deleteS3BucketE(t *testing.T, awsRegion string, bucketName string) error {
mockOptions, err := options.NewTerragruntOptionsForTest("integration_test")
if err != nil {
t.Logf("Error creating mockOptions: %v", err)
return err
}
sessionConfig := &aws_helper.AwsSessionConfig{
Region: awsRegion,
}
s3Client, err := remote.CreateS3Client(sessionConfig, mockOptions)
if err != nil {
t.Logf("Error creating S3 client: %v", err)
return err
}
t.Logf("Deleting test s3 bucket %s", bucketName)
out, err := s3Client.ListObjectVersions(&s3.ListObjectVersionsInput{Bucket: aws.String(bucketName)})
if err != nil {
t.Logf("Failed to list object versions in s3 bucket %s: %v", bucketName, err)
return err
}
objectIdentifiers := []*s3.ObjectIdentifier{}
for _, version := range out.Versions {
objectIdentifiers = append(objectIdentifiers, &s3.ObjectIdentifier{
Key: version.Key,
VersionId: version.VersionId,
})
}
if len(objectIdentifiers) > 0 {
deleteInput := &s3.DeleteObjectsInput{
Bucket: aws.String(bucketName),
Delete: &s3.Delete{Objects: objectIdentifiers},
}
if _, err := s3Client.DeleteObjects(deleteInput); err != nil {
t.Logf("Error deleting all versions of all objects in bucket %s: %v", bucketName, err)
return err
}
}
if _, err := s3Client.DeleteBucket(&s3.DeleteBucketInput{Bucket: aws.String(bucketName)}); err != nil {
t.Logf("Failed to delete S3 bucket %s: %v", bucketName, err)
return err
}
return nil
}
// Create an authenticated client for DynamoDB
func createDynamoDbClient(awsRegion, awsProfile string, iamRoleArn string) (*dynamodb.DynamoDB, error) {
mockOptions, err := options.NewTerragruntOptionsForTest("integration_test")
if err != nil {
return nil, err
}
sessionConfig := &aws_helper.AwsSessionConfig{
Region: awsRegion,
Profile: awsProfile,
RoleArn: iamRoleArn,
}
session, err := aws_helper.CreateAwsSession(sessionConfig, mockOptions)
if err != nil {
return nil, err
}
return dynamodb.New(session), nil
}
func createDynamoDbClientForTest(t *testing.T, awsRegion string) *dynamodb.DynamoDB {
client, err := createDynamoDbClient(awsRegion, "", "")
if err != nil {
t.Fatal(err)
}
return client
}
func cleanupTableForTest(t *testing.T, tableName string, awsRegion string) {
client := createDynamoDbClientForTest(t, awsRegion)
err := terragruntDynamoDb.DeleteTable(tableName, client)
assert.NoError(t, err)
}
// Check that the GCS Bucket of the given name and location exists. Terragrunt should create this bucket during the test.
// Also check if bucket got labeled properly.
func validateGCSBucketExistsAndIsLabeled(t *testing.T, location string, bucketName string, expectedLabels map[string]string) {
remoteStateConfig := remote.RemoteStateConfigGCS{Bucket: bucketName}
gcsClient, err := remote.CreateGCSClient(remoteStateConfig)
if err != nil {
t.Fatalf("Error creating GCS client: %v", err)
}
// verify the bucket exists
assert.True(t, remote.DoesGCSBucketExist(gcsClient, &remoteStateConfig), "Terragrunt failed to create remote state GCS bucket %s", bucketName)
// verify the bucket location
ctx := context.Background()
bucket := gcsClient.Bucket(bucketName)
attrs, err := bucket.Attrs(ctx)
if err != nil {
t.Fatal(err)
}
assert.Equal(t, strings.ToUpper(location), attrs.Location, "Did not find GCS bucket in expected location.")
if expectedLabels != nil {
assertGCSLabels(t, expectedLabels, bucketName, gcsClient)
}
}
func assertGCSLabels(t *testing.T, expectedLabels map[string]string, bucketName string, client *storage.Client) {
ctx := context.Background()
bucket := client.Bucket(bucketName)
attrs, err := bucket.Attrs(ctx)
if err != nil {
t.Fatal(err)
}
var actualLabels = make(map[string]string)
for key, value := range attrs.Labels {
actualLabels[key] = value
}
assert.Equal(t, expectedLabels, actualLabels, "Did not find expected labels on GCS bucket.")
}
// Create the specified GCS bucket
func createGCSBucket(t *testing.T, projectID string, location string, bucketName string) {
var gcsConfig remote.RemoteStateConfigGCS
gcsClient, err := remote.CreateGCSClient(gcsConfig)
if err != nil {
t.Fatalf("Error creating GCS client: %v", err)
}
t.Logf("Creating test GCS bucket %s in project %s, location %s", bucketName, projectID, location)
ctx := context.Background()
bucket := gcsClient.Bucket(bucketName)
bucketAttrs := &storage.BucketAttrs{
Location: location,
VersioningEnabled: true,
}
if err := bucket.Create(ctx, projectID, bucketAttrs); err != nil {
t.Fatalf("Failed to create GCS bucket %s: %v", bucketName, err)
}
}
// Delete the specified GCS bucket to clean up after a test
func deleteGCSBucket(t *testing.T, bucketName string) {
var gcsConfig remote.RemoteStateConfigGCS
gcsClient, err := remote.CreateGCSClient(gcsConfig)
if err != nil {
t.Fatalf("Error creating GCS client: %v", err)
}
t.Logf("Deleting test GCS bucket %s", bucketName)
ctx := context.Background()
// List all objects including their versions in the bucket
bucket := gcsClient.Bucket(bucketName)
q := &storage.Query{
Versions: true,
}
it := bucket.Objects(ctx, q)
for {
objectAttrs, err := it.Next()
if err == iterator.Done {
break
}
if err != nil {
t.Fatalf("Failed to list objects and versions in GCS bucket %s: %v", bucketName, err)
}
// purge the object version
if err := bucket.Object(objectAttrs.Name).Generation(objectAttrs.Generation).Delete(ctx); err != nil {
t.Fatalf("Failed to delete GCS bucket object %s: %v", objectAttrs.Name, err)
}
}
	// remove the now-empty bucket
if err := bucket.Delete(ctx); err != nil {
t.Fatalf("Failed to delete GCS bucket %s: %v", bucketName, err)
}
}
func fileIsInFolder(t *testing.T, name string, path string) bool {
found := false
err := filepath.Walk(path, func(path string, info os.FileInfo, err error) error {
assert.NoError(t, err)
if filepath.Base(path) == name {
found = true
}
return nil
})
require.NoError(t, err)
return found
}
func runValidateAllWithIncludeAndGetIncludedModules(t *testing.T, rootModulePath string, includeModulePaths []string, strictInclude bool) []string {
cmd_parts := []string{
"terragrunt", "run-all", "validate",
"--terragrunt-non-interactive",
"--terragrunt-log-level", "debug",
"--terragrunt-working-dir", rootModulePath,
}
for _, module := range includeModulePaths {
cmd_parts = append(cmd_parts, "--terragrunt-include-dir", module)
}
if strictInclude {
cmd_parts = append(cmd_parts, "--terragrunt-strict-include")
}
cmd := strings.Join(cmd_parts, " ")
validateAllStdout := bytes.Buffer{}
validateAllStderr := bytes.Buffer{}
err := runTerragruntCommand(
t,
cmd,
&validateAllStdout,
&validateAllStderr,
)
logBufferContentsLineByLine(t, validateAllStdout, "validate-all stdout")
logBufferContentsLineByLine(t, validateAllStderr, "validate-all stderr")
require.NoError(t, err)
currentDir, err := os.Getwd()
require.NoError(t, err)
includedModulesRegexp, err := regexp.Compile(
fmt.Sprintf(
`=> Module %s/%s/(.+) \(excluded: (true|false)`,
currentDir,
rootModulePath,
),
)
require.NoError(t, err)
matches := includedModulesRegexp.FindAllStringSubmatch(string(validateAllStderr.Bytes()), -1)
includedModules := []string{}
for _, match := range matches {
if match[2] == "false" {
includedModules = append(includedModules, match[1])
}
}
sort.Strings(includedModules)
return includedModules
}
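// For illustration (hypothetical paths), a debug stderr line of the form
//   => Module /current/working/dir/root-module/module-a (excluded: false)
// contributes "module-a" to the returned slice, while lines marked
// "(excluded: true)" are filtered out by the loop above.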
// SOPS decryption for inputs
func TestSopsDecryptedCorrectly(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_SOPS)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_SOPS)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_SOPS)
runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr)
require.NoError(t, err)
outputs := map[string]TerraformOutput{}
require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs))
assert.Equal(t, outputs["json_bool_array"].Value, []interface{}{true, false})
assert.Equal(t, outputs["json_string_array"].Value, []interface{}{"example_value1", "example_value2"})
assert.Equal(t, outputs["json_number"].Value, 1234.56789)
assert.Equal(t, outputs["json_string"].Value, "example_value")
assert.Equal(t, outputs["json_hello"].Value, "Welcome to SOPS! Edit this file as you please!")
assert.Equal(t, outputs["yaml_bool_array"].Value, []interface{}{true, false})
assert.Equal(t, outputs["yaml_string_array"].Value, []interface{}{"example_value1", "example_value2"})
assert.Equal(t, outputs["yaml_number"].Value, 1234.5679)
assert.Equal(t, outputs["yaml_string"].Value, "example_value")
assert.Equal(t, outputs["yaml_hello"].Value, "Welcome to SOPS! Edit this file as you please!")
assert.Equal(t, outputs["text_value"].Value, "Raw Secret Example")
assert.Contains(t, outputs["env_value"].Value, "DB_PASSWORD=tomato")
assert.Contains(t, outputs["ini_value"].Value, "password = potato")
}
func TestTerragruntRunAllCommandPrompt(t *testing.T) {
t.Parallel()
s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_OUTPUT_ALL)
rootTerragruntConfigPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_OUTPUT_ALL, config.DefaultTerragruntConfigPath)
copyTerragruntConfigAndFillPlaceholders(t, rootTerragruntConfigPath, rootTerragruntConfigPath, s3BucketName, "not-used", "not-used")
environmentPath := fmt.Sprintf("%s/%s/env1", tmpEnvPath, TEST_FIXTURE_OUTPUT_ALL)
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt run-all apply --terragrunt-working-dir %s", environmentPath), &stdout, &stderr)
logBufferContentsLineByLine(t, stdout, "stdout")
logBufferContentsLineByLine(t, stderr, "stderr")
assert.Contains(t, stderr.String(), "Are you sure you want to run 'terragrunt apply' in each folder of the stack described above? (y/n)")
assert.Error(t, err)
}
func TestTerragruntInitOnce(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_LOCAL_RUN_ONCE)
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
runTerragruntCommand(t, fmt.Sprintf("terragrunt init --terragrunt-working-dir %s", TEST_FIXTURE_LOCAL_RUN_ONCE), &stdout, &stderr)
errout := string(stderr.Bytes())
assert.Equal(t, 1, strings.Count(errout, "foo"))
}
func TestTerragruntInitRunCmd(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_LOCAL_RUN_MULTIPLE)
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
runTerragruntCommand(t, fmt.Sprintf("terragrunt init --terragrunt-working-dir %s", TEST_FIXTURE_LOCAL_RUN_MULTIPLE), &stdout, &stderr)
errout := string(stderr.Bytes())
// Check for cached values between locals and inputs sections
assert.Equal(t, 1, strings.Count(errout, "potato"))
assert.Equal(t, 1, strings.Count(errout, "carrot"))
assert.Equal(t, 1, strings.Count(errout, "bar"))
assert.Equal(t, 1, strings.Count(errout, "foo"))
assert.Equal(t, 1, strings.Count(errout, "input_variable"))
// Commands executed multiple times because of different arguments
assert.Equal(t, 4, strings.Count(errout, "uuid"))
assert.Equal(t, 6, strings.Count(errout, "random_arg"))
assert.Equal(t, 4, strings.Count(errout, "another_arg"))
}
func TestShowWarningWithDependentModulesBeforeDestroy(t *testing.T) {
rootPath := copyEnvironment(t, TEST_FIXTURE_DESTROY_WARNING)
rootPath = util.JoinPath(rootPath, TEST_FIXTURE_DESTROY_WARNING)
vpcPath := util.JoinPath(rootPath, "vpc")
appPath := util.JoinPath(rootPath, "app")
cleanupTerraformFolder(t, rootPath)
cleanupTerraformFolder(t, vpcPath)
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt run-all init --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr)
assert.NoError(t, err)
err = runTerragruntCommand(t, fmt.Sprintf("terragrunt run-all apply --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr)
assert.NoError(t, err)
// try to destroy vpc module and check if warning is printed in output
stdout = bytes.Buffer{}
stderr = bytes.Buffer{}
err = runTerragruntCommand(t, fmt.Sprintf("terragrunt destroy --terragrunt-non-interactive --terragrunt-working-dir %s", vpcPath), &stdout, &stderr)
assert.NoError(t, err)
output := string(stderr.Bytes())
assert.Equal(t, 1, strings.Count(output, appPath))
}
func TestShowErrorWhenRunAllInvokedWithoutArguments(t *testing.T) {
t.Parallel()
appPath := TEST_FIXTURE_STACK
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt run-all --terragrunt-non-interactive --terragrunt-working-dir %s", appPath), &stdout, &stderr)
require.Error(t, err)
_, ok := errors.Unwrap(err).(cli.MissingCommand)
assert.True(t, ok)
}
func TestPathRelativeToIncludeInvokedInCorrectPathFromChild(t *testing.T) {
t.Parallel()
appPath := path.Join(TEST_FIXTURE_RELATIVE_INCLUDE_CMD, "app")
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt version --terragrunt-log-level trace --terragrunt-non-interactive --terragrunt-working-dir %s", appPath), &stdout, &stderr)
require.NoError(t, err)
errout := string(stderr.Bytes())
assert.Equal(t, 1, strings.Count(errout, "\npath_relative_to_inclue: app\n"))
assert.Equal(t, 0, strings.Count(errout, "\npath_relative_to_inclue: .\n"))
}
func TestTerragruntInitConfirmation(t *testing.T) {
t.Parallel()
s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_OUTPUT_ALL)
rootTerragruntConfigPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_OUTPUT_ALL, config.DefaultTerragruntConfigPath)
copyTerragruntConfigAndFillPlaceholders(t, rootTerragruntConfigPath, rootTerragruntConfigPath, s3BucketName, "not-used", "not-used")
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
err := runTerragruntCommand(t, fmt.Sprintf("terragrunt run-all init --terragrunt-working-dir %s", tmpEnvPath), &stdout, &stderr)
require.Error(t, err)
errout := string(stderr.Bytes())
assert.Equal(t, 1, strings.Count(errout, "does not exist or you don't have permissions to access it. Would you like Terragrunt to create it? (y/n)"))
}
interfaces.go | package main
import (
"fmt"
"math"
)
type geometry interface {
area() float64
perim() float64
}
type rect struct {
width, height float64
}
type circle struct {
radius float64
}
func (r rect) area() float64 {
return r.width * r.height
}
func (r rect) perim() float64 {
return 2*r.width + 2*r.height
}
func (c circle) area() float64 {
return math.Pi * c.radius * c.radius
}
func (c circle) perim() float64 {
return 2 * math.Pi * c.radius
}
// This function demonstrates polymorphism: any type satisfying geometry can be measured.
func measure(g geometry) {
fmt.Println(g)
fmt.Println(g.area())
fmt.Println(g.perim())
}
func main() {
r := rect{width: 3, height: 4}
c := circle{radius: 5}
measure(r)
measure(c)
}
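// Expected output (editorial sketch; values follow Go's default %v formatting):
// {3 4}
// 12
// 14
// {5}
// 78.53981633974483
// 31.41592653589793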
deployment.rs | // Generated from definition io.k8s.api.extensions.v1beta1.Deployment
/// DEPRECATED - This group version of Deployment is deprecated by apps/v1beta2/Deployment. See the release notes for more information. Deployment enables declarative updates for Pods and ReplicaSets.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct Deployment {
/// Standard object metadata.
pub metadata: Option<crate::v1_8::apimachinery::pkg::apis::meta::v1::ObjectMeta>,
/// Specification of the desired behavior of the Deployment.
pub spec: Option<crate::v1_8::api::extensions::v1beta1::DeploymentSpec>,
/// Most recently observed status of the Deployment.
pub status: Option<crate::v1_8::api::extensions::v1beta1::DeploymentStatus>,
}
// Begin extensions/v1beta1/Deployment
// Generated from operation createExtensionsV1beta1NamespacedDeployment
impl Deployment {
/// create a Deployment
///
/// Use the returned [`crate::ResponseBody`]`<`[`CreateNamespacedDeploymentResponse`]`>` constructor, or [`CreateNamespacedDeploymentResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn create_namespaced_deployment(
namespace: &str,
body: &crate::v1_8::api::extensions::v1beta1::Deployment,
optional: CreateNamespacedDeploymentOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<CreateNamespacedDeploymentResponse>), crate::RequestError> {
let CreateNamespacedDeploymentOptional {
pretty,
} = optional;
let __url = format!("/apis/extensions/v1beta1/namespaces/{namespace}/deployments?",
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
if let Some(pretty) = pretty {
__query_pairs.append_pair("pretty", pretty);
}
let __url = __query_pairs.finish();
let mut __request = http::Request::post(__url);
let __body = serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
__request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Optional parameters of [`Deployment::create_namespaced_deployment`]
#[cfg(feature = "api")]
#[derive(Clone, Copy, Debug, Default)]
pub struct CreateNamespacedDeploymentOptional<'a> {
/// If 'true', then the output is pretty printed.
pub pretty: Option<&'a str>,
}
/// Use `<CreateNamespacedDeploymentResponse as Response>::try_from_parts` to parse the HTTP response body of [`Deployment::create_namespaced_deployment`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum CreateNamespacedDeploymentResponse {
Ok(crate::v1_8::api::extensions::v1beta1::Deployment),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for CreateNamespacedDeploymentResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let result = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((CreateNamespacedDeploymentResponse::Ok(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((CreateNamespacedDeploymentResponse::Other(result), read))
},
}
}
}
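// Editorial usage sketch (not generated): this crate only constructs the
// `http::Request` and parses raw response bytes; `send` below is a hypothetical
// stand-in for whatever HTTP client is paired with it.
//
//     let (request, _response_body) = Deployment::create_namespaced_deployment(
//         "default",
//         &deployment,
//         CreateNamespacedDeploymentOptional::default(),
//     )?;
//     let (status, buf) = send(request)?; // hypothetical: (http::StatusCode, Vec<u8>)
//     match <CreateNamespacedDeploymentResponse as crate::Response>::try_from_parts(status, &buf)? {
//         (CreateNamespacedDeploymentResponse::Ok(created), _read) => { /* deployment created */ }
//         (CreateNamespacedDeploymentResponse::Other(result), _read) => { /* inspect raw value */ }
//     }
//
// If `try_from_parts` returns `Err(crate::ResponseError::NeedMoreData)`, read more
// bytes from the connection and call it again with the longer buffer.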
// Generated from operation deleteExtensionsV1beta1CollectionNamespacedDeployment
impl Deployment {
/// delete collection of Deployment
///
/// Use the returned [`crate::ResponseBody`]`<`[`DeleteCollectionNamespacedDeploymentResponse`]`>` constructor, or [`DeleteCollectionNamespacedDeploymentResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `delete_optional`
///
/// Delete options. Use `Default::default()` to not pass any.
///
/// * `list_optional`
///
/// List options. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn delete_collection_namespaced_deployment(
namespace: &str,
delete_optional: crate::v1_8::DeleteOptional<'_>,
list_optional: crate::v1_8::ListOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<DeleteCollectionNamespacedDeploymentResponse>), crate::RequestError> {
let __url = format!("/apis/extensions/v1beta1/namespaces/{namespace}/deployments?",
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
list_optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let mut __request = http::Request::delete(__url);
let __body = serde_json::to_vec(&delete_optional).map_err(crate::RequestError::Json)?;
__request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Use `<DeleteCollectionNamespacedDeploymentResponse as Response>::try_from_parts` to parse the HTTP response body of [`Deployment::delete_collection_namespaced_deployment`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum DeleteCollectionNamespacedDeploymentResponse {
OkStatus(crate::v1_8::apimachinery::pkg::apis::meta::v1::Status),
OkValue(crate::v1_8::api::extensions::v1beta1::DeploymentList),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for DeleteCollectionNamespacedDeploymentResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let result: serde_json::Map<String, serde_json::Value> = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
let is_status = match result.get("kind") {
Some(serde_json::Value::String(s)) if s == "Status" => true,
_ => false,
};
if is_status {
let result = serde::Deserialize::deserialize(serde_json::Value::Object(result));
let result = result.map_err(crate::ResponseError::Json)?;
Ok((DeleteCollectionNamespacedDeploymentResponse::OkStatus(result), buf.len()))
}
else {
let result = serde::Deserialize::deserialize(serde_json::Value::Object(result));
let result = result.map_err(crate::ResponseError::Json)?;
Ok((DeleteCollectionNamespacedDeploymentResponse::OkValue(result), buf.len()))
}
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((DeleteCollectionNamespacedDeploymentResponse::Other(result), read))
},
}
}
}
// Generated from operation deleteExtensionsV1beta1NamespacedDeployment
impl Deployment {
/// delete a Deployment
///
/// Use the returned [`crate::ResponseBody`]`<`[`DeleteNamespacedDeploymentResponse`]`>` constructor, or [`DeleteNamespacedDeploymentResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the Deployment
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn delete_namespaced_deployment(
name: &str,
namespace: &str,
optional: crate::v1_8::DeleteOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<DeleteNamespacedDeploymentResponse>), crate::RequestError> {
let __url = format!("/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __request = http::Request::delete(__url);
let __body = serde_json::to_vec(&optional).map_err(crate::RequestError::Json)?;
__request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Use `<DeleteNamespacedDeploymentResponse as Response>::try_from_parts` to parse the HTTP response body of [`Deployment::delete_namespaced_deployment`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum DeleteNamespacedDeploymentResponse {
OkStatus(crate::v1_8::apimachinery::pkg::apis::meta::v1::Status),
OkValue(crate::v1_8::api::extensions::v1beta1::Deployment),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for DeleteNamespacedDeploymentResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let result: serde_json::Map<String, serde_json::Value> = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
let is_status = match result.get("kind") {
Some(serde_json::Value::String(s)) if s == "Status" => true,
_ => false,
};
if is_status {
let result = serde::Deserialize::deserialize(serde_json::Value::Object(result));
let result = result.map_err(crate::ResponseError::Json)?;
Ok((DeleteNamespacedDeploymentResponse::OkStatus(result), buf.len()))
}
else {
let result = serde::Deserialize::deserialize(serde_json::Value::Object(result));
let result = result.map_err(crate::ResponseError::Json)?;
Ok((DeleteNamespacedDeploymentResponse::OkValue(result), buf.len()))
}
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((DeleteNamespacedDeploymentResponse::Other(result), read))
},
}
}
}
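// Editorial note: Kubernetes DELETE endpoints may return either a `Status` object
// or the deleted resource itself, so the parser above disambiguates on the
// top-level `"kind"` field, e.g. `{"kind": "Status", ...}` maps to `OkStatus`
// while `{"kind": "Deployment", ...}` maps to `OkValue`.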
// Generated from operation listExtensionsV1beta1DeploymentForAllNamespaces
impl Deployment {
/// list or watch objects of kind Deployment
///
/// This operation only supports listing all items of this type.
///
/// Use the returned [`crate::ResponseBody`]`<`[`ListDeploymentForAllNamespacesResponse`]`>` constructor, or [`ListDeploymentForAllNamespacesResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn list_deployment_for_all_namespaces(
optional: crate::v1_8::ListOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<ListDeploymentForAllNamespacesResponse>), crate::RequestError> {
let __url = "/apis/extensions/v1beta1/deployments?".to_owned();
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let mut __request = http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Use `<ListDeploymentForAllNamespacesResponse as Response>::try_from_parts` to parse the HTTP response body of [`Deployment::list_deployment_for_all_namespaces`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum ListDeploymentForAllNamespacesResponse {
Ok(crate::v1_8::api::extensions::v1beta1::DeploymentList),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for ListDeploymentForAllNamespacesResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let result = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((ListDeploymentForAllNamespacesResponse::Ok(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
					else {
						match serde_json::from_slice(buf) {
							Ok(value) => (Ok(Some(value)), buf.len()),
							Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
							Err(err) => (Err(err), 0),
						}
					};
Ok((ListDeploymentForAllNamespacesResponse::Other(result), read))
},
}
}
}
// Generated from operation listExtensionsV1beta1NamespacedDeployment
impl Deployment {
/// list or watch objects of kind Deployment
///
/// This operation only supports listing all items of this type.
///
/// Use the returned [`crate::ResponseBody`]`<`[`ListNamespacedDeploymentResponse`]`>` constructor, or [`ListNamespacedDeploymentResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn list_namespaced_deployment(
namespace: &str,
optional: crate::v1_8::ListOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<ListNamespacedDeploymentResponse>), crate::RequestError> {
let __url = format!("/apis/extensions/v1beta1/namespaces/{namespace}/deployments?",
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let mut __request = http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Use `<ListNamespacedDeploymentResponse as Response>::try_from_parts` to parse the HTTP response body of [`Deployment::list_namespaced_deployment`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum ListNamespacedDeploymentResponse {
Ok(crate::v1_8::api::extensions::v1beta1::DeploymentList),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for ListNamespacedDeploymentResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let result = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((ListNamespacedDeploymentResponse::Ok(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((ListNamespacedDeploymentResponse::Other(result), read))
},
}
}
}
// Generated from operation patchExtensionsV1beta1NamespacedDeployment
impl Deployment {
/// partially update the specified Deployment
///
/// Use the returned [`crate::ResponseBody`]`<`[`PatchNamespacedDeploymentResponse`]`>` constructor, or [`PatchNamespacedDeploymentResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the Deployment
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn patch_namespaced_deployment(
name: &str,
namespace: &str,
body: &crate::v1_8::apimachinery::pkg::apis::meta::v1::Patch,
optional: crate::v1_8::PatchOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<PatchNamespacedDeploymentResponse>), crate::RequestError> {
let __url = format!("/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let mut __request = http::Request::patch(__url);
let __body = serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
__request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static(match body {
crate::v1_8::apimachinery::pkg::apis::meta::v1::Patch::Json(_) => "application/json-patch+json",
crate::v1_8::apimachinery::pkg::apis::meta::v1::Patch::Merge(_) => "application/merge-patch+json",
crate::v1_8::apimachinery::pkg::apis::meta::v1::Patch::StrategicMerge(_) => "application/strategic-merge-patch+json",
}));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
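// Editorial note: the Content-Type header above is derived from the `Patch`
// variant. Assuming the `Merge` variant wraps a `serde_json::Value`, a merge
// patch that scales a deployment could be built (hypothetically) as:
//
//     let patch = crate::v1_8::apimachinery::pkg::apis::meta::v1::Patch::Merge(
//         serde_json::json!({ "spec": { "replicas": 3 } }),
//     );
//
// and would be sent as `application/merge-patch+json`.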
/// Use `<PatchNamespacedDeploymentResponse as Response>::try_from_parts` to parse the HTTP response body of [`Deployment::patch_namespaced_deployment`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum PatchNamespacedDeploymentResponse {
Ok(crate::v1_8::api::extensions::v1beta1::Deployment),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for PatchNamespacedDeploymentResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let result = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((PatchNamespacedDeploymentResponse::Ok(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((PatchNamespacedDeploymentResponse::Other(result), read))
},
}
}
}
// Generated from operation patchExtensionsV1beta1NamespacedDeploymentStatus
impl Deployment {
/// partially update status of the specified Deployment
///
/// Use the returned [`crate::ResponseBody`]`<`[`PatchNamespacedDeploymentStatusResponse`]`>` constructor, or [`PatchNamespacedDeploymentStatusResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the Deployment
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn patch_namespaced_deployment_status(
name: &str,
namespace: &str,
body: &crate::v1_8::apimachinery::pkg::apis::meta::v1::Patch,
optional: crate::v1_8::PatchOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<PatchNamespacedDeploymentStatusResponse>), crate::RequestError> {
let __url = format!("/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/status?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let mut __request = http::Request::patch(__url);
let __body = serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
__request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static(match body {
crate::v1_8::apimachinery::pkg::apis::meta::v1::Patch::Json(_) => "application/json-patch+json",
crate::v1_8::apimachinery::pkg::apis::meta::v1::Patch::Merge(_) => "application/merge-patch+json",
crate::v1_8::apimachinery::pkg::apis::meta::v1::Patch::StrategicMerge(_) => "application/strategic-merge-patch+json",
}));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Use `<PatchNamespacedDeploymentStatusResponse as Response>::try_from_parts` to parse the HTTP response body of [`Deployment::patch_namespaced_deployment_status`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum PatchNamespacedDeploymentStatusResponse {
Ok(crate::v1_8::api::extensions::v1beta1::Deployment),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for PatchNamespacedDeploymentStatusResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let result = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((PatchNamespacedDeploymentStatusResponse::Ok(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((PatchNamespacedDeploymentStatusResponse::Other(result), read))
},
}
}
}
// Generated from operation readExtensionsV1beta1NamespacedDeployment
impl Deployment {
/// read the specified Deployment
///
/// Use the returned [`crate::ResponseBody`]`<`[`ReadNamespacedDeploymentResponse`]`>` constructor, or [`ReadNamespacedDeploymentResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the Deployment
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn read_namespaced_deployment(
name: &str,
namespace: &str,
optional: ReadNamespacedDeploymentOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<ReadNamespacedDeploymentResponse>), crate::RequestError> {
let ReadNamespacedDeploymentOptional {
exact,
export,
pretty,
} = optional;
let __url = format!("/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
if let Some(exact) = exact {
__query_pairs.append_pair("exact", &exact.to_string());
}
if let Some(export) = export {
__query_pairs.append_pair("export", &export.to_string());
}
if let Some(pretty) = pretty {
__query_pairs.append_pair("pretty", pretty);
}
let __url = __query_pairs.finish();
let mut __request = http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Optional parameters of [`Deployment::read_namespaced_deployment`]
#[cfg(feature = "api")]
#[derive(Clone, Copy, Debug, Default)]
pub struct ReadNamespacedDeploymentOptional<'a> {
/// Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
pub exact: Option<bool>,
/// Should this value be exported. Export strips fields that a user can not specify.
pub export: Option<bool>,
/// If 'true', then the output is pretty printed.
pub pretty: Option<&'a str>,
}
/// Use `<ReadNamespacedDeploymentResponse as Response>::try_from_parts` to parse the HTTP response body of [`Deployment::read_namespaced_deployment`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum ReadNamespacedDeploymentResponse {
Ok(crate::v1_8::api::extensions::v1beta1::Deployment),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for ReadNamespacedDeploymentResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let result = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((ReadNamespacedDeploymentResponse::Ok(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((ReadNamespacedDeploymentResponse::Other(result), read))
},
}
}
}
// Generated from operation readExtensionsV1beta1NamespacedDeploymentStatus
impl Deployment {
/// read status of the specified Deployment
///
/// Use the returned [`crate::ResponseBody`]`<`[`ReadNamespacedDeploymentStatusResponse`]`>` constructor, or [`ReadNamespacedDeploymentStatusResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the Deployment
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn read_namespaced_deployment_status(
name: &str,
namespace: &str,
optional: ReadNamespacedDeploymentStatusOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<ReadNamespacedDeploymentStatusResponse>), crate::RequestError> {
let ReadNamespacedDeploymentStatusOptional {
pretty,
} = optional;
let __url = format!("/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/status?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
if let Some(pretty) = pretty {
__query_pairs.append_pair("pretty", pretty);
}
let __url = __query_pairs.finish();
let mut __request = http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Optional parameters of [`Deployment::read_namespaced_deployment_status`]
#[cfg(feature = "api")]
#[derive(Clone, Copy, Debug, Default)]
pub struct ReadNamespacedDeploymentStatusOptional<'a> {
/// If 'true', then the output is pretty printed.
pub pretty: Option<&'a str>,
}
/// Use `<ReadNamespacedDeploymentStatusResponse as Response>::try_from_parts` to parse the HTTP response body of [`Deployment::read_namespaced_deployment_status`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum ReadNamespacedDeploymentStatusResponse {
Ok(crate::v1_8::api::extensions::v1beta1::Deployment),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for ReadNamespacedDeploymentStatusResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let result = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((ReadNamespacedDeploymentStatusResponse::Ok(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((ReadNamespacedDeploymentStatusResponse::Other(result), read))
},
}
}
}
// Generated from operation replaceExtensionsV1beta1NamespacedDeployment
impl Deployment {
/// replace the specified Deployment
///
/// Use the returned [`crate::ResponseBody`]`<`[`ReplaceNamespacedDeploymentResponse`]`>` constructor, or [`ReplaceNamespacedDeploymentResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the Deployment
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn replace_namespaced_deployment(
name: &str,
namespace: &str,
body: &crate::v1_8::api::extensions::v1beta1::Deployment,
optional: ReplaceNamespacedDeploymentOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<ReplaceNamespacedDeploymentResponse>), crate::RequestError> {
let ReplaceNamespacedDeploymentOptional {
pretty,
} = optional;
let __url = format!("/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
if let Some(pretty) = pretty {
__query_pairs.append_pair("pretty", pretty);
}
let __url = __query_pairs.finish();
let mut __request = http::Request::put(__url);
let __body = serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
__request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Optional parameters of [`Deployment::replace_namespaced_deployment`]
#[cfg(feature = "api")]
#[derive(Clone, Copy, Debug, Default)]
pub struct ReplaceNamespacedDeploymentOptional<'a> {
/// If 'true', then the output is pretty printed.
pub pretty: Option<&'a str>,
}
/// Use `<ReplaceNamespacedDeploymentResponse as Response>::try_from_parts` to parse the HTTP response body of [`Deployment::replace_namespaced_deployment`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum ReplaceNamespacedDeploymentResponse {
Ok(crate::v1_8::api::extensions::v1beta1::Deployment),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for ReplaceNamespacedDeploymentResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let result = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((ReplaceNamespacedDeploymentResponse::Ok(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((ReplaceNamespacedDeploymentResponse::Other(result), read))
},
}
}
}
// Generated from operation replaceExtensionsV1beta1NamespacedDeploymentStatus
impl Deployment {
/// replace status of the specified Deployment
///
/// Use the returned [`crate::ResponseBody`]`<`[`ReplaceNamespacedDeploymentStatusResponse`]`>` constructor, or [`ReplaceNamespacedDeploymentStatusResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the Deployment
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn replace_namespaced_deployment_status(
name: &str,
namespace: &str,
body: &crate::v1_8::api::extensions::v1beta1::Deployment,
optional: ReplaceNamespacedDeploymentStatusOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<ReplaceNamespacedDeploymentStatusResponse>), crate::RequestError> {
let ReplaceNamespacedDeploymentStatusOptional {
pretty,
} = optional;
let __url = format!("/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/status?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
if let Some(pretty) = pretty {
__query_pairs.append_pair("pretty", pretty);
}
let __url = __query_pairs.finish();
let mut __request = http::Request::put(__url);
let __body = serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
__request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Optional parameters of [`Deployment::replace_namespaced_deployment_status`]
#[cfg(feature = "api")]
#[derive(Clone, Copy, Debug, Default)]
pub struct ReplaceNamespacedDeploymentStatusOptional<'a> {
/// If 'true', then the output is pretty printed.
pub pretty: Option<&'a str>,
}
/// Use `<ReplaceNamespacedDeploymentStatusResponse as Response>::try_from_parts` to parse the HTTP response body of [`Deployment::replace_namespaced_deployment_status`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum ReplaceNamespacedDeploymentStatusResponse {
Ok(crate::v1_8::api::extensions::v1beta1::Deployment),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for ReplaceNamespacedDeploymentStatusResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let result = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((ReplaceNamespacedDeploymentStatusResponse::Ok(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((ReplaceNamespacedDeploymentStatusResponse::Other(result), read))
},
}
}
}
// Generated from operation watchExtensionsV1beta1DeploymentForAllNamespaces
impl Deployment {
/// list or watch objects of kind Deployment
///
/// This operation only supports watching one item, or a list of items, of this type for changes.
///
/// Use the returned [`crate::ResponseBody`]`<`[`WatchDeploymentForAllNamespacesResponse`]`>` constructor, or [`WatchDeploymentForAllNamespacesResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn watch_deployment_for_all_namespaces(
optional: crate::v1_8::WatchOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<WatchDeploymentForAllNamespacesResponse>), crate::RequestError> {
let __url = "/apis/extensions/v1beta1/deployments?".to_owned();
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let mut __request = http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Use `<WatchDeploymentForAllNamespacesResponse as Response>::try_from_parts` to parse the HTTP response body of [`Deployment::watch_deployment_for_all_namespaces`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum WatchDeploymentForAllNamespacesResponse {
Ok(crate::v1_8::apimachinery::pkg::apis::meta::v1::WatchEvent<Deployment>),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for WatchDeploymentForAllNamespacesResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let mut deserializer = serde_json::Deserializer::from_slice(buf).into_iter();
let (result, byte_offset) = match deserializer.next() {
Some(Ok(value)) => (value, deserializer.byte_offset()),
Some(Err(ref err)) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Some(Err(err)) => return Err(crate::ResponseError::Json(err)),
None => return Err(crate::ResponseError::NeedMoreData),
};
Ok((WatchDeploymentForAllNamespacesResponse::Ok(result), byte_offset))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((WatchDeploymentForAllNamespacesResponse::Other(result), read))
},
}
}
}
// Generated from operation watchExtensionsV1beta1NamespacedDeployment
impl Deployment {
/// list or watch objects of kind Deployment
///
/// This operation only supports watching one item, or a list of items, of this type for changes.
///
/// Use the returned [`crate::ResponseBody`]`<`[`WatchNamespacedDeploymentResponse`]`>` constructor, or [`WatchNamespacedDeploymentResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn watch_namespaced_deployment(
namespace: &str,
optional: crate::v1_8::WatchOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<WatchNamespacedDeploymentResponse>), crate::RequestError> {
let __url = format!("/apis/extensions/v1beta1/namespaces/{namespace}/deployments?",
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let mut __request = http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Use `<WatchNamespacedDeploymentResponse as Response>::try_from_parts` to parse the HTTP response body of [`Deployment::watch_namespaced_deployment`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum WatchNamespacedDeploymentResponse {
Ok(crate::v1_8::apimachinery::pkg::apis::meta::v1::WatchEvent<Deployment>),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for WatchNamespacedDeploymentResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let mut deserializer = serde_json::Deserializer::from_slice(buf).into_iter();
let (result, byte_offset) = match deserializer.next() {
Some(Ok(value)) => (value, deserializer.byte_offset()),
Some(Err(ref err)) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Some(Err(err)) => return Err(crate::ResponseError::Json(err)),
None => return Err(crate::ResponseError::NeedMoreData),
};
Ok((WatchNamespacedDeploymentResponse::Ok(result), byte_offset))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((WatchNamespacedDeploymentResponse::Other(result), read))
},
}
}
}
// End extensions/v1beta1/Deployment
impl crate::Resource for Deployment {
fn api_version() -> &'static str {
"extensions/v1beta1"
}
fn group() -> &'static str {
"extensions"
}
fn kind() -> &'static str {
"Deployment"
}
fn version() -> &'static str {
"v1beta1"
}
}
impl crate::Metadata for Deployment {
type Ty = crate::v1_8::apimachinery::pkg::apis::meta::v1::ObjectMeta;
fn metadata(&self) -> Option<&<Self as crate::Metadata>::Ty> {
self.metadata.as_ref()
}
}
impl<'de> serde::Deserialize<'de> for Deployment {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
#[allow(non_camel_case_types)]
enum Field {
Key_api_version,
Key_kind,
Key_metadata,
Key_spec,
Key_status,
Other,
}
impl<'de> serde::Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = Field;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "field identifier")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error {
Ok(match v {
"apiVersion" => Field::Key_api_version,
"kind" => Field::Key_kind,
"metadata" => Field::Key_metadata,
"spec" => Field::Key_spec,
"status" => Field::Key_status,
_ => Field::Other,
})
}
}
deserializer.deserialize_identifier(Visitor)
}
}
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = Deployment;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "struct Deployment")
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> {
let mut value_metadata: Option<crate::v1_8::apimachinery::pkg::apis::meta::v1::ObjectMeta> = None;
let mut value_spec: Option<crate::v1_8::api::extensions::v1beta1::DeploymentSpec> = None;
let mut value_status: Option<crate::v1_8::api::extensions::v1beta1::DeploymentStatus> = None;
while let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? {
match key {
Field::Key_api_version => {
let value_api_version: String = serde::de::MapAccess::next_value(&mut map)?;
if value_api_version != <Self::Value as crate::Resource>::api_version() {
return Err(serde::de::Error::invalid_value(serde::de::Unexpected::Str(&value_api_version), &<Self::Value as crate::Resource>::api_version()));
}
},
Field::Key_kind => {
let value_kind: String = serde::de::MapAccess::next_value(&mut map)?;
if value_kind != <Self::Value as crate::Resource>::kind() {
return Err(serde::de::Error::invalid_value(serde::de::Unexpected::Str(&value_kind), &<Self::Value as crate::Resource>::kind()));
}
},
Field::Key_metadata => value_metadata = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_spec => value_spec = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_status => value_status = serde::de::MapAccess::next_value(&mut map)?,
Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; },
}
}
Ok(Deployment {
metadata: value_metadata,
spec: value_spec,
status: value_status,
})
}
}
deserializer.deserialize_struct(
"Deployment",
&[
"apiVersion",
"kind",
"metadata",
"spec",
"status",
],
Visitor,
)
}
}
impl serde::Serialize for Deployment {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer {
let mut state = serializer.serialize_struct(
"Deployment",
2 +
self.metadata.as_ref().map_or(0, |_| 1) +
self.spec.as_ref().map_or(0, |_| 1) +
self.status.as_ref().map_or(0, |_| 1),
)?;
serde::ser::SerializeStruct::serialize_field(&mut state, "apiVersion", <Self as crate::Resource>::api_version())?;
serde::ser::SerializeStruct::serialize_field(&mut state, "kind", <Self as crate::Resource>::kind())?;
if let Some(value) = &self.metadata {
serde::ser::SerializeStruct::serialize_field(&mut state, "metadata", value)?;
}
if let Some(value) = &self.spec {
serde::ser::SerializeStruct::serialize_field(&mut state, "spec", value)?;
}
if let Some(value) = &self.status {
serde::ser::SerializeStruct::serialize_field(&mut state, "status", value)?;
}
serde::ser::SerializeStruct::end(state)
}
}
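// Usage sketch (not part of the generated bindings): the associated functions
// above only construct `http::Request` values; the caller executes them with
// any HTTP client and feeds the raw response bytes through `try_from_parts`.
// The `status` and `buf` values below are assumptions for illustration.
//
//     let (request, _response_body) = Deployment::read_namespaced_deployment(
//         "my-deployment", "default", Default::default())?;
//     // ... send `request`, collect the response `status` and body bytes `buf` ...
//     let (parsed, _read) =
//         <ReadNamespacedDeploymentResponse as crate::Response>::try_from_parts(status, &buf)?;
//     if let ReadNamespacedDeploymentResponse::Ok(deployment) = parsed {
//         println!("{:?}", deployment.metadata);
//     }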
lib.rs | // The &'static here means the return type has a static lifetime.
// This is a Rust feature that you don't need to worry about now.
pub fn hello() -> &'static str {
    "Hello, World!"
}
todoJunto.py | #!/usr/bin/env python
# coding: utf-8
import codecs
import sys
import sklearn as sk
import pandas as pd
import numpy as np
import math
from sklearn import preprocessing
from sklearn.decomposition import PCA
from src.pca.algoritmo_QR import eigenvectores_eigenvalores_QR_vf
from src.pca.metodo_potencia_deflation import power_iteration
from src.pca.metodo_potencia_deflation import power_deflation
def PCA_from_sklearn(X):
"""
    PCA_from_sklearn(X): function returning the principal components computed with sklearn.

    Parameters
    ----------
    n_components: number of components.
    svd_solver: str {'auto', 'full', 'arpack', 'randomized'}
        'full' is chosen, which means the full SVD is computed by calling the
        standard LAPACK solver via scipy.linalg.svd, and the components are selected by postprocessing.

    Attributes
    ----------
    varianza_explicada: percentage of variance explained by each component.
    valores_singulares: singular values corresponding to each component.
    pca.components_: principal axes representing the directions of maximum variance in the data.
    eigenvalues: the eigenvalues computed from the covariance matrix.

    Method
    ------
    fit_transform: fits the model to the data and applies the dimensionality reduction to the data.
"""
X = pd.DataFrame(X)
n_components = len(X.columns)
pca_1 = PCA(n_components, svd_solver='full')
componentesprincipales_1 = pca_1.fit_transform(X)
pca_1.components_
var_exp = pca_1.explained_variance_ratio_
    ## The number of components is chosen from the cumulative explained variance, which must reach 80%.
    var_acumulada = var_exp.cumsum()
    conteo = var_acumulada < 0.8
    n_componentes = conteo.sum() + 1
pca = PCA(n_componentes, svd_solver='full')
componentesprincipales = pca.fit_transform(X)
pca.components_
varianza_explicada = pca.explained_variance_ratio_
eigenvalues = pca.explained_variance_
val_sing = pca.singular_values_
return pca, varianza_explicada, componentesprincipales, val_sing, pca.components_, eigenvalues
def PCA_from_SVD(A):
"""
    Function for PCA from numpy's SVD.

    params:  A                    data matrix
    return:  valores_singulares   singular values of the SVD decomposition
             componentes          coefficients used to compute the principal components
             Z                    the transformed data (principal components)
             varianza_explicada   the variance explained by each principal component
"""
    # Center the data
    A = np.array(A)  # convert the data to a numpy array in case it comes from a DataFrame
    A_centered = A - A.mean(axis=0)
    # Compute the SVD
    U, S, Vt = np.linalg.svd(A_centered, full_matrices=False)
    # Singular values
    valores_singulares = S
    # Components (coefficients)
    componentes = Vt
    # Transformed data (principal components)
    Z = A_centered @ np.transpose(Vt)
    # Explained variance
    varianza_explicada = S**2/np.sum(S**2)
    # Choose the number of components automatically from the explained variance
    # Threshold of 80% cumulative explained variance
    varianza_acumulada = varianza_explicada.cumsum()
    conteo = varianza_acumulada < 0.8
    num_componentes = conteo.sum() + 1
    # return 4 objects
return valores_singulares[:num_componentes], componentes[:num_componentes], Z[:,:num_componentes], varianza_explicada[:num_componentes]
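# A minimal usage sketch for PCA_from_SVD; the synthetic matrix below is an
# illustrative assumption, not part of the original pipeline.
def _demo_pca_from_svd():
    rng = np.random.RandomState(0)
    X = rng.normal(size=(100, 5))
    valores_singulares, componentes, Z, varianza_explicada = PCA_from_SVD(X)
    # Z keeps only the components needed to reach 80% cumulative variance
    print(Z.shape, varianza_explicada.cumsum()[-1])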
def PCA_from_SVD_jacobi(A):
"""
    Function for PCA from an SVD computed with the team's Jacobi method.

    params:  A                    data matrix
    return:  valores_singulares   singular values of the SVD decomposition
             componentes          coefficients used to compute the principal components
             Z                    the transformed data (principal components)
             varianza_explicada   the variance explained by each principal component
    """
    # Center the data
    A = np.array(A)  # convert the data to a numpy array in case it comes from a DataFrame
    A_centered = A - A.mean(axis=0)
    # Compute the SVD with the function created by the team
    U, S, Vt = svd_jacobi_aprox(A_centered, 1e-12, 500)
    # Singular values
    valores_singulares = S
    # Components (coefficients)
    componentes = Vt
    # Transformed data (principal components)
    Z = A_centered @ np.transpose(Vt)
    # Explained variance
    varianza_explicada = S**2/np.sum(S**2)
    # Choose the number of components automatically from the explained variance
    # Threshold of 80% cumulative explained variance
    varianza_acumulada = varianza_explicada.cumsum()
    conteo = varianza_acumulada < 0.8
    num_componentes = conteo.sum() + 1
    # return 4 objects
    return valores_singulares[:num_componentes], componentes[:num_componentes], Z[:,:num_componentes], varianza_explicada[:num_componentes]
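# Note: the Jacobi SVD above relies on a team-provided helper with this
# (assumed) contract, mirroring np.linalg.svd(A, full_matrices=False):
#     U, S, Vt = svd_jacobi_aprox(A, tol, max_sweeps)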
def PCA_from_QR_vf(data,niter = 450):
"""
    Function for PCA from the eigenvectors of the covariance matrix.

    params:  data   data matrix
             niter  maximum number of iterations
    return:  E                    eigenvalues of the covariance matrix
             componentes          coefficients used to compute the principal components
                                  (eigenvectors of the covariance matrix)
             Z                    the transformed data (principal components)
             varianza_explicada   the variance explained by each principal component
    Depends on the function: eigenvectores_eigenvalores_QR_vf
    """
    # convert to array
    A = np.array(data)
    # Center the data
    mean_vec = np.mean(A, axis=0)
    datos_centrados = A - mean_vec
    # Covariance matrix
    C = datos_centrados.T.dot(datos_centrados) / (A.shape[0] - 1)
    # Run the QR algorithm
    E, Q = eigenvectores_eigenvalores_QR_vf(C, niter)
    # Components (coefficients)
    componentes = Q.T
    # Transformed data (principal components)
    Z = datos_centrados @ Q
    # Explained variance
    varianza_explicada = E/np.sum(E)
    # Choose the number of components automatically from the explained variance
    # Threshold of 80% cumulative explained variance
    varianza_acumulada = varianza_explicada.cumsum()
    conteo = varianza_acumulada < 0.8
    num_componentes = conteo.sum() + 1
    # return 4 objects
    return E[:num_componentes], componentes[:num_componentes], Z[:,:num_componentes], varianza_explicada[:num_componentes]
def PCA_from_potencia(X):
"""
    Function that computes PCA via the power method with Hotelling deflation.

    params:  X   data matrix
    return:  evalues_pow          numpy array with the eigenvalues of the covariance matrix
             evectors_pow         numpy array with the corresponding eigenvectors
             Z                    the transformed data (principal components)
             varianza_explicada   the variance explained by each principal component
    """
    # convert to array
    A = np.array(X)
    # Center the data
    mean_vec = np.mean(A, axis=0)
    datos_centrados = A - mean_vec
    # Covariance matrix of the centered data
    cov = np.dot(datos_centrados.T, datos_centrados) / A.shape[0]
    # Apply the power method with deflation
    evalues_pow, evectors_pow = power_deflation(cov, 2000)
    # Explained variance
    varianza_explicada = evalues_pow/np.sum(evalues_pow)
    # Transformed data (principal components)
    Z = datos_centrados @ evectors_pow
    # Choose the number of components automatically from the explained variance
    # Threshold of 80% cumulative explained variance
    varianza_acumulada = varianza_explicada.cumsum()
    conteo = varianza_acumulada < 0.8
    num_componentes = conteo.sum() + 1
    return evalues_pow[:num_componentes], evectors_pow.T[:num_componentes], Z[:,:num_componentes], varianza_explicada[:num_componentes]
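# Cross-check sketch (the synthetic data below is an assumption for
# illustration): the explained-variance ratios from the SVD implementation
# should agree with sklearn's PCA on the retained components.
def _sanity_check_pca_methods():
    rng = np.random.RandomState(1)
    X = rng.normal(size=(60, 4))
    _, _, _, var_svd = PCA_from_SVD(X)
    _, var_sklearn, _, _, _, _ = PCA_from_sklearn(X)
    k = min(len(var_svd), len(var_sklearn))
    assert np.allclose(var_svd[:k], var_sklearn[:k])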
logreg.py | # ImageNet-CoG Benchmark
# Copyright 2021-present NAVER Corp.
# 3-Clause BSD License
import argparse
import copy
import logging
import math
import os
import shutil
import time
import optuna
import torch as th
import feature_ops
import metrics
import utils
from iterators import TorchIterator
from meters import AverageMeter, ProgressMeter
logger = logging.getLogger()
class LogReg:
"""
Logistic regression classifier with mini-batch SGD.
"""
def __init__(self, args, cfg):
self.args = args
self.cfg = cfg
# load the training set features
trainset = feature_ops.load_feature_set(
args.train_features_path, "train", cfg.CLF.NORM_FTS
)
if args.val:
# randomly split the training set into train + val
logger.info("Splitting the training set into train and val")
trainset, testset = feature_ops.split_trainset(trainset, cfg.CLF.VAL_PERC)
else:
# load the test set
testset = feature_ops.load_feature_set(args.test_features_path, "test", cfg.CLF.NORM_FTS)
if cfg.CLF.N_SHOT > 0:
logger.info(
"Simulating few-shot learning setting, {} images per class.".format(
cfg.CLF.N_SHOT
)
)
trainset = feature_ops.make_fewshot_dataset(trainset, cfg.CLF.N_SHOT)
self.trainset = trainset
self.testset = testset
self.trainset.print_info()
self.testset.print_info()
        # determine the number of classes
if len(list(self.trainset.y.shape)) == 1:
classes = th.unique(self.trainset.y)
assert th.all(classes == th.unique(self.testset.y))
args.n_classes = classes.size(0)
# move all features to the device
if args.device == "cuda":
feature_ops.move_data_to_cuda([self.trainset, self.testset])
def __call__(self, trial=None):
"""
The function called by Optuna.
"""
# empty the cache allocated in the previous call
th.cuda.empty_cache()
args = copy.deepcopy(self.args)
cfg = self.cfg
x_train = self.trainset.x
y_train = self.trainset.y
x_test = self.testset.x
y_test = self.testset.y
# create training and test set iterators
train_iter = TorchIterator((x_train, y_train), cfg.CLF.BATCH_SIZE, shuffle=True)
test_iter = TorchIterator((x_test, y_test), cfg.CLF.BATCH_SIZE, shuffle=False)
# define logistic classifier
model = th.nn.Linear(x_train.size(1), args.n_classes).to(args.device)
crit = th.nn.CrossEntropyLoss().to(args.device)
# sample a learning rate and weight decay
if trial is not None:
lr_intv = cfg.CLF.LR_INTV
wd_intv = cfg.CLF.WD_INTV
args.lr = trial.suggest_loguniform("lr", lr_intv[0], lr_intv[1])
args.wd = trial.suggest_loguniform("wd", wd_intv[0], wd_intv[1])
optim = th.optim.SGD(
model.parameters(), lr=args.lr, momentum=args.mom, weight_decay=args.wd
)
args.exp_dir = os.path.join(
args.output_dir,
"{}-lr-{}_wd-{}".format("val" if args.val else "final", args.lr, args.wd),
)
os.makedirs(args.exp_dir, exist_ok=True)
# write the model definition into exp_dir
utils.write_to_file(str(model), os.path.join(args.exp_dir, "model.txt"))
# logs computed during training / evaluation
args.logs = {
"train/loss": [],
"train/top1": [],
"train/top5": [],
"test/loss": [],
"test/top1": [],
"test/top5": [],
"lr": [],
}
# predictions over the evaluation sets
args.preds = []
for epoch in range(cfg.CLF.N_EPOCHS):
if not args.val:
logger.info(f"**Epoch:{epoch}**")
args.epoch = epoch
train_stat = train(train_iter, model, crit, optim, epoch, args)
validate(test_iter, model, crit, args)
adjust_learning_rate(optim, args, cfg)
# if something went wrong during training
            # e.g. SGD diverged
            if train_stat == -1:
                break

        # save the logs
        utils.save_pickle(args.logs, f"{args.exp_dir}/logs.pkl")
# save the predictions
utils.save_pickle(args.preds, f"{args.exp_dir}/preds.pkl")
# save the whole args, for ease of access
utils.save_pickle(vars(args), f"{args.exp_dir}/args.pkl")
# save also the final model
th.save(
{
"model": model.state_dict(),
},
f"{args.exp_dir}/model.pth",
)
# return the last test accuracy
return args.logs["test/top1"][-1]
def train(train_loader, model, criterion, optimizer, epoch, args):
"""
Train the classifier for one epoch.
"""
batch_time = AverageMeter("Time", ":6.3f")
losses = AverageMeter("Loss", ":.4e")
top1 = AverageMeter("Acc@1", ":6.2f")
top5 = AverageMeter("Acc@5", ":6.2f")
progress = ProgressMeter(
len(train_loader),
[batch_time, losses, top1, top5],
prefix="Epoch: [{}]".format(epoch),
)
# switch to train mode
model.train()
end = time.time()
for i, (fts, lbls) in enumerate(train_loader):
fts = fts.to(args.device)
lbls = lbls.to(args.device)
# compute output
output = model(fts)
loss = criterion(output, lbls)
if not th.isfinite(loss):
logger.info("Loss ({}) is not finite, terminating".format(loss.item()))
optimizer.zero_grad()
return -1
# measure accuracy and record loss
acc1, acc5 = metrics.accuracy(output, lbls, topk=(1, 5))
losses.update(loss.item(), fts.size(0))
top1.update(acc1.item(), fts.size(0))
top5.update(acc5.item(), fts.size(0))
# compute gradient and do SGD step
optimizer.zero_grad()
loss.backward()
optimizer.step()
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
if (not args.val) and (i % args.print_freq == 0):
progress.display(i)
args.logs["train/loss"].append(losses.avg)
args.logs["train/top1"].append(top1.avg)
args.logs["train/top5"].append(top5.avg)
return 0
def validate(val_loader, model, criterion, args):
losses = AverageMeter("Loss", ":.4e")
top1 = AverageMeter("Acc@1", ":6.2f")
top5 = AverageMeter("Acc@5", ":6.2f")
# switch to evaluate mode
model.eval()
# keep predictions per class
    preds = th.ones(len(val_loader.tensors[0]), dtype=th.int32, device=args.device) * -1
six = 0
with th.no_grad():
for i, (fts, lbls) in enumerate(val_loader):
fts = fts.to(args.device)
lbls = lbls.to(args.device)
bs = fts.size(0)
# compute output
output = model(fts)
loss = criterion(output, lbls)
# store the predicted classes
preds[six:six + bs] = th.argmax(output, dim=1)
six += bs
# measure accuracy and record loss
acc1, acc5 = metrics.accuracy(output, lbls, topk=(1, 5))
losses.update(loss.item(), bs)
top1.update(acc1[0].item(), bs)
top5.update(acc5[0].item(), bs)
# make sure that there is no invalid prediction
assert th.all(preds >= 0).item()
args.preds.append(preds.detach().cpu())
args.logs["test/loss"].append(losses.avg)
args.logs["test/top1"].append(top1.avg)
args.logs["test/top5"].append(top5.avg)
if not args.val:
logger.info(
" * Acc@1:{top1.avg:.3f} - Acc@5:{top5.avg:.3f}".format(
top1=top1, top5=top5
)
)
def adjust_learning_rate(optimizer, args, cfg):
"""Decay the learning rate based on cosine schedule"""
lr = args.lr
lr *= 0.5 * (1.0 + math.cos(math.pi * args.epoch / cfg.CLF.N_EPOCHS))
for param_group in optimizer.param_groups:
param_group["lr"] = lr
args.logs["lr"].append(lr)
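# Worked example of the cosine schedule above (illustrative numbers): with
# args.lr = 0.1 and cfg.CLF.N_EPOCHS = 100, epoch 0 keeps lr = 0.1, epoch 50
# gives lr = 0.05, and epoch 100 would decay to 0.0, following
# 0.5 * (1.0 + cos(pi * epoch / N_EPOCHS)).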
def save_checkpoint(state, is_best, filename="checkpoint.pth.tar"):
th.save(state, filename)
if is_best:
shutil.copyfile(filename, "model_best.pth.tar")
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--model', type=utils.none_or_string_flag,
help='Name of the model in the <model_title>_<architecture_name> form.'
'See the table of models in ./prepare_models/README.md for all the model names we support.'
'This is an optional argument that needs to be set along with --models_root_dir and --dataset.'
'When these three arguments are set, the script will load features from:'
'<models_root_dir>/<model_title>/<architecture_name>/<dataset>/features_*/X_Y.pth.'
'If you would like to load pre-extracted features from somewhere else'
'then ignore this argument and provide the --train_features_dir and --test_features_dir arguments accordingly')
parser.add_argument('--models_root_dir', type=utils.none_or_string_flag,
help='Root directory for all models, see prepare_models/README.md for a detailed explanation.'
'This is an optional argument that needs to be set along with --model and --dataset.'
'Please see the help message for the --model argument as well.')
parser.add_argument("--dataset", type=utils.none_or_string_flag,
help="On which dataset to learn classifiers"
'Possible values are ("in1k", "cog_l1", "cog_l2", "cog_l3", "cog_l4", "cog_l5")'
'This is an optional argument that needs to be set along with --models_root_dir and --model.'
'Please see the help message for the --model argument as well.')
parser.add_argument('--train_features_dir', type=utils.none_or_string_flag,
help='Path to the directory containing pre-extracted training set features.'
'We expect a features file "X_Y.pth" under <train_features_dir>.'
'This is an optional argument that needs to be set if --models_root_dir, --model and --dataset are not set.')
parser.add_argument('--test_features_dir', type=utils.none_or_string_flag,
help='Path to the directory containing pre-extracted test set features.'
'We expect a features file "X_Y.pth" under <test_features_dir>.'
'This is an optional argument that needs to be set if --models_root_dir, --model and --dataset are not set.')
parser.add_argument('--output_dir', type=utils.none_or_string_flag,
help='Where to log program logs.'
'This is an optional argument that needs to be set if --models_root_dir is not set.'
'If not provided, we try to save the logs under'
'<models_root_dir>/<model_title>/<architecture_name>/<dataset>/eval_logreg/seed*')
# learning rate and momentum are tuned in this program, do not manually set.
parser.add_argument("--lr", type=float, default=0.0, help="initial learning rate")
parser.add_argument("--wd", type=float, default=0.0, help="weight decay")
parser.add_argument("--mom", type=float, default=0.9, help="momentum")
# program-related options
parser.add_argument("--print_freq", default=100, type=int, help="print frequency (default: 10)")
parser.add_argument("--device", type=str, default="cuda")
# optionally to overwrite the default config
parser.add_argument("opts", default=None,
help="see configs/default.py for all options",
nargs=argparse.REMAINDER)
args = parser.parse_args()
if args.device == "cuda" and not th.cuda.is_available():
print("CUDA is not available, I will run on CPU.")
args.device = "cpu"
# load the config file
# create output directory,
# locate pre-extracted features,
# initialize program logger,
# save args and cfg
# this function sets the following arg variables:
# - train_features_path, type=str
# - test_features_path, type=str
# - output_dir, type=str
args, cfg = utils.init_program(args, _for="logreg")
# tune hyper-parameters with optuna
logger.info("Running Optuna...")
hps_sampler = optuna.samplers.TPESampler(multivariate=True, seed=cfg.EVAL.SEED)
study = optuna.create_study(sampler=hps_sampler, direction="maximize")
args.val = True
logreg = LogReg(args, cfg)
study.optimize(logreg, n_trials=cfg.CLF.N_TRIALS, n_jobs=1, show_progress_bar=False)
utils.save_pickle(study, os.path.join(args.output_dir, "study.pkl"))
logger.info("")
logger.info("*" * 50)
logger.info("Hyper-parameter search ended")
logger.info("best_trial:")
logger.info(str(study.best_trial))
logger.info("best_params:")
logger.info(str(study.best_params))
logger.info("*" * 50)
logger.info("")
# train the final classifier with the tuned hyper-parameters
del logreg
th.cuda.empty_cache()
args.lr = study.best_params["lr"]
args.wd = study.best_params["wd"]
args.val = False
logreg = LogReg(args, cfg)
    logreg()
test_tokenizer.py | # coding: utf-8
from __future__ import unicode_literals
import pytest
PUNCT_OPEN = ["(", "[", "{", "*"]
PUNCT_CLOSE = [")", "]", "}", "*"]
PUNCT_PAIRED = [("(", ")"), ("[", "]"), ("{", "}"), ("*", "*")]
@pytest.mark.parametrize("text", ["(", "((", "<"])
def test_uk_tokenizer_handles_only_punct(uk_tokenizer, text):
tokens = uk_tokenizer(text)
assert len(tokens) == len(text)
@pytest.mark.parametrize("punct", PUNCT_OPEN)
@pytest.mark.parametrize(
"text", ["Привет", "Привіт", "Ґелґотати", "З'єднання", "Єдність", "їхні"]
)
def test_uk_tokenizer_splits_open_punct(uk_tokenizer, punct, text):
tokens = uk_tokenizer(punct + text)
assert len(tokens) == 2
assert tokens[0].text == punct
assert tokens[1].text == text
@pytest.mark.parametrize("punct", PUNCT_CLOSE)
@pytest.mark.parametrize(
"text", ["Привет", "Привіт", "Ґелґотати", "З'єднання", "Єдність", "їхні"]
)
def test_uk_tokenizer_splits_close_punct(uk_tokenizer, punct, text):
tokens = uk_tokenizer(text + punct)
assert len(tokens) == 2
assert tokens[0].text == text
assert tokens[1].text == punct
@pytest.mark.parametrize("punct", PUNCT_OPEN)
@pytest.mark.parametrize("punct_add", ["`"])
@pytest.mark.parametrize(
"text", ["Привет", "Привіт", "Ґелґотати", "З'єднання", "Єдність", "їхні"]
)
def test_uk_tokenizer_splits_two_diff_open_punct(uk_tokenizer, punct, punct_add, text):
tokens = uk_tokenizer(punct + punct_add + text)
assert len(tokens) == 3
assert tokens[0].text == punct
assert tokens[1].text == punct_add
assert tokens[2].text == text
@pytest.mark.parametrize("punct", PUNCT_CLOSE)
@pytest.mark.parametrize("punct_add", ["'"])
@pytest.mark.parametrize(
"text", ["Привет", "Привіт", "Ґелґотати", "З'єднання", "Єдність", "їхні"]
)
def test_uk_tokenizer_splits_two_diff_close_punct(uk_tokenizer, punct, punct_add, text):
tokens = uk_tokenizer(text + punct + punct_add)
assert len(tokens) == 3
assert tokens[0].text == text
assert tokens[1].text == punct
assert tokens[2].text == punct_add
@pytest.mark.parametrize("punct", PUNCT_OPEN)
@pytest.mark.parametrize(
    "text", ["Привет", "Привіт", "Ґелґотати", "З'єднання", "Єдність", "їхні"]
)
def test_uk_tokenizer_splits_same_open_punct(uk_tokenizer, punct, text):
tokens = uk_tokenizer(punct + punct + punct + text)
assert len(tokens) == 4
assert tokens[0].text == punct
assert tokens[3].text == text
@pytest.mark.parametrize("punct", PUNCT_CLOSE)
@pytest.mark.parametrize(
"text", ["Привет", "Привіт", "Ґелґотати", "З'єднання", "Єдність", "їхні"]
)
def test_uk_tokenizer_splits_same_close_punct(uk_tokenizer, punct, text):
tokens = uk_tokenizer(text + punct + punct + punct)
assert len(tokens) == 4
assert tokens[0].text == text
assert tokens[1].text == punct
@pytest.mark.parametrize("text", ["'Тест"])
def test_uk_tokenizer_splits_open_apostrophe(uk_tokenizer, text):
tokens = uk_tokenizer(text)
assert len(tokens) == 2
assert tokens[0].text == "'"
@pytest.mark.xfail(reason="See #3327")
@pytest.mark.parametrize("text", ["Тест''"])
def test_uk_tokenizer_splits_double_end_quote(uk_tokenizer, text):
tokens = uk_tokenizer(text)
assert len(tokens) == 2
tokens_punct = uk_tokenizer("''")
assert len(tokens_punct) == 1
@pytest.mark.parametrize("punct_open,punct_close", PUNCT_PAIRED)
@pytest.mark.parametrize(
"text", ["Привет", "Привіт", "Ґелґотати", "З'єднання", "Єдність", "їхні"]
)
def test_uk_tokenizer_splits_open_close_punct(
uk_tokenizer, punct_open, punct_close, text
):
tokens = uk_tokenizer(punct_open + text + punct_close)
assert len(tokens) == 3
assert tokens[0].text == punct_open
assert tokens[1].text == text
assert tokens[2].text == punct_close
@pytest.mark.parametrize("punct_open,punct_close", PUNCT_PAIRED)
@pytest.mark.parametrize("punct_open2,punct_close2", [("`", "'")])
@pytest.mark.parametrize(
"text", ["Привет", "Привіт", "Ґелґотати", "З'єднання", "Єдність", "їхні"]
)
def test_uk_tokenizer_two_diff_punct(
uk_tokenizer, punct_open, punct_close, punct_open2, punct_close2, text
):
tokens = uk_tokenizer(punct_open2 + punct_open + text + punct_close + punct_close2)
assert len(tokens) == 5
assert tokens[0].text == punct_open2
assert tokens[1].text == punct_open
assert tokens[2].text == text
assert tokens[3].text == punct_close
assert tokens[4].text == punct_close2
@pytest.mark.parametrize(
"text", ["Привет.", "Привіт.", "Ґелґотати.", "З'єднання.", "Єдність.", "їхні."]
)
def test_uk_tokenizer_splits_trailing_dot(uk_tokenizer, text):
tokens = uk_tokenizer(text)
assert tokens[1].text == "."
def test_uk_tokenizer_splits_bracket_period(uk_tokenizer):
text = "(Раз, два, три, проверка)."
tokens = uk_tokenizer(text)
    assert tokens[len(tokens) - 1].text == "."
parser.rs | //! Main module defining the lexer and parser.
use crate::api::custom_syntax::{markers::*, CustomSyntax};
use crate::api::events::VarDefInfo;
use crate::api::options::LanguageOptions;
use crate::ast::{
ASTFlags, BinaryExpr, ConditionalStmtBlock, CustomExpr, Expr, FnCallExpr, FnCallHashes, Ident,
OpAssignment, ScriptFnDef, Stmt, StmtBlock, StmtBlockContainer, SwitchCases, TryCatchBlock,
};
use crate::engine::{Precedence, KEYWORD_THIS, OP_CONTAINS};
use crate::eval::{EvalState, GlobalRuntimeState};
use crate::func::hashing::get_hasher;
use crate::tokenizer::{
is_keyword_function, is_valid_function_name, is_valid_identifier, Token, TokenStream,
TokenizerControl,
};
use crate::types::dynamic::AccessMode;
use crate::types::StringsInterner;
use crate::{
calc_fn_hash, Dynamic, Engine, EvalAltResult, EvalContext, ExclusiveRange, Identifier,
ImmutableString, InclusiveRange, LexError, OptimizationLevel, ParseError, Position, Scope,
Shared, SmartString, StaticVec, AST, INT, PERR,
};
#[cfg(feature = "no_std")]
use std::prelude::v1::*;
use std::{
collections::BTreeMap,
hash::{Hash, Hasher},
num::{NonZeroU8, NonZeroUsize},
};
pub type ParseResult<T> = Result<T, ParseError>;
type FnLib = BTreeMap<u64, Shared<ScriptFnDef>>;
/// Invalid variable name that acts as a search barrier in a [`Scope`].
const SCOPE_SEARCH_BARRIER_MARKER: &str = "$ BARRIER $";
/// The message: `TokenStream` never ends
const NEVER_ENDS: &str = "`Token`";
/// _(internals)_ A type that encapsulates the current state of the parser.
/// Exported under the `internals` feature only.
#[derive(Debug)]
pub struct ParseState<'e> {
    /// Interface for controlling the tokenizer that feeds this parser.
pub tokenizer_control: TokenizerControl,
/// Interned strings.
interned_strings: StringsInterner,
/// Encapsulates a local stack with variable names to simulate an actual runtime scope.
pub stack: Scope<'e>,
/// Size of the local variables stack upon entry of the current block scope.
pub block_stack_len: usize,
/// Tracks a list of external variables (variables that are not explicitly declared in the scope).
#[cfg(not(feature = "no_closure"))]
pub external_vars: Vec<crate::ast::Ident>,
/// An indicator that disables variable capturing into externals one single time
/// up until the nearest consumed Identifier token.
/// If set to false the next call to [`access_var`][ParseState::access_var] will not capture the variable.
    /// All subsequent calls to [`access_var`][ParseState::access_var] will not be affected.
#[cfg(not(feature = "no_closure"))]
pub allow_capture: bool,
/// Encapsulates a local stack with imported [module][crate::Module] names.
#[cfg(not(feature = "no_module"))]
pub imports: StaticVec<Identifier>,
/// Maximum levels of expression nesting (0 for unlimited).
#[cfg(not(feature = "unchecked"))]
pub max_expr_depth: usize,
}
impl<'e> ParseState<'e> {
/// Create a new [`ParseState`].
#[inline(always)]
#[must_use]
pub fn new(engine: &Engine, tokenizer_control: TokenizerControl) -> Self {
Self {
tokenizer_control,
#[cfg(not(feature = "no_closure"))]
external_vars: Vec::new(),
#[cfg(not(feature = "no_closure"))]
allow_capture: true,
interned_strings: StringsInterner::new(),
stack: Scope::new(),
block_stack_len: 0,
#[cfg(not(feature = "no_module"))]
imports: StaticVec::new_const(),
#[cfg(not(feature = "unchecked"))]
max_expr_depth: engine.max_expr_depth(),
}
}
/// Find explicitly declared variable by name in the [`ParseState`], searching in reverse order.
///
/// The first return value is the offset to be deducted from `ParseState::stack::len()`,
/// i.e. the top element of [`ParseState`]'s variables stack is offset 1.
///
/// If the variable is not present in the scope, the first return value is zero.
///
/// The second return value indicates whether the barrier has been hit before finding the variable.
pub fn find_var(&self, name: &str) -> (usize, bool) {
let mut hit_barrier = false;
(
self.stack
.iter_rev_raw()
.enumerate()
.find(|&(.., (n, ..))| {
if n == SCOPE_SEARCH_BARRIER_MARKER {
// Do not go beyond the barrier
hit_barrier = true;
false
} else {
n == name
}
})
.map_or(0, |(i, ..)| i + 1),
hit_barrier,
)
}
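    // Illustration: if the stack holds `a`, `b`, SCOPE_SEARCH_BARRIER_MARKER and
    // `c` (pushed in that order), `find_var("c")` returns (1, false) while
    // `find_var("a")` returns (4, true); the offset is still reported, but the
    // barrier flag lets `access_var` treat the variable as out of reach.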
/// Find explicitly declared variable by name in the [`ParseState`], searching in reverse order.
///
/// If the variable is not present in the scope adds it to the list of external variables.
///
/// The return value is the offset to be deducted from `ParseState::stack::len()`,
/// i.e. the top element of [`ParseState`]'s variables stack is offset 1.
///
/// Return `None` when the variable name is not found in the `stack`.
#[inline]
#[must_use]
pub fn access_var(&mut self, name: &str, pos: Position) -> Option<NonZeroUsize> {
let _pos = pos;
let (index, hit_barrier) = self.find_var(name);
#[cfg(not(feature = "no_closure"))]
if self.allow_capture {
if index == 0 && !self.external_vars.iter().any(|v| v.as_str() == name) {
self.external_vars.push(crate::ast::Ident {
name: name.into(),
pos: _pos,
});
}
} else {
self.allow_capture = true
}
if hit_barrier {
None
} else {
NonZeroUsize::new(index)
}
}
/// Find a module by name in the [`ParseState`], searching in reverse.
///
/// Returns the offset to be deducted from `Stack::len`,
/// i.e. the top element of the [`ParseState`] is offset 1.
///
/// Returns `None` when the variable name is not found in the [`ParseState`].
///
/// # Panics
///
/// Panics when called under `no_module`.
#[cfg(not(feature = "no_module"))]
#[inline]
#[must_use]
pub fn find_module(&self, name: &str) -> Option<NonZeroUsize> {
self.imports
.iter()
.rev()
.enumerate()
.find(|&(.., n)| n == name)
.and_then(|(i, ..)| NonZeroUsize::new(i + 1))
}
/// Get an interned identifier, creating one if it is not yet interned.
#[inline(always)]
#[must_use]
pub fn get_identifier(&mut self, prefix: impl AsRef<str>, text: impl AsRef<str>) -> Identifier {
self.interned_strings.get(prefix, text).into()
}
/// Get an interned string, creating one if it is not yet interned.
#[inline(always)]
#[allow(dead_code)]
#[must_use]
pub fn get_interned_string(
&mut self,
prefix: impl AsRef<str>,
text: impl AsRef<str>,
) -> ImmutableString {
self.interned_strings.get(prefix, text)
}
}
/// A type that encapsulates all the settings for a particular parsing function.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
struct ParseSettings {
/// Is the construct being parsed located at global level?
is_global: bool,
/// Is the construct being parsed located at function definition level?
#[cfg(not(feature = "no_function"))]
is_function_scope: bool,
/// Is the construct being parsed located inside a closure?
#[cfg(not(feature = "no_function"))]
#[cfg(not(feature = "no_closure"))]
is_closure_scope: bool,
/// Is the current position inside a loop?
is_breakable: bool,
/// Language options in effect (overrides Engine options).
options: LanguageOptions,
/// Current expression nesting level.
level: usize,
/// Current position.
pos: Position,
}
impl ParseSettings {
/// Create a new `ParseSettings` with one higher expression level.
#[inline(always)]
#[must_use]
pub const fn level_up(&self) -> Self {
Self {
level: self.level + 1,
..*self
}
}
/// Make sure that the current level of expression nesting is within the maximum limit.
///
/// If `limit` is zero, then checking is disabled.
#[cfg(not(feature = "unchecked"))]
#[inline]
pub fn ensure_level_within_max_limit(&self, limit: usize) -> ParseResult<()> {
if limit > 0 {
if self.level > limit {
return Err(PERR::ExprTooDeep.into_err(self.pos));
}
}
Ok(())
}
}
/// Make an anonymous function.
#[cfg(not(feature = "no_function"))]
#[inline]
#[must_use]
pub fn make_anonymous_fn(hash: u64) -> String {
format!("{}{:016x}", crate::engine::FN_ANONYMOUS, hash)
}
/// Is this function an anonymous function?
#[cfg(not(feature = "no_function"))]
#[inline(always)]
#[must_use]
pub fn is_anonymous_fn(fn_name: &str) -> bool {
fn_name.starts_with(crate::engine::FN_ANONYMOUS)
}
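// Illustration (assuming `crate::engine::FN_ANONYMOUS` is the `anon$` prefix):
// `make_anonymous_fn(0xABCD)` yields "anon$000000000000abcd", and
// `is_anonymous_fn("anon$000000000000abcd")` returns `true`.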
impl Expr {
/// Convert a [`Variable`][Expr::Variable] into a [`Property`][Expr::Property].
/// All other variants are untouched.
#[cfg(not(feature = "no_object"))]
#[inline]
#[must_use]
fn into_property(self, state: &mut ParseState) -> Self {
match self {
#[cfg(not(feature = "no_module"))]
Self::Variable(x, ..) if !x.1.is_empty() => unreachable!("qualified property"),
Self::Variable(x, .., pos) => {
let ident = x.3;
let getter = state.get_identifier(crate::engine::FN_GET, &ident);
let hash_get = calc_fn_hash(&getter, 1);
let setter = state.get_identifier(crate::engine::FN_SET, &ident);
let hash_set = calc_fn_hash(&setter, 2);
Self::Property(
Box::new((
(getter, hash_get),
(setter, hash_set),
state.get_interned_string("", &ident),
)),
pos,
)
}
_ => self,
}
}
/// Raise an error if the expression can never yield a boolean value.
fn ensure_bool_expr(self) -> ParseResult<Expr> {
let type_name = match self {
Expr::Unit(..) => "()",
Expr::DynamicConstant(ref v, ..) if !v.is::<bool>() => v.type_name(),
Expr::IntegerConstant(..) => "a number",
#[cfg(not(feature = "no_float"))]
Expr::FloatConstant(..) => "a floating-point number",
Expr::CharConstant(..) => "a character",
Expr::StringConstant(..) => "a string",
Expr::InterpolatedString(..) => "a string",
Expr::Array(..) => "an array",
Expr::Map(..) => "an object map",
_ => return Ok(self),
};
Err(
PERR::MismatchedType("a boolean expression".to_string(), type_name.to_string())
.into_err(self.start_position()),
)
}
/// Raise an error if the expression can never yield an iterable value.
fn ensure_iterable(self) -> ParseResult<Expr> {
let type_name = match self {
Expr::Unit(..) => "()",
Expr::BoolConstant(..) => "a boolean",
Expr::IntegerConstant(..) => "a number",
#[cfg(not(feature = "no_float"))]
Expr::FloatConstant(..) => "a floating-point number",
Expr::CharConstant(..) => "a character",
Expr::StringConstant(..) => "a string",
Expr::InterpolatedString(..) => "a string",
Expr::Map(..) => "an object map",
_ => return Ok(self),
};
Err(
PERR::MismatchedType("an iterable value".to_string(), type_name.to_string())
.into_err(self.start_position()),
)
}
}
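// Illustration: `ensure_bool_expr` rejects source such as `if 42 { ... }` at
// parse time with a mismatched-type error ("a boolean expression" expected,
// "a number" found) instead of deferring the failure to runtime.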
/// Make sure that the next expression is not a statement expression (i.e. wrapped in `{}`).
#[inline]
fn ensure_not_statement_expr(input: &mut TokenStream, type_name: impl ToString) -> ParseResult<()> {
match input.peek().expect(NEVER_ENDS) {
(Token::LeftBrace, pos) => Err(PERR::ExprExpected(type_name.to_string()).into_err(*pos)),
_ => Ok(()),
}
}
/// Make sure that the next expression is not a mis-typed assignment (i.e. `a = b` instead of `a == b`).
#[inline]
fn ensure_not_assignment(input: &mut TokenStream) -> ParseResult<()> {
match input.peek().expect(NEVER_ENDS) {
(Token::Equals, pos) => Err(LexError::ImproperSymbol(
"=".to_string(),
"Possibly a typo of '=='?".to_string(),
)
.into_err(*pos)),
_ => Ok(()),
}
}
/// Consume a particular [token][Token], checking that it is the expected one.
///
/// # Panics
///
/// Panics if the next token is not the expected one.
#[inline]
fn eat_token(input: &mut TokenStream, expected_token: Token) -> Position {
let (t, pos) = input.next().expect(NEVER_ENDS);
if t != expected_token {
unreachable!(
"{} expected but gets {} at {}",
expected_token.syntax(),
t.syntax(),
pos
);
}
pos
}
/// Match a particular [token][Token], consuming it if matched.
#[inline]
fn match_token(input: &mut TokenStream, token: Token) -> (bool, Position) {
let (t, pos) = input.peek().expect(NEVER_ENDS);
if *t == token {
(true, eat_token(input, token))
} else {
(false, *pos)
}
}
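// Illustration: if the next token is `;`, `match_token(input, Token::SemiColon)`
// consumes it and returns `(true, pos)`; otherwise the stream is left untouched
// and `(false, peeked_pos)` is returned.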
/// Parse a variable name.
fn parse_var_name(input: &mut TokenStream) -> ParseResult<(SmartString, Position)> {
match input.next().expect(NEVER_ENDS) {
// Variable name
(Token::Identifier(s), pos) => Ok((s, pos)),
// Reserved keyword
(Token::Reserved(s), pos) if is_valid_identifier(s.chars()) => {
Err(PERR::Reserved(s.to_string()).into_err(pos))
}
// Bad identifier
(Token::LexError(err), pos) => Err(err.into_err(pos)),
// Not a variable name
(.., pos) => Err(PERR::VariableExpected.into_err(pos)),
}
}
/// Parse a symbol.
fn parse_symbol(input: &mut TokenStream) -> ParseResult<(SmartString, Position)> {
match input.next().expect(NEVER_ENDS) {
// Symbol
(token, pos) if token.is_standard_symbol() => Ok((token.literal_syntax().into(), pos)),
// Reserved symbol
(Token::Reserved(s), pos) if !is_valid_identifier(s.chars()) => Ok((s, pos)),
// Bad symbol
(Token::LexError(err), pos) => Err(err.into_err(pos)),
// Not a symbol
(.., pos) => Err(PERR::MissingSymbol(String::new()).into_err(pos)),
}
}
impl Engine {
/// Parse `(` expr `)`
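    ///
    /// # Example
    ///
    /// Illustrative Rhai snippets handled here:
    ///
    /// ```rhai
    /// (1 + 2) * 3    // `(1 + 2)` is parsed by this function
    /// ()             // empty parentheses parse to the unit value
    /// ```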
fn parse_paren_expr(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
settings: ParseSettings,
) -> ParseResult<Expr> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
// ( ...
let mut settings = settings;
settings.pos = eat_token(input, Token::LeftParen);
if match_token(input, Token::RightParen).0 {
return Ok(Expr::Unit(settings.pos));
}
let expr = self.parse_expr(input, state, lib, settings.level_up())?;
match input.next().expect(NEVER_ENDS) {
// ( ... )
(Token::RightParen, ..) => Ok(expr),
// ( <error>
(Token::LexError(err), pos) => Err(err.into_err(pos)),
// ( ... ???
(.., pos) => Err(PERR::MissingToken(
Token::RightParen.into(),
"for a matching ( in this expression".into(),
)
.into_err(pos)),
}
}
/// Parse a function call.
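    ///
    /// This function parses the argument list; the function name and the opening
    /// parenthesis have already been consumed by the caller. Illustrative Rhai
    /// snippets:
    ///
    /// ```rhai
    /// foo(1, 2, 3)     // plain call
    /// foo(1, 2, 3,)    // a trailing comma is accepted
    /// m::foo(x)        // namespace-qualified call (when modules are enabled)
    /// ```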
fn parse_fn_call(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
id: Identifier,
capture_parent_scope: bool,
#[cfg(not(feature = "no_module"))] namespace: crate::ast::Namespace,
settings: ParseSettings,
) -> ParseResult<Expr> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
let (token, token_pos) = input.peek().expect(NEVER_ENDS);
#[cfg(not(feature = "no_module"))]
let mut namespace = namespace;
let mut args = StaticVec::new_const();
match token {
// id( <EOF>
Token::EOF => {
return Err(PERR::MissingToken(
Token::RightParen.into(),
format!("to close the arguments list of this function call '{}'", id),
)
.into_err(*token_pos))
}
// id( <error>
Token::LexError(err) => return Err(err.clone().into_err(*token_pos)),
// id()
Token::RightParen => {
eat_token(input, Token::RightParen);
#[cfg(not(feature = "no_module"))]
let hash = if !namespace.is_empty() {
let index = state.find_module(namespace.root());
#[cfg(not(feature = "no_function"))]
let relax = settings.is_function_scope;
#[cfg(feature = "no_function")]
let relax = false;
if !relax && settings.options.strict_var && index.is_none() {
return Err(PERR::ModuleUndefined(namespace.root().to_string())
.into_err(namespace.position()));
}
namespace.set_index(index);
crate::calc_qualified_fn_hash(namespace.iter().map(|m| m.as_str()), &id, 0)
} else {
calc_fn_hash(&id, 0)
};
#[cfg(feature = "no_module")]
let hash = calc_fn_hash(&id, 0);
let hashes = if is_valid_function_name(&id) {
hash.into()
} else {
FnCallHashes::from_native(hash)
};
args.shrink_to_fit();
return Ok(FnCallExpr {
name: state.get_identifier("", id),
capture_parent_scope,
#[cfg(not(feature = "no_module"))]
namespace,
hashes,
args,
pos: settings.pos,
}
.into_fn_call_expr(settings.pos));
}
// id...
_ => (),
}
let settings = settings.level_up();
loop {
match input.peek().expect(NEVER_ENDS) {
// id(...args, ) - handle trailing comma
(Token::RightParen, ..) => (),
_ => args.push(self.parse_expr(input, state, lib, settings)?),
}
match input.peek().expect(NEVER_ENDS) {
// id(...args)
(Token::RightParen, ..) => {
eat_token(input, Token::RightParen);
#[cfg(not(feature = "no_module"))]
let hash = if !namespace.is_empty() {
let index = state.find_module(namespace.root());
#[cfg(not(feature = "no_function"))]
let relax = settings.is_function_scope;
#[cfg(feature = "no_function")]
let relax = false;
if !relax && settings.options.strict_var && index.is_none() {
return Err(PERR::ModuleUndefined(namespace.root().to_string())
.into_err(namespace.position()));
}
namespace.set_index(index);
crate::calc_qualified_fn_hash(
namespace.iter().map(|m| m.as_str()),
&id,
args.len(),
)
} else {
calc_fn_hash(&id, args.len())
};
#[cfg(feature = "no_module")]
let hash = calc_fn_hash(&id, args.len());
let hashes = if is_valid_function_name(&id) {
hash.into()
} else {
FnCallHashes::from_native(hash)
};
args.shrink_to_fit();
return Ok(FnCallExpr {
name: state.get_identifier("", id),
capture_parent_scope,
#[cfg(not(feature = "no_module"))]
namespace,
hashes,
args,
pos: settings.pos,
}
.into_fn_call_expr(settings.pos));
}
// id(...args,
(Token::Comma, ..) => {
eat_token(input, Token::Comma);
}
// id(...args <EOF>
(Token::EOF, pos) => {
return Err(PERR::MissingToken(
Token::RightParen.into(),
format!("to close the arguments list of this function call '{}'", id),
)
.into_err(*pos))
}
// id(...args <error>
(Token::LexError(err), pos) => return Err(err.clone().into_err(*pos)),
// id(...args ???
(.., pos) => {
return Err(PERR::MissingToken(
Token::Comma.into(),
format!("to separate the arguments to function call '{}'", id),
)
.into_err(*pos))
}
}
}
}
/// Parse an indexing chain.
    /// Indexing binds to the right, so this call parses all possible levels of indexing that follow in the input.
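    ///
    /// # Example
    ///
    /// Illustrative Rhai snippets:
    ///
    /// ```rhai
    /// a[0]         // single level
    /// a[0][1]      // chained; stored right-associatively in the AST
    /// m["key"]     // object maps take string indices
    /// ```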
#[cfg(not(feature = "no_index"))]
fn parse_index_chain(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
lhs: Expr,
settings: ParseSettings,
) -> ParseResult<Expr> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
let mut settings = settings;
let idx_expr = self.parse_expr(input, state, lib, settings.level_up())?;
// Check type of indexing - must be integer or string
match idx_expr {
Expr::IntegerConstant(.., pos) => match lhs {
Expr::IntegerConstant(..)
| Expr::Array(..)
| Expr::StringConstant(..)
| Expr::InterpolatedString(..) => (),
Expr::Map(..) => {
return Err(PERR::MalformedIndexExpr(
"Object map access expects string index, not a number".into(),
)
.into_err(pos))
}
#[cfg(not(feature = "no_float"))]
Expr::FloatConstant(..) => {
return Err(PERR::MalformedIndexExpr(
"Only arrays, object maps and strings can be indexed".into(),
)
.into_err(lhs.start_position()))
}
Expr::CharConstant(..)
| Expr::And(..)
| Expr::Or(..)
| Expr::BoolConstant(..)
| Expr::Unit(..) => {
return Err(PERR::MalformedIndexExpr(
"Only arrays, object maps and strings can be indexed".into(),
)
.into_err(lhs.start_position()))
}
_ => (),
},
// lhs[string]
Expr::StringConstant(..) | Expr::InterpolatedString(..) => match lhs {
Expr::Map(..) => (),
Expr::Array(..) | Expr::StringConstant(..) | Expr::InterpolatedString(..) => {
return Err(PERR::MalformedIndexExpr(
"Array or string expects numeric index, not a string".into(),
)
.into_err(idx_expr.start_position()))
}
#[cfg(not(feature = "no_float"))]
Expr::FloatConstant(..) => {
return Err(PERR::MalformedIndexExpr(
"Only arrays, object maps and strings can be indexed".into(),
)
.into_err(lhs.start_position()))
}
Expr::CharConstant(..)
| Expr::And(..)
| Expr::Or(..)
| Expr::BoolConstant(..)
| Expr::Unit(..) => {
return Err(PERR::MalformedIndexExpr(
"Only arrays, object maps and strings can be indexed".into(),
)
.into_err(lhs.start_position()))
}
_ => (),
},
// lhs[float]
#[cfg(not(feature = "no_float"))]
x @ Expr::FloatConstant(..) => {
return Err(PERR::MalformedIndexExpr(
"Array access expects integer index, not a float".into(),
)
.into_err(x.start_position()))
}
// lhs[char]
x @ Expr::CharConstant(..) => {
return Err(PERR::MalformedIndexExpr(
"Array access expects integer index, not a character".into(),
)
.into_err(x.start_position()))
}
// lhs[()]
x @ Expr::Unit(..) => {
return Err(PERR::MalformedIndexExpr(
"Array access expects integer index, not ()".into(),
)
.into_err(x.start_position()))
}
// lhs[??? && ???], lhs[??? || ???]
x @ Expr::And(..) | x @ Expr::Or(..) => {
return Err(PERR::MalformedIndexExpr(
"Array access expects integer index, not a boolean".into(),
)
.into_err(x.start_position()))
}
// lhs[true], lhs[false]
x @ Expr::BoolConstant(..) => {
return Err(PERR::MalformedIndexExpr(
"Array access expects integer index, not a boolean".into(),
)
.into_err(x.start_position()))
}
// All other expressions
_ => (),
}
// Check if there is a closing bracket
match input.peek().expect(NEVER_ENDS) {
(Token::RightBracket, ..) => {
eat_token(input, Token::RightBracket);
// Any more indexing following?
match input.peek().expect(NEVER_ENDS) {
// If another indexing level, right-bind it
(Token::LeftBracket, ..) => {
let prev_pos = settings.pos;
settings.pos = eat_token(input, Token::LeftBracket);
// Recursively parse the indexing chain, right-binding each
let idx_expr = self.parse_index_chain(
input,
state,
lib,
idx_expr,
settings.level_up(),
)?;
// Indexing binds to right
Ok(Expr::Index(
BinaryExpr { lhs, rhs: idx_expr }.into(),
ASTFlags::NONE,
prev_pos,
))
}
// Otherwise terminate the indexing chain
_ => Ok(Expr::Index(
BinaryExpr { lhs, rhs: idx_expr }.into(),
ASTFlags::BREAK,
settings.pos,
)),
}
}
(Token::LexError(err), pos) => Err(err.clone().into_err(*pos)),
(.., pos) => Err(PERR::MissingToken(
Token::RightBracket.into(),
"for a matching [ in this index expression".into(),
)
.into_err(*pos)),
}
}
/// Parse an array literal.
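    ///
    /// # Example
    ///
    /// Illustrative Rhai snippets:
    ///
    /// ```rhai
    /// [1, 2, 3]     // plain array literal
    /// [1, 2, 3,]    // a trailing comma is accepted
    /// ```
    ///
    /// In non-`unchecked` builds, the number of items is limited by
    /// `Engine::max_array_size`.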
#[cfg(not(feature = "no_index"))]
fn parse_array_literal(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
settings: ParseSettings,
) -> ParseResult<Expr> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
// [ ...
let mut settings = settings;
settings.pos = eat_token(input, Token::LeftBracket);
let mut arr = StaticVec::new_const();
loop {
const MISSING_RBRACKET: &str = "to end this array literal";
#[cfg(not(feature = "unchecked"))]
if self.max_array_size() > 0 && arr.len() >= self.max_array_size() {
return Err(PERR::LiteralTooLarge(
"Size of array literal".to_string(),
self.max_array_size(),
)
.into_err(input.peek().expect(NEVER_ENDS).1));
}
match input.peek().expect(NEVER_ENDS) {
(Token::RightBracket, ..) => {
eat_token(input, Token::RightBracket);
break;
}
(Token::EOF, pos) => {
return Err(PERR::MissingToken(
Token::RightBracket.into(),
MISSING_RBRACKET.into(),
)
.into_err(*pos))
}
_ => {
let expr = self.parse_expr(input, state, lib, settings.level_up())?;
arr.push(expr);
}
}
match input.peek().expect(NEVER_ENDS) {
(Token::Comma, ..) => {
eat_token(input, Token::Comma);
}
(Token::RightBracket, ..) => (),
(Token::EOF, pos) => {
return Err(PERR::MissingToken(
Token::RightBracket.into(),
MISSING_RBRACKET.into(),
)
.into_err(*pos))
}
(Token::LexError(err), pos) => return Err(err.clone().into_err(*pos)),
(.., pos) => {
return Err(PERR::MissingToken(
Token::Comma.into(),
"to separate the items of this array literal".into(),
)
.into_err(*pos))
}
};
}
arr.shrink_to_fit();
Ok(Expr::Array(arr.into(), settings.pos))
}
/// Parse a map literal.
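    ///
    /// # Example
    ///
    /// Property names are identifiers or string literals, and duplicated names are
    /// rejected at parse time. Illustrative Rhai snippet:
    ///
    /// ```rhai
    /// #{ a: 1, "b c": 2 }
    /// ```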
#[cfg(not(feature = "no_object"))]
fn parse_map_literal(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
settings: ParseSettings,
) -> ParseResult<Expr> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
// #{ ...
let mut settings = settings;
settings.pos = eat_token(input, Token::MapStart);
let mut map = StaticVec::<(Ident, Expr)>::new();
let mut template = BTreeMap::<Identifier, crate::Dynamic>::new();
loop {
const MISSING_RBRACE: &str = "to end this object map literal";
match input.peek().expect(NEVER_ENDS) {
(Token::RightBrace, ..) => {
eat_token(input, Token::RightBrace);
break;
}
(Token::EOF, pos) => {
return Err(
PERR::MissingToken(Token::RightBrace.into(), MISSING_RBRACE.into())
.into_err(*pos),
)
}
_ => (),
}
let (name, pos) = match input.next().expect(NEVER_ENDS) {
(Token::Identifier(s), pos) | (Token::StringConstant(s), pos) => {
if map.iter().any(|(p, ..)| **p == s) {
return Err(PERR::DuplicatedProperty(s.to_string()).into_err(pos));
}
(s, pos)
}
(Token::InterpolatedString(..), pos) => {
return Err(PERR::PropertyExpected.into_err(pos))
}
(Token::Reserved(s), pos) if is_valid_identifier(s.chars()) => {
return Err(PERR::Reserved(s.to_string()).into_err(pos));
}
(Token::LexError(err), pos) => return Err(err.into_err(pos)),
(Token::EOF, pos) => {
return Err(PERR::MissingToken(
Token::RightBrace.into(),
MISSING_RBRACE.into(),
)
.into_err(pos));
}
(.., pos) if map.is_empty() => {
return Err(PERR::MissingToken(
Token::RightBrace.into(),
MISSING_RBRACE.into(),
)
.into_err(pos));
}
(.., pos) => return Err(PERR::PropertyExpected.into_err(pos)),
};
match input.next().expect(NEVER_ENDS) {
(Token::Colon, ..) => (),
(Token::LexError(err), pos) => return Err(err.into_err(pos)),
(.., pos) => {
return Err(PERR::MissingToken(
Token::Colon.into(),
format!(
"to follow the property '{}' in this object map literal",
name
),
)
.into_err(pos))
}
};
#[cfg(not(feature = "unchecked"))]
if self.max_map_size() > 0 && map.len() >= self.max_map_size() {
return Err(PERR::LiteralTooLarge(
"Number of properties in object map literal".to_string(),
self.max_map_size(),
)
.into_err(input.peek().expect(NEVER_ENDS).1));
}
let expr = self.parse_expr(input, state, lib, settings.level_up())?;
let name = state.get_identifier("", name);
template.insert(name.clone(), crate::Dynamic::UNIT);
map.push((Ident { name, pos }, expr));
match input.peek().expect(NEVER_ENDS) {
(Token::Comma, ..) => {
eat_token(input, Token::Comma);
}
(Token::RightBrace, ..) => (),
(Token::Identifier(..), pos) => {
return Err(PERR::MissingToken(
Token::Comma.into(),
"to separate the items of this object map literal".into(),
)
.into_err(*pos))
}
(Token::LexError(err), pos) => return Err(err.clone().into_err(*pos)),
(.., pos) => {
return Err(
PERR::MissingToken(Token::RightBrace.into(), MISSING_RBRACE.into())
.into_err(*pos),
)
}
}
}
map.shrink_to_fit();
Ok(Expr::Map((map, template).into(), settings.pos))
}
/// Parse a switch expression.
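    ///
    /// # Example
    ///
    /// Illustrative Rhai snippet showing literal cases, an `if` guard, a range case
    /// and the default case:
    ///
    /// ```rhai
    /// switch x {
    ///     1 => print("one"),
    ///     2 if y > 0 => print("two, guarded"),
    ///     3..=5 => print("three to five"),
    ///     _ => print("something else")
    /// }
    /// ```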
fn parse_switch(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
settings: ParseSettings,
) -> ParseResult<Stmt> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
// switch ...
let mut settings = settings;
settings.pos = eat_token(input, Token::Switch);
let item = self.parse_expr(input, state, lib, settings.level_up())?;
match input.next().expect(NEVER_ENDS) {
(Token::LeftBrace, ..) => (),
(Token::LexError(err), pos) => return Err(err.into_err(pos)),
(.., pos) => {
return Err(PERR::MissingToken(
Token::LeftBrace.into(),
"to start a switch block".into(),
)
.into_err(pos))
}
}
let mut cases = BTreeMap::<u64, Box<ConditionalStmtBlock>>::new();
let mut ranges = StaticVec::<(INT, INT, bool, Box<ConditionalStmtBlock>)>::new();
let mut def_pos = Position::NONE;
let mut def_stmt = None;
loop {
const MISSING_RBRACE: &str = "to end this switch block";
let (expr, condition) = match input.peek().expect(NEVER_ENDS) {
(Token::RightBrace, ..) => {
eat_token(input, Token::RightBrace);
break;
}
(Token::EOF, pos) => {
return Err(
PERR::MissingToken(Token::RightBrace.into(), MISSING_RBRACE.into())
.into_err(*pos),
)
}
(Token::Underscore, pos) if def_stmt.is_none() => {
def_pos = *pos;
eat_token(input, Token::Underscore);
let (if_clause, if_pos) = match_token(input, Token::If);
if if_clause {
return Err(PERR::WrongSwitchCaseCondition.into_err(if_pos));
}
(None, None)
}
(Token::Underscore, pos) => return Err(PERR::DuplicatedSwitchCase.into_err(*pos)),
_ if def_stmt.is_some() => {
return Err(PERR::WrongSwitchDefaultCase.into_err(def_pos))
}
_ => {
let case_expr =
Some(self.parse_expr(input, state, lib, settings.level_up())?);
let condition = if match_token(input, Token::If).0 {
Some(self.parse_expr(input, state, lib, settings.level_up())?)
} else {
None
};
(case_expr, condition)
}
};
let (hash, range) = if let Some(expr) = expr {
let value = expr.get_literal_value().ok_or_else(|| {
PERR::ExprExpected("a literal".to_string()).into_err(expr.start_position())
})?;
let guard = value.read_lock::<ExclusiveRange>();
if let Some(range) = guard {
(None, Some((range.start, range.end, false)))
} else if let Some(range) = value.read_lock::<InclusiveRange>() {
(None, Some((*range.start(), *range.end(), true)))
} else if value.is::<INT>() && !ranges.is_empty() {
return Err(PERR::WrongSwitchIntegerCase.into_err(expr.start_position()));
} else {
let hasher = &mut get_hasher();
value.hash(hasher);
let hash = hasher.finish();
if !cases.is_empty() && cases.contains_key(&hash) {
return Err(PERR::DuplicatedSwitchCase.into_err(expr.start_position()));
}
(Some(hash), None)
}
} else {
(None, None)
};
match input.next().expect(NEVER_ENDS) {
(Token::DoubleArrow, ..) => (),
(Token::LexError(err), pos) => return Err(err.into_err(pos)),
(.., pos) => {
return Err(PERR::MissingToken(
Token::DoubleArrow.into(),
"in this switch case".to_string(),
)
.into_err(pos))
}
};
let stmt = self.parse_stmt(input, state, lib, settings.level_up())?;
let need_comma = !stmt.is_self_terminated();
def_stmt = match (hash, range) {
(None, Some(range)) => {
let is_empty = if range.2 {
(range.0..=range.1).is_empty()
} else {
(range.0..range.1).is_empty()
};
if !is_empty {
match (range.1.checked_sub(range.0), range.2) {
// Unroll single range
(Some(1), false) | (Some(0), true) => {
let value = Dynamic::from_int(range.0);
let hasher = &mut get_hasher();
value.hash(hasher);
let hash = hasher.finish();
cases.entry(hash).or_insert_with(|| {
let block: ConditionalStmtBlock = (condition, stmt).into();
block.into()
});
}
// Other range
_ => {
let block: ConditionalStmtBlock = (condition, stmt).into();
ranges.push((range.0, range.1, range.2, block.into()))
}
}
}
None
}
(Some(hash), None) => {
let block: ConditionalStmtBlock = (condition, stmt).into();
cases.insert(hash, block.into());
None
}
(None, None) => Some(Box::new(stmt.into())),
_ => unreachable!("both hash and range in switch statement case"),
};
match input.peek().expect(NEVER_ENDS) {
(Token::Comma, ..) => {
eat_token(input, Token::Comma);
}
(Token::RightBrace, ..) => (),
(Token::EOF, pos) => {
return Err(
                        PERR::MissingToken(Token::RightBrace.into(), MISSING_RBRACE.into())
.into_err(*pos),
)
}
(Token::LexError(err), pos) => return Err(err.clone().into_err(*pos)),
(.., pos) if need_comma => {
return Err(PERR::MissingToken(
Token::Comma.into(),
"to separate the items in this switch block".into(),
)
.into_err(*pos))
}
_ => (),
}
}
let cases = SwitchCases {
cases,
def_case: def_stmt.unwrap_or_else(|| StmtBlock::NONE.into()),
ranges,
};
Ok(Stmt::Switch((item, cases).into(), settings.pos))
}
/// Parse a primary expression.
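    ///
    /// Primary expressions include constants, parenthesized expressions,
    /// statement/`if`/`switch` expressions (when enabled), array and map literals,
    /// interpolated strings, closures and identifiers, e.g. (illustrative Rhai
    /// snippets):
    ///
    /// ```rhai
    /// 42             // constant
    /// "hello"        // string
    /// `x = ${x}`     // interpolated string
    /// [1, 2]         // array literal
    /// #{ a: 1 }      // object map literal
    /// |x| x + 1      // closure (when anonymous functions are enabled)
    /// foo            // variable access
    /// ```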
fn parse_primary(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
settings: ParseSettings,
) -> ParseResult<Expr> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
let (token, token_pos) = input.peek().expect(NEVER_ENDS);
let mut settings = settings;
settings.pos = *token_pos;
let root_expr = match token {
Token::EOF => return Err(PERR::UnexpectedEOF.into_err(settings.pos)),
Token::IntegerConstant(..)
| Token::CharConstant(..)
| Token::StringConstant(..)
| Token::True
| Token::False => match input.next().expect(NEVER_ENDS).0 {
Token::IntegerConstant(x) => Expr::IntegerConstant(x, settings.pos),
Token::CharConstant(c) => Expr::CharConstant(c, settings.pos),
Token::StringConstant(s) => {
Expr::StringConstant(state.get_interned_string("", s), settings.pos)
}
Token::True => Expr::BoolConstant(true, settings.pos),
Token::False => Expr::BoolConstant(false, settings.pos),
token => unreachable!("token is {:?}", token),
},
#[cfg(not(feature = "no_float"))]
Token::FloatConstant(x) => {
let x = *x;
input.next().expect(NEVER_ENDS);
Expr::FloatConstant(x, settings.pos)
}
#[cfg(feature = "decimal")]
Token::DecimalConstant(x) => {
let x = (*x).into();
input.next().expect(NEVER_ENDS);
Expr::DynamicConstant(Box::new(x), settings.pos)
}
// { - block statement as expression
Token::LeftBrace if settings.options.allow_stmt_expr => {
match self.parse_block(input, state, lib, settings.level_up())? {
block @ Stmt::Block(..) => Expr::Stmt(Box::new(block.into())),
stmt => unreachable!("Stmt::Block expected but gets {:?}", stmt),
}
}
// ( - grouped expression
Token::LeftParen => self.parse_paren_expr(input, state, lib, settings.level_up())?,
            // An `if` statement is allowed to act as an expression
Token::If if settings.options.allow_if_expr => Expr::Stmt(Box::new(
self.parse_if(input, state, lib, settings.level_up())?
.into(),
)),
            // A `switch` statement is allowed to act as an expression
Token::Switch if settings.options.allow_switch_expr => Expr::Stmt(Box::new(
self.parse_switch(input, state, lib, settings.level_up())?
.into(),
)),
// | ...
#[cfg(not(feature = "no_function"))]
Token::Pipe | Token::Or if settings.options.allow_anonymous_fn => {
let mut new_state = ParseState::new(self, state.tokenizer_control.clone());
#[cfg(not(feature = "unchecked"))]
{
new_state.max_expr_depth = self.max_function_expr_depth();
}
let new_settings = ParseSettings {
is_global: false,
is_function_scope: true,
#[cfg(not(feature = "no_closure"))]
is_closure_scope: true,
is_breakable: false,
level: 0,
options: LanguageOptions {
strict_var: if cfg!(feature = "no_closure") {
settings.options.strict_var
} else {
// A capturing closure can access variables not defined locally
false
},
..self.options
},
..settings
};
let (expr, func) = self.parse_anon_fn(input, &mut new_state, lib, new_settings)?;
#[cfg(not(feature = "no_closure"))]
new_state.external_vars.iter().try_for_each(
|crate::ast::Ident { name, pos }| {
let index = state.access_var(name, *pos);
if settings.options.strict_var
&& !settings.is_closure_scope
&& index.is_none()
{
// If the parent scope is not inside another capturing closure
// then we can conclude that the captured variable doesn't exist.
// Under Strict Variables mode, this is not allowed.
Err(PERR::VariableUndefined(name.to_string()).into_err(*pos))
} else {
Ok::<_, ParseError>(())
}
},
)?;
let hash_script = calc_fn_hash(&func.name, func.params.len());
lib.insert(hash_script, func.into());
expr
}
// Interpolated string
Token::InterpolatedString(..) => {
let mut segments = StaticVec::<Expr>::new();
match input.next().expect(NEVER_ENDS) {
(Token::InterpolatedString(s), ..) if s.is_empty() => (),
(Token::InterpolatedString(s), pos) => {
segments.push(Expr::StringConstant(s.into(), pos))
}
token => {
unreachable!("Token::InterpolatedString expected but gets {:?}", token)
}
}
loop {
let expr = match self.parse_block(input, state, lib, settings.level_up())? {
block @ Stmt::Block(..) => Expr::Stmt(Box::new(block.into())),
stmt => unreachable!("Stmt::Block expected but gets {:?}", stmt),
};
match expr {
Expr::StringConstant(s, ..) if s.is_empty() => (),
_ => segments.push(expr),
}
// Make sure to parse the following as text
let mut control = state.tokenizer_control.get();
control.is_within_text = true;
state.tokenizer_control.set(control);
match input.next().expect(NEVER_ENDS) {
(Token::StringConstant(s), pos) => {
if !s.is_empty() {
segments.push(Expr::StringConstant(s.into(), pos));
}
// End the interpolated string if it is terminated by a back-tick.
break;
}
(Token::InterpolatedString(s), pos) => {
if !s.is_empty() {
segments.push(Expr::StringConstant(s.into(), pos));
}
}
(Token::LexError(err), pos)
if matches!(*err, LexError::UnterminatedString) =>
{
return Err(err.into_err(pos))
}
(token, ..) => unreachable!(
"string within an interpolated string literal expected but gets {:?}",
token
),
}
}
if segments.is_empty() {
Expr::StringConstant(state.get_interned_string("", ""), settings.pos)
} else {
segments.shrink_to_fit();
Expr::InterpolatedString(segments.into(), settings.pos)
}
}
// Array literal
#[cfg(not(feature = "no_index"))]
Token::LeftBracket => {
self.parse_array_literal(input, state, lib, settings.level_up())?
}
// Map literal
#[cfg(not(feature = "no_object"))]
Token::MapStart => self.parse_map_literal(input, state, lib, settings.level_up())?,
// Custom syntax.
Token::Custom(key) | Token::Reserved(key) | Token::Identifier(key)
if !self.custom_syntax.is_empty() && self.custom_syntax.contains_key(&**key) =>
{
let (key, syntax) = self.custom_syntax.get_key_value(&**key).unwrap();
let (.., pos) = input.next().expect(NEVER_ENDS);
let settings2 = settings.level_up();
self.parse_custom_syntax(input, state, lib, settings2, key, syntax, pos)?
}
// Identifier
Token::Identifier(..) => {
#[cfg(not(feature = "no_module"))]
let ns = crate::ast::Namespace::NONE;
#[cfg(feature = "no_module")]
let ns = ();
let s = match input.next().expect(NEVER_ENDS) {
(Token::Identifier(s), ..) => s,
token => unreachable!("Token::Identifier expected but gets {:?}", token),
};
match input.peek().expect(NEVER_ENDS).0 {
// Function call
Token::LeftParen | Token::Bang => {
#[cfg(not(feature = "no_closure"))]
{
                            // Once the identifier is consumed, re-enable capturing of variables that follow
state.allow_capture = true;
}
Expr::Variable(
(None, ns, 0, state.get_identifier("", s)).into(),
None,
settings.pos,
)
}
// Namespace qualification
#[cfg(not(feature = "no_module"))]
Token::DoubleColon => {
#[cfg(not(feature = "no_closure"))]
{
                            // Once the identifier is consumed, re-enable capturing of variables that follow
state.allow_capture = true;
}
Expr::Variable(
(None, ns, 0, state.get_identifier("", s)).into(),
None,
settings.pos,
)
}
// Normal variable access
_ => {
let index = state.access_var(&s, settings.pos);
if settings.options.strict_var && index.is_none() {
return Err(
PERR::VariableUndefined(s.to_string()).into_err(settings.pos)
);
}
let short_index = index.and_then(|x| {
if x.get() <= u8::MAX as usize {
NonZeroU8::new(x.get() as u8)
} else {
None
}
});
Expr::Variable(
(index, ns, 0, state.get_identifier("", s)).into(),
short_index,
settings.pos,
)
}
}
}
// Reserved keyword or symbol
Token::Reserved(..) => {
#[cfg(not(feature = "no_module"))]
let ns = crate::ast::Namespace::NONE;
#[cfg(feature = "no_module")]
let ns = ();
let s = match input.next().expect(NEVER_ENDS) {
(Token::Reserved(s), ..) => s,
token => unreachable!("Token::Reserved expected but gets {:?}", token),
};
match input.peek().expect(NEVER_ENDS).0 {
                    // A function call is allowed to use a reserved keyword as its name
Token::LeftParen | Token::Bang if is_keyword_function(&s) => Expr::Variable(
(None, ns, 0, state.get_identifier("", s)).into(),
None,
settings.pos,
),
// Access to `this` as a variable is OK within a function scope
#[cfg(not(feature = "no_function"))]
_ if &*s == KEYWORD_THIS && settings.is_function_scope => Expr::Variable(
(None, ns, 0, state.get_identifier("", s)).into(),
None,
settings.pos,
),
                    // `this` cannot be accessed as a variable outside a function scope
_ if &*s == KEYWORD_THIS => {
let msg = format!("'{}' can only be used in functions", s);
return Err(
LexError::ImproperSymbol(s.to_string(), msg).into_err(settings.pos)
);
}
_ => return Err(PERR::Reserved(s.to_string()).into_err(settings.pos)),
}
}
Token::LexError(..) => match input.next().expect(NEVER_ENDS) {
(Token::LexError(err), ..) => return Err(err.into_err(settings.pos)),
token => unreachable!("Token::LexError expected but gets {:?}", token),
},
_ => {
return Err(
LexError::UnexpectedInput(token.syntax().to_string()).into_err(settings.pos)
)
}
};
self.parse_postfix(input, state, lib, root_expr, settings)
}
/// Tail processing of all possible postfix operators of a primary expression.
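    ///
    /// Postfix operators include call parentheses, `!` calls, `::` namespace
    /// qualification, `[...]` indexing and `.` property/method access, so a chain
    /// such as the following (illustrative Rhai snippet) is handled here:
    ///
    /// ```rhai
    /// obj.prop[0].method(x)
    /// ```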
fn parse_postfix(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
mut lhs: Expr,
settings: ParseSettings,
) -> ParseResult<Expr> {
let mut settings = settings;
// Tail processing all possible postfix operators
loop {
let (tail_token, ..) = input.peek().expect(NEVER_ENDS);
if !lhs.is_valid_postfix(tail_token) {
break;
}
let (tail_token, tail_pos) = input.next().expect(NEVER_ENDS);
settings.pos = tail_pos;
lhs = match (lhs, tail_token) {
// Qualified function call with !
#[cfg(not(feature = "no_module"))]
(Expr::Variable(x, ..), Token::Bang) if !x.1.is_empty() => {
return if !match_token(input, Token::LeftParen).0 {
Err(LexError::UnexpectedInput(Token::Bang.syntax().to_string())
.into_err(tail_pos))
} else {
Err(LexError::ImproperSymbol(
"!".to_string(),
"'!' cannot be used to call module functions".to_string(),
)
.into_err(tail_pos))
};
}
// Function call with !
(Expr::Variable(x, .., pos), Token::Bang) => {
match match_token(input, Token::LeftParen) {
(false, pos) => {
return Err(PERR::MissingToken(
Token::LeftParen.syntax().into(),
"to start arguments list of function call".into(),
)
.into_err(pos))
}
_ => (),
}
let (.., _ns, _, name) = *x;
settings.pos = pos;
self.parse_fn_call(
input,
state,
lib,
name,
true,
#[cfg(not(feature = "no_module"))]
_ns,
settings.level_up(),
)?
}
// Function call
(Expr::Variable(x, .., pos), Token::LeftParen) => {
let (.., _ns, _, name) = *x;
settings.pos = pos;
self.parse_fn_call(
input,
state,
lib,
name,
false,
#[cfg(not(feature = "no_module"))]
_ns,
settings.level_up(),
)?
}
// module access
#[cfg(not(feature = "no_module"))]
(Expr::Variable(x, .., pos), Token::DoubleColon) => {
let (id2, pos2) = parse_var_name(input)?;
let (.., mut namespace, _, name) = *x;
let var_name_def = Ident { name, pos };
namespace.push(var_name_def);
Expr::Variable(
(None, namespace, 0, state.get_identifier("", id2)).into(),
None,
pos2,
)
}
// Indexing
#[cfg(not(feature = "no_index"))]
(expr, Token::LeftBracket) => {
self.parse_index_chain(input, state, lib, expr, settings.level_up())?
}
// Property access
#[cfg(not(feature = "no_object"))]
(expr, Token::Period) => {
// Expression after dot must start with an identifier
match input.peek().expect(NEVER_ENDS) {
(Token::Identifier(..), ..) => {
#[cfg(not(feature = "no_closure"))]
{
                                // Prevent capturing object properties as variables: xxx.<var>
state.allow_capture = false;
}
}
(Token::Reserved(s), ..) if is_keyword_function(s) => (),
(.., pos) => return Err(PERR::PropertyExpected.into_err(*pos)),
}
let rhs = self.parse_primary(input, state, lib, settings.level_up())?;
Self::make_dot_expr(state, expr, ASTFlags::NONE, rhs, tail_pos)?
}
// Unknown postfix operator
(expr, token) => unreachable!(
"unknown postfix operator '{}' for {:?}",
token.syntax(),
expr
),
}
}
// Cache the hash key for namespace-qualified variables
#[cfg(not(feature = "no_module"))]
let namespaced_variable = match lhs {
Expr::Variable(ref mut x, ..) if !x.1.is_empty() => Some(x.as_mut()),
Expr::Index(ref mut x, ..) | Expr::Dot(ref mut x, ..) => match x.lhs {
Expr::Variable(ref mut x, ..) if !x.1.is_empty() => Some(x.as_mut()),
_ => None,
},
_ => None,
};
#[cfg(not(feature = "no_module"))]
if let Some((.., namespace, hash, name)) = namespaced_variable {
if !namespace.is_empty() {
*hash = crate::calc_qualified_var_hash(namespace.iter().map(|v| v.as_str()), name);
#[cfg(not(feature = "no_module"))]
{
let index = state.find_module(namespace.root());
#[cfg(not(feature = "no_function"))]
let relax = settings.is_function_scope;
#[cfg(feature = "no_function")]
let relax = false;
if !relax && settings.options.strict_var && index.is_none() {
return Err(PERR::ModuleUndefined(namespace.root().to_string())
.into_err(namespace.position()));
}
namespace.set_index(index);
}
}
}
// Make sure identifiers are valid
Ok(lhs)
}
/// Parse a potential unary operator.
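    ///
    /// # Example
    ///
    /// Illustrative Rhai snippets:
    ///
    /// ```rhai
    /// -42      // folded into a negative integer constant at parse time
    /// +x       // on a non-constant, becomes a call to `+`
    /// !flag    // becomes a call to `!`
    /// ```
    ///
    /// An overflowing integer negation falls back to a floating-point constant
    /// (or is a parse error under `no_float`).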
fn parse_unary(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
settings: ParseSettings,
) -> ParseResult<Expr> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
let (token, token_pos) = input.peek().expect(NEVER_ENDS);
let mut settings = settings;
settings.pos = *token_pos;
match token {
// -expr
Token::Minus | Token::UnaryMinus => {
let token = token.clone();
let pos = eat_token(input, token);
match self.parse_unary(input, state, lib, settings.level_up())? {
// Negative integer
Expr::IntegerConstant(num, ..) => num
.checked_neg()
.map(|i| Expr::IntegerConstant(i, pos))
.or_else(|| {
#[cfg(not(feature = "no_float"))]
return Some(Expr::FloatConstant((-(num as crate::FLOAT)).into(), pos));
#[cfg(feature = "no_float")]
return None;
})
.ok_or_else(|| {
LexError::MalformedNumber(format!("-{}", num)).into_err(pos)
}),
// Negative float
#[cfg(not(feature = "no_float"))]
Expr::FloatConstant(x, ..) => Ok(Expr::FloatConstant((-(*x)).into(), pos)),
// Call negative function
expr => {
let mut args = StaticVec::new_const();
args.push(expr);
args.shrink_to_fit();
Ok(FnCallExpr {
name: state.get_identifier("", "-"),
hashes: FnCallHashes::from_native(calc_fn_hash("-", 1)),
args,
pos,
..Default::default()
}
.into_fn_call_expr(pos))
}
}
}
// +expr
Token::Plus | Token::UnaryPlus => {
let token = token.clone();
let pos = eat_token(input, token);
match self.parse_unary(input, state, lib, settings.level_up())? {
expr @ Expr::IntegerConstant(..) => Ok(expr),
#[cfg(not(feature = "no_float"))]
expr @ Expr::FloatConstant(..) => Ok(expr),
// Call plus function
expr => {
let mut args = StaticVec::new_const();
args.push(expr);
args.shrink_to_fit();
Ok(FnCallExpr {
name: state.get_identifier("", "+"),
hashes: FnCallHashes::from_native(calc_fn_hash("+", 1)),
args,
pos,
..Default::default()
}
.into_fn_call_expr(pos))
}
}
}
// !expr
Token::Bang => {
let pos = eat_token(input, Token::Bang);
let mut args = StaticVec::new_const();
args.push(self.parse_unary(input, state, lib, settings.level_up())?);
args.shrink_to_fit();
Ok(FnCallExpr {
name: state.get_identifier("", "!"),
hashes: FnCallHashes::from_native(calc_fn_hash("!", 1)),
args,
pos,
..Default::default()
}
.into_fn_call_expr(pos))
}
// <EOF>
Token::EOF => Err(PERR::UnexpectedEOF.into_err(settings.pos)),
// All other tokens
_ => self.parse_primary(input, state, lib, settings.level_up()),
}
}
/// Make an assignment statement.
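    ///
    /// Valid assignment targets are variables and property/index chains that do not
    /// pass through a method call, e.g. (illustrative Rhai snippets):
    ///
    /// ```rhai
    /// x = 42;           // OK: plain variable
    /// a.b[0].c = 42;    // OK: property/index chain
    /// a.b() = 42;       // error: a method call is not an l-value
    /// x + y = 42;       // error: invalid assignment target
    /// ```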
fn make_assignment_stmt(
op: Option<Token>,
state: &mut ParseState,
lhs: Expr,
rhs: Expr,
op_pos: Position,
) -> ParseResult<Stmt> {
#[must_use]
fn check_lvalue(expr: &Expr, parent_is_dot: bool) -> Option<Position> {
match expr {
Expr::Index(x, options, ..) | Expr::Dot(x, options, ..) if parent_is_dot => {
match x.lhs {
Expr::Property(..) if !options.contains(ASTFlags::BREAK) => {
check_lvalue(&x.rhs, matches!(expr, Expr::Dot(..)))
}
Expr::Property(..) => None,
// Anything other than a property after dotting (e.g. a method call) is not an l-value
ref e => Some(e.position()),
}
}
Expr::Index(x, options, ..) | Expr::Dot(x, options, ..) => match x.lhs {
Expr::Property(..) => unreachable!("unexpected Expr::Property in indexing"),
_ if !options.contains(ASTFlags::BREAK) => {
check_lvalue(&x.rhs, matches!(expr, Expr::Dot(..)))
}
_ => None,
},
Expr::Property(..) if parent_is_dot => None,
Expr::Property(..) => unreachable!("unexpected Expr::Property in indexing"),
e if parent_is_dot => Some(e.position()),
_ => None,
}
}
let op_info = op.map(OpAssignment::new_from_token);
match lhs {
// const_expr = rhs
ref expr if expr.is_constant() => {
Err(PERR::AssignmentToConstant("".into()).into_err(lhs.start_position()))
}
// var (non-indexed) = rhs
Expr::Variable(ref x, None, _) if x.0.is_none() => Ok(Stmt::Assignment(
(op_info, (lhs, rhs).into()).into(),
op_pos,
)),
// var (indexed) = rhs
Expr::Variable(ref x, i, var_pos) => {
let (index, .., name) = x.as_ref();
let index = i.map_or_else(
|| index.expect("either long or short index is `None`").get(),
|n| n.get() as usize,
);
match state
.stack
.get_mut_by_index(state.stack.len() - index)
.access_mode()
{
AccessMode::ReadWrite => Ok(Stmt::Assignment(
(op_info, (lhs, rhs).into()).into(),
op_pos,
)),
// Constant values cannot be assigned to
AccessMode::ReadOnly => {
Err(PERR::AssignmentToConstant(name.to_string()).into_err(var_pos))
}
}
}
// xxx[???]... = rhs, xxx.prop... = rhs
Expr::Index(ref x, options, ..) | Expr::Dot(ref x, options, ..) => {
let valid_lvalue = if options.contains(ASTFlags::BREAK) {
None
} else {
check_lvalue(&x.rhs, matches!(lhs, Expr::Dot(..)))
};
match valid_lvalue {
None => {
match x.lhs {
// var[???] = rhs, var.??? = rhs
Expr::Variable(..) => Ok(Stmt::Assignment(
(op_info, (lhs, rhs).into()).into(),
op_pos,
)),
// expr[???] = rhs, expr.??? = rhs
ref expr => Err(PERR::AssignmentToInvalidLHS("".to_string())
.into_err(expr.position())),
}
}
Some(err_pos) => {
Err(PERR::AssignmentToInvalidLHS("".to_string()).into_err(err_pos))
}
}
}
// ??? && ??? = rhs, ??? || ??? = rhs
Expr::And(..) | Expr::Or(..) => Err(LexError::ImproperSymbol(
"=".to_string(),
"Possibly a typo of '=='?".to_string(),
)
.into_err(op_pos)),
// expr = rhs
_ => Err(PERR::AssignmentToInvalidLHS("".to_string()).into_err(lhs.position())),
}
}
/// Parse an operator-assignment expression (if any).
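    ///
    /// # Example
    ///
    /// Illustrative Rhai snippets:
    ///
    /// ```rhai
    /// x = 1;     // plain assignment
    /// x += 1;    // operator-assignment
    /// ```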
fn parse_op_assignment_stmt(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
lhs: Expr,
settings: ParseSettings,
) -> ParseResult<Stmt> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
let (op, pos) = match input.peek().expect(NEVER_ENDS) {
// var = ...
(Token::Equals, ..) => (None, eat_token(input, Token::Equals)),
// var op= ...
(token, ..) if token.is_op_assignment() => input
.next()
.map(|(op, pos)| (Some(op), pos))
.expect(NEVER_ENDS),
// Not op-assignment
_ => return Ok(Stmt::Expr(lhs.into())),
};
let mut settings = settings;
settings.pos = pos;
let rhs = self.parse_expr(input, state, lib, settings.level_up())?;
Self::make_assignment_stmt(op, state, lhs, rhs, pos)
}
/// Make a dot expression.
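    ///
    /// # Example
    ///
    /// Illustrative Rhai snippets:
    ///
    /// ```rhai
    /// obj.prop         // property access
    /// obj.method(x)    // method call: hashes are recalculated with the object
    ///                  // as an extra first argument
    /// ```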
#[cfg(not(feature = "no_object"))]
fn make_dot_expr(
state: &mut ParseState,
lhs: Expr,
parent_options: ASTFlags,
rhs: Expr,
op_pos: Position,
) -> ParseResult<Expr> {
match (lhs, rhs) {
// lhs[idx_expr].rhs
(Expr::Index(mut x, options, pos), rhs) => {
x.rhs = Self::make_dot_expr(state, x.rhs, options | parent_options, rhs, op_pos)?;
Ok(Expr::Index(x, ASTFlags::NONE, pos))
}
// lhs.module::id - syntax error
#[cfg(not(feature = "no_module"))]
(.., Expr::Variable(x, ..)) if !x.1.is_empty() => {
Err(PERR::PropertyExpected.into_err(x.1.position()))
}
// lhs.id
(lhs, var_expr @ Expr::Variable(..)) => {
let rhs = var_expr.into_property(state);
Ok(Expr::Dot(
BinaryExpr { lhs, rhs }.into(),
ASTFlags::NONE,
op_pos,
))
}
// lhs.prop
(lhs, prop @ Expr::Property(..)) => Ok(Expr::Dot(
BinaryExpr { lhs, rhs: prop }.into(),
ASTFlags::NONE,
op_pos,
)),
// lhs.nnn::func(...) - syntax error
#[cfg(not(feature = "no_module"))]
(.., Expr::FnCall(func, ..)) if func.is_qualified() => {
Err(PERR::PropertyExpected.into_err(func.namespace.position()))
}
// lhs.Fn() or lhs.eval()
(.., Expr::FnCall(func, func_pos))
if func.args.is_empty()
&& [crate::engine::KEYWORD_FN_PTR, crate::engine::KEYWORD_EVAL]
.contains(&func.name.as_ref()) =>
{
let err_msg = format!(
"'{}' should not be called in method style. Try {}(...);",
func.name, func.name
);
Err(LexError::ImproperSymbol(func.name.to_string(), err_msg).into_err(func_pos))
}
// lhs.func!(...)
(.., Expr::FnCall(func, func_pos)) if func.capture_parent_scope => {
Err(PERR::MalformedCapture(
"method-call style does not support running within the caller's scope".into(),
)
.into_err(func_pos))
}
// lhs.func(...)
(lhs, Expr::FnCall(mut func, func_pos)) => {
// Recalculate hash
func.hashes = FnCallHashes::from_all(
#[cfg(not(feature = "no_function"))]
calc_fn_hash(&func.name, func.args.len()),
calc_fn_hash(&func.name, func.args.len() + 1),
);
let rhs = Expr::MethodCall(func, func_pos);
Ok(Expr::Dot(
BinaryExpr { lhs, rhs }.into(),
ASTFlags::NONE,
op_pos,
))
}
// lhs.dot_lhs.dot_rhs or lhs.dot_lhs[idx_rhs]
(lhs, rhs @ Expr::Dot(..)) | (lhs, rhs @ Expr::Index(..)) => {
let (x, term, pos, is_dot) = match rhs {
Expr::Dot(x, term, pos) => (x, term, pos, true),
Expr::Index(x, term, pos) => (x, term, pos, false),
expr => unreachable!("Expr::Dot or Expr::Index expected but gets {:?}", expr),
};
match x.lhs {
// lhs.module::id.dot_rhs or lhs.module::id[idx_rhs] - syntax error
#[cfg(not(feature = "no_module"))]
Expr::Variable(x, ..) if !x.1.is_empty() => {
Err(PERR::PropertyExpected.into_err(x.1.position()))
}
// lhs.module::func().dot_rhs or lhs.module::func()[idx_rhs] - syntax error
#[cfg(not(feature = "no_module"))]
Expr::FnCall(func, ..) if func.is_qualified() => {
Err(PERR::PropertyExpected.into_err(func.namespace.position()))
}
// lhs.id.dot_rhs or lhs.id[idx_rhs]
Expr::Variable(..) | Expr::Property(..) => {
let new_lhs = BinaryExpr {
lhs: x.lhs.into_property(state),
rhs: x.rhs,
}
.into();
let rhs = if is_dot {
Expr::Dot(new_lhs, term, pos)
} else {
Expr::Index(new_lhs, term, pos)
};
Ok(Expr::Dot(
BinaryExpr { lhs, rhs }.into(),
ASTFlags::NONE,
op_pos,
))
}
// lhs.func().dot_rhs or lhs.func()[idx_rhs]
Expr::FnCall(mut func, func_pos) => {
// Recalculate hash
func.hashes = FnCallHashes::from_all(
#[cfg(not(feature = "no_function"))]
calc_fn_hash(&func.name, func.args.len()),
calc_fn_hash(&func.name, func.args.len() + 1),
);
let new_lhs = BinaryExpr {
lhs: Expr::MethodCall(func, func_pos),
rhs: x.rhs,
}
.into();
let rhs = if is_dot {
Expr::Dot(new_lhs, term, pos)
} else {
Expr::Index(new_lhs, term, pos)
};
Ok(Expr::Dot(
BinaryExpr { lhs, rhs }.into(),
ASTFlags::NONE,
op_pos,
))
}
expr => unreachable!("invalid dot expression: {:?}", expr),
}
}
// lhs.rhs
(.., rhs) => Err(PERR::PropertyExpected.into_err(rhs.start_position())),
}
}
/// Parse a binary expression (if any).
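    ///
    /// This implements precedence climbing, so, illustratively (Rhai snippets):
    ///
    /// ```rhai
    /// 1 + 2 * 3    // parsed as `1 + (2 * 3)` because `*` binds tighter
    /// x in list    // rewritten into a call to `contains` with swapped arguments
    /// ```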
fn parse_binary_op(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
parent_precedence: Option<Precedence>,
lhs: Expr,
settings: ParseSettings,
) -> ParseResult<Expr> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
let mut settings = settings;
settings.pos = lhs.position();
let mut root = lhs;
loop {
let (current_op, current_pos) = input.peek().expect(NEVER_ENDS);
let precedence = match current_op {
Token::Custom(c) => self
.custom_keywords
.get(c)
.cloned()
.ok_or_else(|| PERR::Reserved(c.to_string()).into_err(*current_pos))?,
Token::Reserved(c) if !is_valid_identifier(c.chars()) => {
return Err(PERR::UnknownOperator(c.to_string()).into_err(*current_pos))
}
_ => current_op.precedence(),
};
let bind_right = current_op.is_bind_right();
// Bind left to the parent lhs expression if precedence is higher
// If same precedence, then check if the operator binds right
if precedence < parent_precedence || (precedence == parent_precedence && !bind_right) {
return Ok(root);
}
let (op_token, pos) = input.next().expect(NEVER_ENDS);
let rhs = self.parse_unary(input, state, lib, settings)?;
let (next_op, next_pos) = input.peek().expect(NEVER_ENDS);
let next_precedence = match next_op {
Token::Custom(c) => self
.custom_keywords
.get(c)
.cloned()
.ok_or_else(|| PERR::Reserved(c.to_string()).into_err(*next_pos))?,
Token::Reserved(c) if !is_valid_identifier(c.chars()) => {
return Err(PERR::UnknownOperator(c.to_string()).into_err(*next_pos))
}
_ => next_op.precedence(),
};
// Bind to right if the next operator has higher precedence
// If same precedence, then check if the operator binds right
let rhs =
if (precedence == next_precedence && bind_right) || precedence < next_precedence {
self.parse_binary_op(input, state, lib, precedence, rhs, settings)?
} else {
// Otherwise bind to left (even if next operator has the same precedence)
rhs
};
settings = settings.level_up();
settings.pos = pos;
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
let op = op_token.syntax();
let hash = calc_fn_hash(&op, 2);
let op_base = FnCallExpr {
name: state.get_identifier("", op),
hashes: FnCallHashes::from_native(hash),
pos,
..Default::default()
};
let mut args = StaticVec::new_const();
args.push(root);
args.push(rhs);
args.shrink_to_fit();
root = match op_token {
// '!=' defaults to true when passed invalid operands
Token::NotEqualsTo => FnCallExpr { args, ..op_base }.into_fn_call_expr(pos),
// Comparison operators default to false when passed invalid operands
Token::EqualsTo
| Token::LessThan
| Token::LessThanEqualsTo
| Token::GreaterThan
| Token::GreaterThanEqualsTo => {
let pos = args[0].start_position();
FnCallExpr { args, ..op_base }.into_fn_call_expr(pos)
}
Token::Or => {
let rhs = args.pop().unwrap();
let current_lhs = args.pop().unwrap();
Expr::Or(
BinaryExpr {
lhs: current_lhs.ensure_bool_expr()?,
rhs: rhs.ensure_bool_expr()?,
}
.into(),
pos,
)
}
Token::And => {
let rhs = args.pop().unwrap();
let current_lhs = args.pop().unwrap();
Expr::And(
BinaryExpr {
lhs: current_lhs.ensure_bool_expr()?,
rhs: rhs.ensure_bool_expr()?,
}
.into(),
pos,
)
}
Token::In => {
// Swap the arguments
let current_lhs = args.remove(0);
let pos = current_lhs.start_position();
args.push(current_lhs);
args.shrink_to_fit();
// Convert into a call to `contains`
FnCallExpr {
hashes: calc_fn_hash(OP_CONTAINS, 2).into(),
args,
name: state.get_identifier("", OP_CONTAINS),
..op_base
}
.into_fn_call_expr(pos)
}
Token::Custom(s)
if self
.custom_keywords
.get(s.as_str())
.map_or(false, Option::is_some) =>
{
let hash = calc_fn_hash(&s, 2);
let pos = args[0].start_position();
FnCallExpr {
hashes: if is_valid_function_name(&s) {
hash.into()
} else {
FnCallHashes::from_native(hash)
},
args,
..op_base
}
.into_fn_call_expr(pos)
}
_ => {
let pos = args[0].start_position();
FnCallExpr { args, ..op_base }.into_fn_call_expr(pos)
}
};
}
}
/// Parse a custom syntax.
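    ///
    /// # Example
    ///
    /// An illustrative sketch: assuming a custom syntax is registered with the
    /// symbol stream `["exec", "$expr$", "times", "$block$"]` (where `$expr$` and
    /// `$block$` correspond to `CUSTOM_SYNTAX_MARKER_EXPR` and
    /// `CUSTOM_SYNTAX_MARKER_BLOCK`), this function would parse:
    ///
    /// ```rhai
    /// exec 3 times { print("hello"); }
    /// ```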
fn parse_custom_syntax(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
settings: ParseSettings,
key: impl Into<ImmutableString>,
syntax: &CustomSyntax,
pos: Position,
) -> ParseResult<Expr> {
let mut settings = settings;
let mut inputs = StaticVec::<Expr>::new();
let mut segments = StaticVec::new_const();
let mut tokens = StaticVec::new_const();
// Adjust the variables stack
if syntax.scope_may_be_changed {
// Add a barrier variable to the stack so earlier variables will not be matched.
// Variable searches stop at the first barrier.
let marker = state.get_identifier("", SCOPE_SEARCH_BARRIER_MARKER);
state.stack.push(marker, ());
}
let parse_func = syntax.parse.as_ref();
let mut required_token: ImmutableString = key.into();
tokens.push(required_token.clone().into());
segments.push(required_token.clone());
loop {
let (fwd_token, fwd_pos) = input.peek().expect(NEVER_ENDS);
settings.pos = *fwd_pos;
let settings = settings.level_up();
required_token = match parse_func(&segments, &*fwd_token.syntax()) {
Ok(Some(seg))
if seg.starts_with(CUSTOM_SYNTAX_MARKER_SYNTAX_VARIANT)
&& seg.len() > CUSTOM_SYNTAX_MARKER_SYNTAX_VARIANT.len() =>
{
inputs.push(Expr::StringConstant(
state.get_interned_string("", seg),
pos,
));
break;
}
Ok(Some(seg)) => seg,
Ok(None) => break,
Err(err) => return Err(err.0.into_err(settings.pos)),
};
match required_token.as_str() {
CUSTOM_SYNTAX_MARKER_IDENT => {
let (name, pos) = parse_var_name(input)?;
let name = state.get_identifier("", name);
#[cfg(not(feature = "no_module"))]
let ns = crate::ast::Namespace::NONE;
#[cfg(feature = "no_module")]
let ns = ();
segments.push(name.clone().into());
tokens.push(state.get_identifier("", CUSTOM_SYNTAX_MARKER_IDENT));
inputs.push(Expr::Variable((None, ns, 0, name).into(), None, pos));
}
CUSTOM_SYNTAX_MARKER_SYMBOL => {
let (symbol, pos) = parse_symbol(input)?;
let symbol = state.get_interned_string("", symbol);
segments.push(symbol.clone());
tokens.push(state.get_identifier("", CUSTOM_SYNTAX_MARKER_SYMBOL));
inputs.push(Expr::StringConstant(symbol, pos));
}
CUSTOM_SYNTAX_MARKER_EXPR => {
inputs.push(self.parse_expr(input, state, lib, settings)?);
let keyword = state.get_identifier("", CUSTOM_SYNTAX_MARKER_EXPR);
segments.push(keyword.clone().into());
tokens.push(keyword);
}
CUSTOM_SYNTAX_MARKER_BLOCK => {
match self.parse_block(input, state, lib, settings)? {
block @ Stmt::Block(..) => {
inputs.push(Expr::Stmt(Box::new(block.into())));
let keyword = state.get_identifier("", CUSTOM_SYNTAX_MARKER_BLOCK);
segments.push(keyword.clone().into());
tokens.push(keyword);
}
stmt => unreachable!("Stmt::Block expected but gets {:?}", stmt),
}
}
CUSTOM_SYNTAX_MARKER_BOOL => match input.next().expect(NEVER_ENDS) {
(b @ Token::True, pos) | (b @ Token::False, pos) => {
inputs.push(Expr::BoolConstant(b == Token::True, pos));
segments.push(state.get_interned_string("", b.literal_syntax()));
tokens.push(state.get_identifier("", CUSTOM_SYNTAX_MARKER_BOOL));
}
(.., pos) => {
return Err(
PERR::MissingSymbol("Expecting 'true' or 'false'".to_string())
.into_err(pos),
)
}
},
CUSTOM_SYNTAX_MARKER_INT => match input.next().expect(NEVER_ENDS) {
(Token::IntegerConstant(i), pos) => {
inputs.push(Expr::IntegerConstant(i, pos));
segments.push(i.to_string().into());
tokens.push(state.get_identifier("", CUSTOM_SYNTAX_MARKER_INT));
}
(.., pos) => {
return Err(
PERR::MissingSymbol("Expecting an integer number".to_string())
.into_err(pos),
)
}
},
#[cfg(not(feature = "no_float"))]
CUSTOM_SYNTAX_MARKER_FLOAT => match input.next().expect(NEVER_ENDS) {
(Token::FloatConstant(f), pos) => {
inputs.push(Expr::FloatConstant(f, pos));
segments.push(f.to_string().into());
tokens.push(state.get_identifier("", CUSTOM_SYNTAX_MARKER_FLOAT));
}
(.., pos) => {
return Err(PERR::MissingSymbol(
"Expecting a floating-point number".to_string(),
)
.into_err(pos))
}
},
CUSTOM_SYNTAX_MARKER_STRING => match input.next().expect(NEVER_ENDS) {
(Token::StringConstant(s), pos) => {
let s = state.get_interned_string("", s);
inputs.push(Expr::StringConstant(s.clone(), pos));
segments.push(s);
tokens.push(state.get_identifier("", CUSTOM_SYNTAX_MARKER_STRING));
}
(.., pos) => {
return Err(
PERR::MissingSymbol("Expecting a string".to_string()).into_err(pos)
)
}
},
s => match input.next().expect(NEVER_ENDS) {
(Token::LexError(err), pos) => return Err(err.into_err(pos)),
(t, ..) if &*t.syntax() == s => {
segments.push(required_token.clone());
tokens.push(required_token.clone().into());
}
(.., pos) => {
return Err(PERR::MissingToken(
s.to_string(),
format!("for '{}' expression", segments[0]),
)
.into_err(pos))
}
},
}
}
inputs.shrink_to_fit();
tokens.shrink_to_fit();
const KEYWORD_SEMICOLON: &str = Token::SemiColon.literal_syntax();
const KEYWORD_CLOSE_BRACE: &str = Token::RightBrace.literal_syntax();
let self_terminated = match required_token.as_str() {
// It is self-terminating if the last symbol is a block
CUSTOM_SYNTAX_MARKER_BLOCK => true,
// If the last symbol is `;` or `}`, it is self-terminating
KEYWORD_SEMICOLON | KEYWORD_CLOSE_BRACE => true,
_ => false,
};
Ok(Expr::Custom(
CustomExpr {
inputs,
tokens,
scope_may_be_changed: syntax.scope_may_be_changed,
self_terminated,
}
.into(),
pos,
))
}
/// Parse an expression.
fn parse_expr(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
settings: ParseSettings,
) -> ParseResult<Expr> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
let mut settings = settings;
settings.pos = input.peek().expect(NEVER_ENDS).1;
// Parse expression normally.
let precedence = Precedence::new(1);
let lhs = self.parse_unary(input, state, lib, settings.level_up())?;
self.parse_binary_op(input, state, lib, precedence, lhs, settings.level_up())
}
/// Parse an if statement.
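    ///
    /// # Example
    ///
    /// Illustrative Rhai snippet:
    ///
    /// ```rhai
    /// if x > 0 {
    ///     print("positive");
    /// } else if x < 0 {
    ///     print("negative");
    /// } else {
    ///     print("zero");
    /// }
    /// ```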
fn parse_if(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
settings: ParseSettings,
) -> ParseResult<Stmt> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
// if ...
let mut settings = settings;
settings.pos = eat_token(input, Token::If);
// if guard { if_body }
ensure_not_statement_expr(input, "a boolean")?;
let guard = self
.parse_expr(input, state, lib, settings.level_up())?
.ensure_bool_expr()?;
ensure_not_assignment(input)?;
let if_body = self.parse_block(input, state, lib, settings.level_up())?;
// if guard { if_body } else ...
let else_body = if match_token(input, Token::Else).0 {
if let (Token::If, ..) = input.peek().expect(NEVER_ENDS) {
// if guard { if_body } else if ...
self.parse_if(input, state, lib, settings.level_up())?
} else {
// if guard { if_body } else { else-body }
self.parse_block(input, state, lib, settings.level_up())?
}
} else {
Stmt::Noop(Position::NONE)
};
Ok(Stmt::If(
(guard, if_body.into(), else_body.into()).into(),
settings.pos,
))
}
/// Parse a while loop.
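    ///
    /// # Example
    ///
    /// Illustrative Rhai snippets:
    ///
    /// ```rhai
    /// while x > 0 { x -= 1; }    // guarded loop
    /// loop { break; }            // infinite loop: the guard is the unit value
    /// ```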
fn parse_while_loop(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
settings: ParseSettings,
) -> ParseResult<Stmt> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
let mut settings = settings;
        // while|loop ...
let (guard, token_pos) = match input.next().expect(NEVER_ENDS) {
(Token::While, pos) => {
ensure_not_statement_expr(input, "a boolean")?;
let expr = self
.parse_expr(input, state, lib, settings.level_up())?
.ensure_bool_expr()?;
ensure_not_assignment(input)?;
(expr, pos)
}
(Token::Loop, pos) => (Expr::Unit(Position::NONE), pos),
token => unreachable!("Token::While or Token::Loop expected but gets {:?}", token),
};
settings.pos = token_pos;
settings.is_breakable = true;
let body = self.parse_block(input, state, lib, settings.level_up())?;
Ok(Stmt::While((guard, body.into()).into(), settings.pos))
}
/// Parse a do loop.
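    ///
    /// # Example
    ///
    /// Illustrative Rhai snippets:
    ///
    /// ```rhai
    /// do { x -= 1; } while x > 0;    // repeat while the guard holds
    /// do { x -= 1; } until x == 0;   // `until` negates the guard
    /// ```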
fn parse_do(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
settings: ParseSettings,
) -> ParseResult<Stmt> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
// do ...
let mut settings = settings;
settings.pos = eat_token(input, Token::Do);
// do { body } [while|until] guard
settings.is_breakable = true;
let body = self.parse_block(input, state, lib, settings.level_up())?;
let negated = match input.next().expect(NEVER_ENDS) {
(Token::While, ..) => ASTFlags::NONE,
(Token::Until, ..) => ASTFlags::NEGATED,
(.., pos) => {
return Err(
PERR::MissingToken(Token::While.into(), "for the do statement".into())
.into_err(pos),
)
}
};
settings.is_breakable = false;
ensure_not_statement_expr(input, "a boolean")?;
let guard = self
.parse_expr(input, state, lib, settings.level_up())?
.ensure_bool_expr()?;
ensure_not_assignment(input)?;
Ok(Stmt::Do((guard, body.into()).into(), negated, settings.pos))
}
/// Parse a for loop.
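    ///
    /// # Example
    ///
    /// Illustrative Rhai snippets:
    ///
    /// ```rhai
    /// for x in [1, 2, 3] { print(x); }         // loop variable only
    /// for (x, i) in [1, 2, 3] { print(i); }    // with a loop counter
    /// ```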
fn parse_for(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
settings: ParseSettings,
) -> ParseResult<Stmt> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
// for ...
let mut settings = settings;
settings.pos = eat_token(input, Token::For);
// for name ...
let (name, name_pos, counter_name, counter_pos) = if match_token(input, Token::LeftParen).0
{
// ( name, counter )
let (name, name_pos) = parse_var_name(input)?;
let (has_comma, pos) = match_token(input, Token::Comma);
if !has_comma {
return Err(PERR::MissingToken(
Token::Comma.into(),
"after the iteration variable name".into(),
)
.into_err(pos));
}
let (counter_name, counter_pos) = parse_var_name(input)?;
if counter_name == name {
return Err(
PERR::DuplicatedVariable(counter_name.to_string()).into_err(counter_pos)
);
}
let (has_close_paren, pos) = match_token(input, Token::RightParen);
if !has_close_paren {
return Err(PERR::MissingToken(
Token::RightParen.into(),
"to close the iteration variable".into(),
)
.into_err(pos));
}
(name, name_pos, counter_name, counter_pos)
} else {
// name
let (name, name_pos) = parse_var_name(input)?;
(name, name_pos, Identifier::new_const(), Position::NONE)
};
// for name in ...
match input.next().expect(NEVER_ENDS) {
(Token::In, ..) => (),
(Token::LexError(err), pos) => return Err(err.into_err(pos)),
(.., pos) => {
return Err(PERR::MissingToken(
Token::In.into(),
"after the iteration variable".into(),
)
.into_err(pos))
}
}
// for name in expr { body }
ensure_not_statement_expr(input, "a boolean")?;
let expr = self
.parse_expr(input, state, lib, settings.level_up())?
.ensure_iterable()?;
let prev_stack_len = state.stack.len();
        if !counter_name.is_empty() {
            // Push the counter variable first so the parse-time stack mirrors the runtime scope order
            state.stack.push(counter_name.clone(), ());
        }
let counter_var = Ident {
name: state.get_identifier("", counter_name),
pos: counter_pos,
};
let loop_var = state.get_identifier("", name);
state.stack.push(loop_var.clone(), ());
let loop_var = Ident {
name: loop_var,
pos: name_pos,
};
settings.is_breakable = true;
let body = self.parse_block(input, state, lib, settings.level_up())?;
state.stack.rewind(prev_stack_len);
Ok(Stmt::For(
Box::new((loop_var, counter_var, expr, body.into())),
settings.pos,
))
}
/// Parse a variable definition statement.
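    ///
    /// # Example
    ///
    /// Illustrative Rhai snippets:
    ///
    /// ```rhai
    /// let x = 42;      // read-write variable
    /// const Y = 42;    // read-only constant
    /// let z;           // no initializer: defaults to the unit value
    /// ```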
fn parse_let(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
access: AccessMode,
is_export: bool,
settings: ParseSettings,
) -> ParseResult<Stmt> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
        // let/const... (determined by the `access` parameter)
let mut settings = settings;
settings.pos = input.next().expect(NEVER_ENDS).1;
// let name ...
let (name, pos) = parse_var_name(input)?;
if !self.allow_shadowing() && state.stack.iter().any(|(v, ..)| v == &name) {
return Err(PERR::VariableExists(name.to_string()).into_err(pos));
}
if let Some(ref filter) = self.def_var_filter {
let will_shadow = state.stack.iter().any(|(v, ..)| v == &name);
let level = settings.level;
let is_const = access == AccessMode::ReadOnly;
let info = VarDefInfo {
name: &name,
is_const,
nesting_level: level,
will_shadow,
};
let context = EvalContext {
engine: self,
scope: &mut state.stack,
global: &mut GlobalRuntimeState::new(self),
state: &mut EvalState::new(),
lib: &[],
this_ptr: &mut None,
level,
};
match filter(false, info, &context) {
Ok(true) => (),
Ok(false) => return Err(PERR::ForbiddenVariable(name.to_string()).into_err(pos)),
Err(err) => match *err {
EvalAltResult::ErrorParsing(perr, pos) => return Err(perr.into_err(pos)),
_ => return Err(PERR::ForbiddenVariable(name.to_string()).into_err(pos)),
},
}
}
let name = state.get_identifier("", name);
// let name = ...
let expr = if match_token(input, Token::Equals).0 {
// let name = expr
self.parse_expr(input, state, lib, settings.level_up())?
} else {
Expr::Unit(Position::NONE)
};
let export = if is_export {
ASTFlags::EXPORTED
} else {
ASTFlags::NONE
};
let (existing, hit_barrier) = state.find_var(&name);
let existing = if !hit_barrier && existing > 0 {
let offset = state.stack.len() - existing;
if offset < state.block_stack_len {
// Defined in parent block
None
} else {
Some(offset)
}
} else {
None
};
let idx = if let Some(n) = existing {
state.stack.get_mut_by_index(n).set_access_mode(access);
Some(NonZeroUsize::new(state.stack.len() - n).unwrap())
} else {
state.stack.push_entry(name.as_str(), access, Dynamic::UNIT);
None
};
let var_def = (Ident { name, pos }, expr, idx).into();
Ok(match access {
// let name = expr
AccessMode::ReadWrite => Stmt::Var(var_def, export, settings.pos),
// const name = { expr:constant }
AccessMode::ReadOnly => Stmt::Var(var_def, ASTFlags::CONSTANT | export, settings.pos),
})
}
/// Parse an import statement.
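/// Illustrative forms: `import "mod";` or `import "mod" as m;`; the `as` branch
/// below records the alias in `state.imports`.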
#[cfg(not(feature = "no_module"))]
fn parse_import(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
settings: ParseSettings,
) -> ParseResult<Stmt> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
// import ...
let mut settings = settings;
settings.pos = eat_token(input, Token::Import);
// import expr ...
let expr = self.parse_expr(input, state, lib, settings.level_up())?;
// import expr as ...
if !match_token(input, Token::As).0 {
return Ok(Stmt::Import((expr, Ident::EMPTY).into(), settings.pos));
}
// import expr as name ...
let (name, pos) = parse_var_name(input)?;
let name = state.get_identifier("", name);
state.imports.push(name.clone());
Ok(Stmt::Import(
(expr, Ident { name, pos }).into(),
settings.pos,
))
}
/// Parse an export statement.
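/// Illustrative forms: `export x;`, `export x as y;`, and the shorthands
/// `export let x = 42;` / `export const X = 42;`, which delegate to `parse_let`.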
#[cfg(not(feature = "no_module"))]
fn parse_export(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
settings: ParseSettings,
) -> ParseResult<Stmt> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
let mut settings = settings;
settings.pos = eat_token(input, Token::Export);
match input.peek().expect(NEVER_ENDS) {
(Token::Let, pos) => {
let pos = *pos;
let mut stmt =
self.parse_let(input, state, lib, AccessMode::ReadWrite, true, settings)?;
stmt.set_position(pos);
return Ok(stmt);
}
(Token::Const, pos) => {
let pos = *pos;
let mut stmt =
self.parse_let(input, state, lib, AccessMode::ReadOnly, true, settings)?;
stmt.set_position(pos);
return Ok(stmt);
}
_ => (),
}
let (id, id_pos) = parse_var_name(input)?;
let (alias, alias_pos) = if match_token(input, Token::As).0 {
let (name, pos) = parse_var_name(input)?;
(Some(name), pos)
} else {
(None, Position::NONE)
};
let export = (
Ident {
name: state.get_identifier("", id),
pos: id_pos,
},
Ident {
name: state.get_identifier("", alias.as_ref().map_or("", <_>::as_ref)),
pos: alias_pos,
},
);
Ok(Stmt::Export(export.into(), settings.pos))
}
/// Parse a statement block.
fn parse_block(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
settings: ParseSettings,
) -> ParseResult<Stmt> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
// Must start with {
let mut settings = settings;
settings.pos = match input.next().expect(NEVER_ENDS) {
(Token::LeftBrace, pos) => pos,
(Token::LexError(err), pos) => return Err(err.into_err(pos)),
(.., pos) => {
return Err(PERR::MissingToken(
Token::LeftBrace.into(),
"to start a statement block".into(),
)
.into_err(pos))
}
};
let mut statements = StaticVec::new();
let prev_entry_stack_len = state.block_stack_len;
state.block_stack_len = state.stack.len();
#[cfg(not(feature = "no_module"))]
let orig_imports_len = state.imports.len();
let end_pos = loop {
// Terminated?
match input.peek().expect(NEVER_ENDS) {
(Token::RightBrace, ..) => break eat_token(input, Token::RightBrace),
(Token::EOF, pos) => {
return Err(PERR::MissingToken(
Token::RightBrace.into(),
"to terminate this block".into(),
)
.into_err(*pos));
}
_ => (),
}
// Parse statements inside the block
settings.is_global = false;
let stmt = self.parse_stmt(input, state, lib, settings.level_up())?;
if stmt.is_noop() {
continue;
}
// See if it needs a terminating semicolon
let need_semicolon = !stmt.is_self_terminated();
statements.push(stmt);
match input.peek().expect(NEVER_ENDS) {
// { ... stmt }
(Token::RightBrace, ..) => break eat_token(input, Token::RightBrace),
// { ... stmt;
(Token::SemiColon, ..) if need_semicolon => {
eat_token(input, Token::SemiColon);
}
// { ... { stmt } ;
(Token::SemiColon, ..) if !need_semicolon => {
eat_token(input, Token::SemiColon);
}
// { ... { stmt } ???
_ if !need_semicolon => (),
// { ... stmt <error>
(Token::LexError(err), err_pos) => return Err(err.clone().into_err(*err_pos)),
// { ... stmt ???
(.., pos) => {
// Semicolons are not optional between statements
return Err(PERR::MissingToken(
Token::SemiColon.into(),
"to terminate this statement".into(),
)
.into_err(*pos));
}
}
};
state.stack.rewind(state.block_stack_len);
state.block_stack_len = prev_entry_stack_len;
#[cfg(not(feature = "no_module"))]
state.imports.truncate(orig_imports_len);
Ok((statements, settings.pos, end_pos).into())
}
/// Parse an expression as a statement.
fn parse_expr_stmt(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
settings: ParseSettings,
) -> ParseResult<Stmt> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
let mut settings = settings;
settings.pos = input.peek().expect(NEVER_ENDS).1;
let expr = self.parse_expr(input, state, lib, settings.level_up())?;
let stmt = self.parse_op_assignment_stmt(input, state, lib, expr, settings.level_up())?;
Ok(stmt)
}
/// Parse a single statement.
fn parse_stmt(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
settings: ParseSettings,
) -> ParseResult<Stmt> {
use AccessMode::{ReadOnly, ReadWrite};
let mut settings = settings;
#[cfg(not(feature = "no_function"))]
#[cfg(feature = "metadata")]
let comments = {
let mut comments = StaticVec::<SmartString>::new();
let mut comments_pos = Position::NONE;
// Handle doc-comments.
while let (Token::Comment(ref comment), pos) = input.peek().expect(NEVER_ENDS) {
if comments_pos.is_none() {
comments_pos = *pos;
}
if !crate::tokenizer::is_doc_comment(comment) {
unreachable!("doc-comment expected but gets {:?}", comment);
}
if !settings.is_global {
return Err(PERR::WrongDocComment.into_err(comments_pos));
}
match input.next().expect(NEVER_ENDS).0 {
Token::Comment(comment) => {
comments.push(comment);
match input.peek().expect(NEVER_ENDS) {
(Token::Fn, ..) | (Token::Private, ..) => break,
(Token::Comment(..), ..) => (),
_ => return Err(PERR::WrongDocComment.into_err(comments_pos)),
}
}
token => unreachable!("Token::Comment expected but got {:?}", token),
}
}
comments
};
let (token, token_pos) = match input.peek().expect(NEVER_ENDS) {
(Token::EOF, pos) => return Ok(Stmt::Noop(*pos)),
(x, pos) => (x, *pos),
};
settings.pos = token_pos;
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
match token {
// ; - empty statement
Token::SemiColon => {
eat_token(input, Token::SemiColon);
Ok(Stmt::Noop(token_pos))
}
// { - statements block
Token::LeftBrace => Ok(self.parse_block(input, state, lib, settings.level_up())?),
// fn ...
#[cfg(not(feature = "no_function"))]
Token::Fn if !settings.is_global => Err(PERR::WrongFnDefinition.into_err(token_pos)),
#[cfg(not(feature = "no_function"))]
Token::Fn | Token::Private => {
let access = if matches!(token, Token::Private) {
eat_token(input, Token::Private);
crate::FnAccess::Private
} else {
crate::FnAccess::Public
};
match input.next().expect(NEVER_ENDS) {
(Token::Fn, pos) => {
let mut new_state = ParseState::new(self, state.tokenizer_control.clone());
#[cfg(not(feature = "unchecked"))]
{
new_state.max_expr_depth = self.max_function_expr_depth();
}
let new_settings = ParseSettings {
is_global: false,
is_function_scope: true,
#[cfg(not(feature = "no_closure"))]
is_closure_scope: false,
is_breakable: false,
level: 0,
options: LanguageOptions {
strict_var: settings.options.strict_var,
..self.options
},
pos,
..settings
};
let func = self.parse_fn(
input,
&mut new_state,
lib,
access,
new_settings,
#[cfg(not(feature = "no_function"))]
#[cfg(feature = "metadata")]
comments,
)?;
let hash = calc_fn_hash(&func.name, func.params.len());
if !lib.is_empty() && lib.contains_key(&hash) {
return Err(PERR::FnDuplicatedDefinition(
func.name.to_string(),
func.params.len(),
)
.into_err(pos));
}
lib.insert(hash, func.into());
Ok(Stmt::Noop(pos))
}
(.., pos) => Err(PERR::MissingToken(
Token::Fn.into(),
format!("following '{}'", Token::Private.syntax()),
)
.into_err(pos)),
}
}
Token::If => self.parse_if(input, state, lib, settings.level_up()),
Token::Switch => self.parse_switch(input, state, lib, settings.level_up()),
Token::While | Token::Loop if self.allow_looping() => {
self.parse_while_loop(input, state, lib, settings.level_up())
}
Token::Do if self.allow_looping() => {
self.parse_do(input, state, lib, settings.level_up())
}
Token::For if self.allow_looping() => {
self.parse_for(input, state, lib, settings.level_up())
}
Token::Continue if self.allow_looping() && settings.is_breakable => {
let pos = eat_token(input, Token::Continue);
Ok(Stmt::BreakLoop(ASTFlags::NONE, pos))
}
Token::Break if self.allow_looping() && settings.is_breakable => {
let pos = eat_token(input, Token::Break);
Ok(Stmt::BreakLoop(ASTFlags::BREAK, pos))
}
Token::Continue | Token::Break if self.allow_looping() => {
Err(PERR::LoopBreak.into_err(token_pos))
}
Token::Return | Token::Throw => {
let (return_type, token_pos) = input
.next()
.map(|(token, pos)| {
let flags = match token {
Token::Return => ASTFlags::NONE,
Token::Throw => ASTFlags::BREAK,
token => unreachable!(
"Token::Return or Token::Throw expected but gets {:?}",
token
),
};
(flags, pos)
})
.expect(NEVER_ENDS);
match input.peek().expect(NEVER_ENDS) {
// `return`/`throw` at <EOF>
(Token::EOF, ..) => Ok(Stmt::Return(None, return_type, token_pos)),
// `return`/`throw` at end of block
(Token::RightBrace, ..) if !settings.is_global => {
Ok(Stmt::Return(None, return_type, token_pos))
}
// `return;` or `throw;`
(Token::SemiColon, ..) => Ok(Stmt::Return(None, return_type, token_pos)),
// `return` or `throw` with expression
_ => {
let expr = self.parse_expr(input, state, lib, settings.level_up())?;
Ok(Stmt::Return(Some(expr.into()), return_type, token_pos))
}
}
}
Token::Try => self.parse_try_catch(input, state, lib, settings.level_up()),
Token::Let => self.parse_let(input, state, lib, ReadWrite, false, settings.level_up()),
Token::Const => self.parse_let(input, state, lib, ReadOnly, false, settings.level_up()),
#[cfg(not(feature = "no_module"))]
Token::Import => self.parse_import(input, state, lib, settings.level_up()),
#[cfg(not(feature = "no_module"))]
Token::Export if !settings.is_global => Err(PERR::WrongExport.into_err(token_pos)),
#[cfg(not(feature = "no_module"))]
Token::Export => self.parse_export(input, state, lib, settings.level_up()),
_ => self.parse_expr_stmt(input, state, lib, settings.level_up()),
}
}
/// Parse a try/catch statement.
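/// Illustrative forms: `try { ... } catch { ... }` or `try { ... } catch (err) { ... }`;
/// the catch variable, if any, is in scope only for the catch block (see the rewind below).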
fn parse_try_catch(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
settings: ParseSettings,
) -> ParseResult<Stmt> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
// try ...
let mut settings = settings;
settings.pos = eat_token(input, Token::Try);
// try { try_block }
let try_block = self.parse_block(input, state, lib, settings.level_up())?;
// try { try_block } catch
let (matched, catch_pos) = match_token(input, Token::Catch);
if !matched {
return Err(
PERR::MissingToken(Token::Catch.into(), "for the 'try' statement".into())
.into_err(catch_pos),
);
}
// try { try_block } catch (
let catch_var = if match_token(input, Token::LeftParen).0 {
let (name, pos) = parse_var_name(input)?;
let (matched, err_pos) = match_token(input, Token::RightParen);
if !matched {
return Err(PERR::MissingToken(
Token::RightParen.into(),
"to enclose the catch variable".into(),
)
.into_err(err_pos));
}
let name = state.get_identifier("", name);
state.stack.push(name.clone(), ());
Ident { name, pos }
} else {
Ident::EMPTY
};
// try { try_block } catch ( var ) { catch_block }
let catch_block = self.parse_block(input, state, lib, settings.level_up())?;
if !catch_var.is_empty() {
// Remove the error variable from the stack
state.stack.rewind(state.stack.len() - 1);
}
Ok(Stmt::TryCatch(
TryCatchBlock {
try_block: try_block.into(),
catch_var,
catch_block: catch_block.into(),
}
.into(),
settings.pos,
))
}
/// Parse a function definition.
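/// Illustrative form: `fn name(a, b) { ... }`; by this point the leading `fn` (and
/// any `private` prefix) has already been consumed by `parse_stmt`.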
#[cfg(not(feature = "no_function"))]
fn parse_fn(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
access: crate::FnAccess,
settings: ParseSettings,
#[cfg(not(feature = "no_function"))]
#[cfg(feature = "metadata")]
comments: StaticVec<SmartString>,
) -> ParseResult<ScriptFnDef> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
let mut settings = settings;
let (token, pos) = input.next().expect(NEVER_ENDS);
let name = match token.into_function_name_for_override() {
Ok(r) => r,
Err(Token::Reserved(s)) => return Err(PERR::Reserved(s.to_string()).into_err(pos)),
Err(_) => return Err(PERR::FnMissingName.into_err(pos)),
};
match input.peek().expect(NEVER_ENDS) {
(Token::LeftParen, ..) => eat_token(input, Token::LeftParen),
(.., pos) => return Err(PERR::FnMissingParams(name.to_string()).into_err(*pos)),
};
let mut params = StaticVec::new_const();
if !match_token(input, Token::RightParen).0 {
let sep_err = format!("to separate the parameters of function '{}'", name);
loop {
match input.next().expect(NEVER_ENDS) {
(Token::RightParen, ..) => break,
(Token::Identifier(s), pos) => {
if params.iter().any(|(p, _)| p == &*s) {
return Err(PERR::FnDuplicatedParam(name.to_string(), s.to_string())
.into_err(pos));
}
let s = state.get_identifier("", s);
state.stack.push(s.clone(), ());
params.push((s, pos))
}
(Token::LexError(err), pos) => return Err(err.into_err(pos)),
(.., pos) => {
return Err(PERR::MissingToken(
Token::RightParen.into(),
format!("to close the parameters list of function '{}'", name),
)
.into_err(pos))
}
}
match input.next().expect(NEVER_ENDS) {
(Token::RightParen, ..) => break,
(Token::Comma, ..) => (),
(Token::LexError(err), pos) => return Err(err.into_err(pos)),
(.., pos) => {
return Err(PERR::MissingToken(Token::Comma.into(), sep_err).into_err(pos))
}
}
}
}
// Parse function body
let body = match input.peek().expect(NEVER_ENDS) {
(Token::LeftBrace, ..) => {
settings.is_breakable = false;
self.parse_block(input, state, lib, settings.level_up())?
}
(.., pos) => return Err(PERR::FnMissingBody(name.to_string()).into_err(*pos)),
}
.into();
let mut params: StaticVec<_> = params.into_iter().map(|(p, ..)| p).collect();
params.shrink_to_fit();
Ok(ScriptFnDef {
name: state.get_identifier("", name),
access,
params,
body,
#[cfg(not(feature = "no_module"))]
environ: None,
#[cfg(not(feature = "no_function"))]
#[cfg(feature = "metadata")]
comments: comments
.into_iter()
.map(|s| s.to_string().into_boxed_str())
.collect::<Vec<_>>()
.into_boxed_slice(),
})
}
/// Creates a curried expression from a list of external variables
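/// For example (illustrative), a closure `|x| x + y` capturing `y` becomes roughly
/// `curry(fn_ptr, y)` wrapped in a statement block that first emits a `Share`
/// statement for each captured variable.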
#[cfg(not(feature = "no_function"))]
#[cfg(not(feature = "no_closure"))]
fn make_curry_from_externals(
state: &mut ParseState,
fn_expr: Expr,
externals: StaticVec<crate::ast::Ident>,
pos: Position,
) -> Expr {
// If there are no captured variables, no need to curry
if externals.is_empty() {
return fn_expr;
}
let num_externals = externals.len();
let mut args = StaticVec::with_capacity(externals.len() + 1);
args.push(fn_expr);
args.extend(
externals
.iter()
.cloned()
.map(|crate::ast::Ident { name, pos }| {
#[cfg(not(feature = "no_module"))]
let ns = crate::ast::Namespace::NONE;
#[cfg(feature = "no_module")]
let ns = ();
Expr::Variable((None, ns, 0, name).into(), None, pos)
}),
);
let expr = FnCallExpr {
name: state.get_identifier("", crate::engine::KEYWORD_FN_PTR_CURRY),
hashes: FnCallHashes::from_native(calc_fn_hash(
crate::engine::KEYWORD_FN_PTR_CURRY,
num_externals + 1,
)),
args,
pos,
..Default::default()
}
.into_fn_call_expr(pos);
// Convert the entire expression into a statement block, then insert the relevant
// [`Share`][Stmt::Share] statements.
let mut statements = StaticVec::with_capacity(externals.len() + 1);
statements.extend(
externals
.into_iter()
.map(|crate::ast::Ident { name, pos }| Stmt::Share(name.into(), pos)),
);
statements.push(Stmt::Expr(expr.into()));
Expr::Stmt(crate::ast::StmtBlock::new(statements, pos, Position::NONE).into())
}
/// Parse an anonymous function definition.
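/// Illustrative forms: `|x, y| x + y` or `|| 42`; captured external variables are
/// prepended to the parameter list and curried back in via `make_curry_from_externals`.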
#[cfg(not(feature = "no_function"))]
fn parse_anon_fn(
&self,
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FnLib,
settings: ParseSettings,
) -> ParseResult<(Expr, ScriptFnDef)> {
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
let mut settings = settings;
let mut params_list = StaticVec::new_const();
if input.next().expect(NEVER_ENDS).0 != Token::Or && !match_token(input, Token::Pipe).0 {
loop {
match input.next().expect(NEVER_ENDS) {
(Token::Pipe, ..) => break,
(Token::Identifier(s), pos) => {
if params_list.iter().any(|p| p == &*s) {
return Err(PERR::FnDuplicatedParam("".to_string(), s.to_string())
.into_err(pos));
}
let s = state.get_identifier("", s);
state.stack.push(s.clone(), ());
params_list.push(s)
}
(Token::LexError(err), pos) => return Err(err.into_err(pos)),
(.., pos) => {
return Err(PERR::MissingToken(
Token::Pipe.into(),
"to close the parameters list of anonymous function".into(),
)
.into_err(pos))
}
}
match input.next().expect(NEVER_ENDS) {
(Token::Pipe, ..) => break,
(Token::Comma, ..) => (),
(Token::LexError(err), pos) => return Err(err.into_err(pos)),
(.., pos) => {
return Err(PERR::MissingToken(
Token::Comma.into(),
"to separate the parameters of anonymous function".into(),
)
.into_err(pos))
}
}
}
}
// Parse function body
settings.is_breakable = false;
let body = self.parse_stmt(input, state, lib, settings.level_up())?;
// External variables may need to be processed in a consistent order,
// so extract them into a list.
#[cfg(not(feature = "no_closure"))]
let (mut params, externals) = {
let externals: StaticVec<_> = state.external_vars.iter().cloned().collect();
let mut params = StaticVec::with_capacity(params_list.len() + externals.len());
params.extend(
externals
.iter()
.map(|crate::ast::Ident { name, .. }| name.clone()),
);
(params, externals)
};
#[cfg(feature = "no_closure")]
let mut params = StaticVec::with_capacity(params_list.len());
params.append(&mut params_list);
// Create unique function name by hashing the script body plus the parameters.
let hasher = &mut get_hasher();
params.iter().for_each(|p| p.hash(hasher));
body.hash(hasher);
let hash = hasher.finish();
let fn_name = state.get_identifier("", make_anonymous_fn(hash));
// Define the function
let script = ScriptFnDef {
name: fn_name.clone(),
access: crate::FnAccess::Public,
params,
body: body.into(),
#[cfg(not(feature = "no_module"))]
environ: None,
#[cfg(not(feature = "no_function"))]
#[cfg(feature = "metadata")]
comments: Box::default(),
};
let fn_ptr = crate::FnPtr::new_unchecked(fn_name, StaticVec::new_const());
let expr = Expr::DynamicConstant(Box::new(fn_ptr.into()), settings.pos);
#[cfg(not(feature = "no_closure"))]
let expr = Self::make_curry_from_externals(state, expr, externals, settings.pos);
Ok((expr, script))
}
/// Parse a global level expression.
pub(crate) fn parse_global_expr(
&self,
input: &mut TokenStream,
state: &mut ParseState,
scope: &Scope,
optimization_level: OptimizationLevel,
) -> ParseResult<AST> {
let _scope = scope;
let _optimization_level = optimization_level;
let mut functions = BTreeMap::new();
let settings = ParseSettings {
is_global: true,
#[cfg(not(feature = "no_function"))]
is_function_scope: false,
#[cfg(not(feature = "no_function"))]
#[cfg(not(feature = "no_closure"))]
is_closure_scope: false,
is_breakable: false,
level: 0,
options: LanguageOptions {
allow_if_expr: false,
allow_switch_expr: false,
allow_stmt_expr: false,
#[cfg(not(feature = "no_function"))]
allow_anonymous_fn: false,
..self.options
},
pos: Position::NONE,
};
let expr = self.parse_expr(input, state, &mut functions, settings)?;
assert!(functions.is_empty());
match input.peek().expect(NEVER_ENDS) {
(Token::EOF, ..) => (),
// Return error if the expression doesn't end
(token, pos) => {
return Err(LexError::UnexpectedInput(token.syntax().to_string()).into_err(*pos))
}
}
let mut statements = StmtBlockContainer::new_const();
statements.push(Stmt::Expr(expr.into()));
#[cfg(not(feature = "no_optimize"))]
return Ok(crate::optimizer::optimize_into_ast(
self,
_scope,
statements,
#[cfg(not(feature = "no_function"))]
StaticVec::new_const(),
optimization_level,
));
#[cfg(feature = "no_optimize")]
return Ok(AST::new(
statements,
#[cfg(not(feature = "no_function"))]
crate::Module::new(),
));
}
/// Parse the global level statements.
fn parse_global_level(
&self,
input: &mut TokenStream,
state: &mut ParseState,
) -> ParseResult<(StmtBlockContainer, StaticVec<Shared<ScriptFnDef>>)> {
let mut statements = StmtBlockContainer::new_const();
let mut functions = BTreeMap::new();
while !input.peek().expect(NEVER_ENDS).0.is_eof() {
let settings = ParseSettings {
is_global: true,
#[cfg(not(feature = "no_function"))]
is_function_scope: false,
#[cfg(not(feature = "no_function"))]
#[cfg(not(feature = "no_closure"))]
is_closure_scope: false,
is_breakable: false,
options: self.options,
level: 0,
pos: Position::NONE,
};
let stmt = self.parse_stmt(input, state, &mut functions, settings)?;
if stmt.is_noop() {
continue;
}
let need_semicolon = !stmt.is_self_terminated();
statements.push(stmt);
match input.peek().expect(NEVER_ENDS) {
// EOF
(Token::EOF, ..) => break,
// stmt ;
(Token::SemiColon, ..) if need_semicolon => {
eat_token(input, Token::SemiColon);
}
// stmt ;
(Token::SemiColon, ..) if !need_semicolon => (),
// { stmt } ???
_ if !need_semicolon => (),
// stmt <error>
(Token::LexError(err), pos) => return Err(err.clone().into_err(*pos)),
// stmt ???
(.., pos) => {
// Semicolons are not optional between statements
return Err(PERR::MissingToken(
Token::SemiColon.into(),
"to terminate this statement".into(),
)
.into_err(*pos));
}
}
}
Ok((statements, functions.into_iter().map(|(.., v)| v).collect()))
}
/// Run the parser on an input stream, returning an AST.
#[inline]
pub(crate) fn parse(
&self,
input: &mut TokenStream,
state: &mut ParseState,
scope: &Scope, | ) -> ParseResult<AST> {
let _scope = scope;
let _optimization_level = optimization_level;
let (statements, _lib) = self.parse_global_level(input, state)?;
#[cfg(not(feature = "no_optimize"))]
return Ok(crate::optimizer::optimize_into_ast(
self,
_scope,
statements,
#[cfg(not(feature = "no_function"))]
_lib,
optimization_level,
));
#[cfg(feature = "no_optimize")]
#[cfg(not(feature = "no_function"))]
{
let mut m = crate::Module::new();
for fn_def in _lib {
m.set_script_fn(fn_def);
}
return Ok(AST::new(statements, m));
}
#[cfg(feature = "no_optimize")]
#[cfg(feature = "no_function")]
return Ok(AST::new(
statements,
#[cfg(not(feature = "no_function"))]
crate::Module::new(),
));
}
} | optimization_level: OptimizationLevel, |
no_gopacket.go | // +build !linux
/*
* Copyright (C) 2016 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package probes
import (
"github.com/skydive-project/skydive/api/types"
"github.com/skydive-project/skydive/common"
"github.com/skydive-project/skydive/flow"
"github.com/skydive-project/skydive/graffiti/graph"
)
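// Non-Linux build: everything below is a stub; each probe operation returns
// common.ErrNotImplemented because gopacket capture is only implemented on Linux.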
// GoPacketProbesHandler describes a flow probe handle in the graph
type GoPacketProbesHandler struct {
}
// RegisterProbe registers a gopacket probe
func (p *GoPacketProbesHandler) RegisterProbe(n *graph.Node, capture *types.Capture, e ProbeEventHandler) (Probe, error) {
return nil, common.ErrNotImplemented
}
// UnregisterProbe unregisters gopacket probe
func (p *GoPacketProbesHandler) UnregisterProbe(n *graph.Node, e ProbeEventHandler, fp Probe) error {
return common.ErrNotImplemented
}
// Start probe
func (p *GoPacketProbesHandler) Start() {
}
// Stop probe | }
// NewGoPacketProbesHandler creates a new gopacket probe in the graph
func NewGoPacketProbesHandler(g *graph.Graph, fta *flow.TableAllocator) (*GoPacketProbesHandler, error) {
return nil, common.ErrNotImplemented
} | func (p *GoPacketProbesHandler) Stop() { |
get_row_test.rs | import test_basic::*;
import glue::*;
#[test]
fn | () {
let conn = TestConnect();
Assure(conn.Exec("drop table if exists movie"));
Assure(conn.Exec("create table movie (\
did serial,\
unique(did),\
title varchar(255),\
year int,\
director varchar(255)\
);"
));
InsertStarWars(conn, "movie");
let res = Assure(conn.Exec("select * from movie"));
unsafe {
assert GetRow(res, 0) == [
Int32(1),
VarChar("a new hope"),
Int32(1977),
VarChar("lucas")
];
}
conn.Exec("drop table if exists movie");
}
| GetRowTest |
operations_insights_config.py | # coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class | (object):
"""
The configuration of Operations Insights for the external database
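
Example (illustrative, not part of the generated docs)::

    config = OperationsInsightsConfig(operations_insights_status="ENABLED")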
"""
#: A constant which can be used with the operations_insights_status property of a OperationsInsightsConfig.
#: This constant has a value of "ENABLING"
OPERATIONS_INSIGHTS_STATUS_ENABLING = "ENABLING"
#: A constant which can be used with the operations_insights_status property of a OperationsInsightsConfig.
#: This constant has a value of "ENABLED"
OPERATIONS_INSIGHTS_STATUS_ENABLED = "ENABLED"
#: A constant which can be used with the operations_insights_status property of a OperationsInsightsConfig.
#: This constant has a value of "DISABLING"
OPERATIONS_INSIGHTS_STATUS_DISABLING = "DISABLING"
#: A constant which can be used with the operations_insights_status property of a OperationsInsightsConfig.
#: This constant has a value of "NOT_ENABLED"
OPERATIONS_INSIGHTS_STATUS_NOT_ENABLED = "NOT_ENABLED"
#: A constant which can be used with the operations_insights_status property of a OperationsInsightsConfig.
#: This constant has a value of "FAILED_ENABLING"
OPERATIONS_INSIGHTS_STATUS_FAILED_ENABLING = "FAILED_ENABLING"
#: A constant which can be used with the operations_insights_status property of a OperationsInsightsConfig.
#: This constant has a value of "FAILED_DISABLING"
OPERATIONS_INSIGHTS_STATUS_FAILED_DISABLING = "FAILED_DISABLING"
def __init__(self, **kwargs):
"""
Initializes a new OperationsInsightsConfig object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param operations_insights_status:
The value to assign to the operations_insights_status property of this OperationsInsightsConfig.
Allowed values for this property are: "ENABLING", "ENABLED", "DISABLING", "NOT_ENABLED", "FAILED_ENABLING", "FAILED_DISABLING", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:type operations_insights_status: str
:param operations_insights_connector_id:
The value to assign to the operations_insights_connector_id property of this OperationsInsightsConfig.
:type operations_insights_connector_id: str
"""
self.swagger_types = {
'operations_insights_status': 'str',
'operations_insights_connector_id': 'str'
}
self.attribute_map = {
'operations_insights_status': 'operationsInsightsStatus',
'operations_insights_connector_id': 'operationsInsightsConnectorId'
}
self._operations_insights_status = None
self._operations_insights_connector_id = None
@property
def operations_insights_status(self):
"""
**[Required]** Gets the operations_insights_status of this OperationsInsightsConfig.
The status of Operations Insights
Allowed values for this property are: "ENABLING", "ENABLED", "DISABLING", "NOT_ENABLED", "FAILED_ENABLING", "FAILED_DISABLING", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The operations_insights_status of this OperationsInsightsConfig.
:rtype: str
"""
return self._operations_insights_status
@operations_insights_status.setter
def operations_insights_status(self, operations_insights_status):
"""
Sets the operations_insights_status of this OperationsInsightsConfig.
The status of Operations Insights
:param operations_insights_status: The operations_insights_status of this OperationsInsightsConfig.
:type: str
"""
allowed_values = ["ENABLING", "ENABLED", "DISABLING", "NOT_ENABLED", "FAILED_ENABLING", "FAILED_DISABLING"]
if not value_allowed_none_or_none_sentinel(operations_insights_status, allowed_values):
operations_insights_status = 'UNKNOWN_ENUM_VALUE'
self._operations_insights_status = operations_insights_status
@property
def operations_insights_connector_id(self):
"""
Gets the operations_insights_connector_id of this OperationsInsightsConfig.
The `OCID`__ of the
:func:`create_external_database_connector_details`.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:return: The operations_insights_connector_id of this OperationsInsightsConfig.
:rtype: str
"""
return self._operations_insights_connector_id
@operations_insights_connector_id.setter
def operations_insights_connector_id(self, operations_insights_connector_id):
"""
Sets the operations_insights_connector_id of this OperationsInsightsConfig.
The `OCID`__ of the
:func:`create_external_database_connector_details`.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:param operations_insights_connector_id: The operations_insights_connector_id of this OperationsInsightsConfig.
:type: str
"""
self._operations_insights_connector_id = operations_insights_connector_id
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| OperationsInsightsConfig |
fcntest.py | from metrics import MetricFunctionNYUv2, print_single_error
from model import SupervisedLossFunction
from torch.utils.data import DataLoader
from torchvision import transforms
from nyuv2 import NYUv2
from tqdm import tqdm
from general import generate_layers, load_checkpoint, tensors_to_device
import torch | from torchvision.models.segmentation.segmentation import fcn_resnet50
num_layers = 3
def runmodel(model, imgs, depths):
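# Split each RGB-D pair into `num_layers` per-depth layers, run the FCN over every
# layer, and stack the per-layer logits along a new trailing dimension.
# (Assumes generate_layers returns a list of image tensors; see its import from general.)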
layers = generate_layers(imgs, depths, num_layers)
x = [model(x)['out'] for x in layers]
return torch.stack(x, dim=-1)
def run_test_nyuv2(model, dataloader, loss_fn, metric_fn):
loop = tqdm(dataloader, position=0, leave=True)
for i, tensors in enumerate(loop):
imgs, seg13, normals, depths = tensors_to_device(tensors, DEVICE)
with torch.no_grad():
predictions = runmodel(model, imgs, depths)
loss_fn(predictions, (normals, depths))
metric_fn.evaluate(predictions, (seg13, normals, depths))
loop.close()
DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
model = fcn_resnet50(pretrained=False, num_classes=14)
model = model.to(DEVICE)
epoch_idx, model = load_checkpoint(model, "fcnmodel.pth", DEVICE)
t = transforms.Compose([transforms.Resize((256, 256)), transforms.ToTensor()])
test_dataset = NYUv2(root="../NYUv2", download=True, rgb_transform=t, seg_transform=t, sn_transform=t, depth_transform=t, train=False)
dataloader = DataLoader(test_dataset, batch_size=2, shuffle=True)
loss_fn = SupervisedLossFunction()
metric_fn = MetricFunctionNYUv2(2)
model.eval()
run_test_nyuv2(model, dataloader, loss_fn, metric_fn)
print_single_error(epoch_idx, loss_fn.show(), metric_fn.show()) | |
queueServiceProperties.go | // *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package latest
import (
"reflect"
"github.com/pkg/errors"
"github.com/pulumi/pulumi/sdk/v2/go/pulumi"
)
// The properties of a storage account’s Queue service.
type QueueServiceProperties struct {
pulumi.CustomResourceState
// Specifies CORS rules for the Queue service. You can include up to five CorsRule elements in the request. If no CorsRule elements are included in the request body, all CORS rules will be deleted, and CORS will be disabled for the Queue service.
Cors CorsRulesResponsePtrOutput `pulumi:"cors"`
// The name of the resource
Name pulumi.StringOutput `pulumi:"name"`
// The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
Type pulumi.StringOutput `pulumi:"type"`
}
// NewQueueServiceProperties registers a new resource with the given unique name, arguments, and options.
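//
// Illustrative usage from another package (import path assumed):
//
//	props, err := latest.NewQueueServiceProperties(ctx, "queueProps", &latest.QueueServicePropertiesArgs{
//		AccountName:       pulumi.String("mystorageacct"),
//		QueueServiceName:  pulumi.String("default"), // must be 'default' per the field docs
//		ResourceGroupName: pulumi.String("my-rg"),
//	})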
func NewQueueServiceProperties(ctx *pulumi.Context,
name string, args *QueueServicePropertiesArgs, opts ...pulumi.ResourceOption) (*QueueServiceProperties, error) {
if args == nil || args.AccountName == nil {
return nil, errors.New("missing required argument 'AccountName'")
}
if args == nil || args.QueueServiceName == nil {
return nil, errors.New("missing required argument 'QueueServiceName'")
}
if args == nil || args.ResourceGroupName == nil {
return nil, errors.New("missing required argument 'ResourceGroupName'")
}
if args == nil {
args = &QueueServicePropertiesArgs{}
}
aliases := pulumi.Aliases([]pulumi.Alias{
{
Type: pulumi.String("azure-nextgen:storage/v20190601:QueueServiceProperties"),
},
{
Type: pulumi.String("azure-nextgen:storage/v20200801preview:QueueServiceProperties"),
},
})
opts = append(opts, aliases)
var resource QueueServiceProperties
err := ctx.RegisterResource("azure-nextgen:storage/latest:QueueServiceProperties", name, args, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
// GetQueueServiceProperties gets an existing QueueServiceProperties resource's state with the given name, ID, and optional
// state properties that are used to uniquely qualify the lookup (nil if not required).
func Ge | tx *pulumi.Context,
name string, id pulumi.IDInput, state *QueueServicePropertiesState, opts ...pulumi.ResourceOption) (*QueueServiceProperties, error) {
var resource QueueServiceProperties
err := ctx.ReadResource("azure-nextgen:storage/latest:QueueServiceProperties", name, id, state, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
// Input properties used for looking up and filtering QueueServiceProperties resources.
type queueServicePropertiesState struct {
// Specifies CORS rules for the Queue service. You can include up to five CorsRule elements in the request. If no CorsRule elements are included in the request body, all CORS rules will be deleted, and CORS will be disabled for the Queue service.
Cors *CorsRulesResponse `pulumi:"cors"`
// The name of the resource
Name *string `pulumi:"name"`
// The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
Type *string `pulumi:"type"`
}
type QueueServicePropertiesState struct {
// Specifies CORS rules for the Queue service. You can include up to five CorsRule elements in the request. If no CorsRule elements are included in the request body, all CORS rules will be deleted, and CORS will be disabled for the Queue service.
Cors CorsRulesResponsePtrInput
// The name of the resource
Name pulumi.StringPtrInput
// The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
Type pulumi.StringPtrInput
}
func (QueueServicePropertiesState) ElementType() reflect.Type {
return reflect.TypeOf((*queueServicePropertiesState)(nil)).Elem()
}
type queueServicePropertiesArgs struct {
// The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only.
AccountName string `pulumi:"accountName"`
// Specifies CORS rules for the Queue service. You can include up to five CorsRule elements in the request. If no CorsRule elements are included in the request body, all CORS rules will be deleted, and CORS will be disabled for the Queue service.
Cors *CorsRules `pulumi:"cors"`
// The name of the Queue Service within the specified storage account. Queue Service Name must be 'default'
QueueServiceName string `pulumi:"queueServiceName"`
// The name of the resource group within the user's subscription. The name is case insensitive.
ResourceGroupName string `pulumi:"resourceGroupName"`
}
// The set of arguments for constructing a QueueServiceProperties resource.
type QueueServicePropertiesArgs struct {
// The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only.
AccountName pulumi.StringInput
// Specifies CORS rules for the Queue service. You can include up to five CorsRule elements in the request. If no CorsRule elements are included in the request body, all CORS rules will be deleted, and CORS will be disabled for the Queue service.
Cors CorsRulesPtrInput
// The name of the Queue Service within the specified storage account. Queue Service Name must be 'default'
QueueServiceName pulumi.StringInput
// The name of the resource group within the user's subscription. The name is case insensitive.
ResourceGroupName pulumi.StringInput
}
func (QueueServicePropertiesArgs) ElementType() reflect.Type {
return reflect.TypeOf((*queueServicePropertiesArgs)(nil)).Elem()
}
| tQueueServiceProperties(c |
users.module.ts | import { FormsModule } from '@angular/forms';
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import {
MatDialogModule,
MatButtonModule,
MatIconModule,
MatFormFieldModule,
MatInputModule,
MatTableModule,
MatSelectModule, MatSortModule, MatPaginatorModule
} from '@angular/material';
import { NgxMatSelectSearchModule } from 'ngx-mat-select-search';
import { SharedModule } from '../shared/shared.module';
import { UsersRoutingModule } from './users-routing.module';
import { UsersListComponent } from './users-list/users-list.component';
import { UserComponent } from './user/user.component';
@NgModule({
declarations: [
UsersListComponent,
UserComponent
],
imports: [
CommonModule,
FormsModule,
MatDialogModule,
MatButtonModule,
MatIconModule,
MatFormFieldModule,
MatInputModule,
MatTableModule,
MatSelectModule,
SharedModule,
UsersRoutingModule,
MatSortModule,
MatPaginatorModule,
NgxMatSelectSearchModule
]
})
export class | { }
| UsersModule |
robot.py | # ROS/IOP Bridge
# Copyright (c) 2017 Fraunhofer
#
# This program is dual licensed; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# version 2 as published by the Free Software Foundation, or
# enter into a proprietary license agreement with the copyright
# holder.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; or you can read the full license at
# <http://www.gnu.de/documents/gpl-2.0.html>
#
# :author: Alexander Tiderko
import os
from python_qt_binding import loadUi
from python_qt_binding.QtCore import QObject, Signal, Qt
from python_qt_binding.QtGui import QIcon
try:
from python_qt_binding.QtGui import QWidget, QDialog, QTreeWidget, QTreeWidgetItem
except ImportError:
from python_qt_binding.QtWidgets import QWidget, QDialog, QTreeWidget, QTreeWidgetItem
import rospy
from .address import Address
from fkie_iop_msgs.msg import OcuCmdEntry, JausAddress
from .handoff_dialog import HandoffDialog
class Robot(QObject):
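# Seconds since the last identity update before this robot is considered stale (see is_old()).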
MAX_AGE = 30
control_activated = Signal(Address)
control_deactivated = Signal(Address)
view_activated = Signal(Address)
view_deactivated = Signal(Address)
def __init__(self, subsystem, settings, authority=205):
QObject.__init__(self)
self._subsystem = subsystem
self._settings = settings
self._authority = authority
ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'robot.ui')
self._widget = QWidget()
loadUi(ui_file, self._widget)
self._last_update = rospy.Time.now()
self._component_names = dict()
self._warnings = []
self._feedback_warnings = dict()
self._ocu_client = None
# address reported by access control client
self._control_addr = Address(JausAddress())
self._warning_dialog = self._create_warning_dialog()
self._detailed_dialog = self._create_detailed_dialog()
self.handoff_dialog = HandoffDialog(self.name, self.subsystem_id, self._settings, self._widget)
self.handoff_dialog.button_blink.connect(self._widget.button_handoff.setEnabled)
self._widget.button_view.clicked.connect(self._on_robot_view)
self._widget.button_control.setText("%s - %d" % (subsystem.ident.name, self._subsystem.ident.address.subsystem_id))
self._widget.button_control.clicked.connect(self._on_robot_control)
self._widget.button_control.setObjectName(subsystem.ident.name)
self._widget.button_handoff.setEnabled(False)
self._widget.button_handoff.clicked.connect(self.on_show_handoff)
self._widget.button_warnings.setEnabled(False)
self._widget.button_warnings.clicked.connect(self.on_show_warnings)
self._widget.button_details.clicked.connect(self.on_show_details)
def __del__(self):
self.handoff_dialog.setParent(None)
self.handoff_dialog.shutdown()
self.handoff_dialog = None
self._detailed_dialog = None
self._warning_dialog = None
self._ocu_client = None
self._feedback_warnings.clear()
self._component_names.clear()
del self._warnings[:]
@property
def name(self):
return self._subsystem.ident.name
@property
def subsystem_id(self):
# return the subsystem_id of the robot
return self._subsystem.ident.address.subsystem_id
@property
def ocu_client(self):
return self._ocu_client
@ocu_client.setter
def ocu_client(self, ocu_client):
self.set_warnings([])
if self._ocu_client is not None:
self._ocu_client.control_subsystem = -1
self._ocu_client = ocu_client
if self._ocu_client is not None:
self._ocu_client.control_subsystem = self.subsystem_id
if ocu_client.subsystem_restricted == self.subsystem_id:
self._widget.button_control.setEnabled(not ocu_client.only_monitor)
self.handoff_dialog.set_client(self._ocu_client)
self.update_feedback_warnings()
elif self.has_view() or self.has_control():
self.set_warnings(["No free OCU client available!", "Start an ocu_client with different nodeID to be able to listen for sensors on second robot."])
self.handoff_dialog.set_client(None)
if self._ocu_client is not None:
self._widget.button_handoff.setVisible(self._ocu_client.has_handoff_publisher())
else:
self._widget.button_handoff.setVisible(True)
@property
def ocu_client_restricted(self):
if self._ocu_client is not None:
if self._ocu_client.subsystem_restricted == self.subsystem_id:
return self._ocu_client
return None
@property
def control_addr(self):
return self._control_addr
@control_addr.setter
def control_addr(self, address):
self._control_addr = address
self._update_warnings_button()
def set_control_active(self, state):
self._widget.button_control.setEnabled(state)
def _on_robot_control(self, checked=False):
'''
Handle a click on the robot control button, switching between controlled and released state.
Emits the signals: control_activated or control_deactivated.
'''
addr = Address(JausAddress(self._subsystem.ident.address.subsystem_id, 255, 255))
if checked:
self._widget.button_view.setChecked(checked)
self.control_activated.emit(addr)
self.handoff_dialog.on_access = True
else:
self.release_control()
self.control_deactivated.emit(addr)
self.handoff_dialog.cancel_handoff()
self.handoff_dialog.on_access = False
# if self.has_view():
# self.view_activated.emit(addr)
def _on_robot_view(self, checked=False):
'''
Handle a click on the robot view button, switching between monitoring and released state.
Emits the signals: view_activated when checked, otherwise view_deactivated (and control_deactivated first if control was held).
'''
addr = Address(JausAddress(self._subsystem.ident.address.subsystem_id, 255, 255))
if checked:
self._widget.button_view.setChecked(checked)
self.view_activated.emit(addr)
else:
if self.has_control():
self._widget.button_control.setChecked(False)
self.control_deactivated.emit(addr)
self.view_deactivated.emit(addr)
def has_control(self):
return self._widget.button_control.isChecked()
def has_view(self):
return self._widget.button_view.isChecked()
def release_control(self):
self._widget.button_view.setChecked(False)
self._widget.button_control.setChecked(False)
def activate_view(self):
self._widget.button_view.setChecked(True)
def state_to_cmd(self):
cmd = OcuCmdEntry()
cmd.authority = self._settings.authority
cmd.name = self.name
cmd.address.subsystem_id = self._subsystem.ident.address.subsystem_id
cmd.address.node_id = 255
cmd.address.component_id = 255
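# Access-control codes below, as used by this widget: 12 = request exclusive
# control, 11 = monitor/view only, 10 = release (inferred from the button states).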
if self._widget.button_control.isChecked():
cmd.access_control = 12
elif self._widget.button_view.isChecked():
cmd.access_control = 11
else:
cmd.access_control = 10
if self.ocu_client is not None:
cmd.ocu_client = self.ocu_client.address
else:
cmd.ocu_client.subsystem_id = 65535
cmd.ocu_client.node_id = 255
cmd.ocu_client.component_id = 255
return cmd
def update(self, subsystem):
'''
Applies the updated description of the subsystem.
:type subsystem: fkie_iop_msgs/System
'''
if self._subsystem.ident.address.subsystem_id != subsystem.ident.address.subsystem_id:
return False
# if self._subsystem.ident.node_id != subsystem.ident.node_id:
# return False
if self._subsystem.ident.name != subsystem.ident.name:
return False
self._subsystem = subsystem
# self._last_update = rospy.Time.now()
return True
def on_show_handoff(self):
self.handoff_dialog.setVisible(not self.handoff_dialog.isVisible())
def on_show_details(self):
'''
Shows the subsystem in a new dialog as tree view.
'''
twc = self._detailed_dialog.treewidget_components
twc.clear()
client_info = "OCU client: ---"
if self._ocu_client is not None:
add_info = ''
if self.ocu_client.subsystem_restricted == self.subsystem_id:
if self.ocu_client.only_monitor:
add_info = ' [restricted, only monitor]'
else:
add_info = ' [restricted]'
client_info = "OCU client: %s%s" % (self.ocu_client.address, add_info)
elif self.control_addr.subsystem_id != 0:
client_info = 'Controlled by other OCU: %s' % self.control_addr
self._detailed_dialog.label_info.setText(client_info)
if self.name == self._subsystem.ident.name:
for node in self._subsystem.nodes:
node_item = QTreeWidgetItem(twc)
node_name = node.ident.name if node.ident.name else "NODE"
node_item.setText(0, "%s [id: %d]" % (node_name, node.ident.address.node_id))
for comp in node.components:
cmp_item = QTreeWidgetItem(node_item)
cmp_name = self._get_component_name(comp.address)
cmp_item.setText(0, "%s [%d.%d.%d]" % (cmp_name, comp.address.subsystem_id, comp.address.node_id, comp.address.component_id))
twc.expandItem(node_item)
for srv in comp.services:
srv_item = QTreeWidgetItem(cmp_item)
srv_item.setText(0, "%s v%d.%d" % (srv.uri, srv.major_version, srv.minor_version))
if self._detailed_dialog.isVisible():
self._detailed_dialog.setFocus(Qt.ActiveWindowFocusReason)
else:
self._detailed_dialog.show()
def on_show_warnings(self):
'''
Shows warning received by feedback.
'''
text_browser = self._warning_dialog.warnings
text_browser.clear()
if not self._warnings and not self._feedback_warnings:
text_browser.append('No known warnings!')
else:
for msg in self._warnings:
text_browser.append(msg)
if self._feedback_warnings:
text_browser.append('Services with warning state:')
for client, service_infos in self._feedback_warnings.items():
text_browser.append("Client %s:" % client)
for service_info in service_infos:
text_browser.append(" %s[%s]: %s" % (service_info.uri, Address(service_info.addr_control), self.access_state_to_str(service_info.access_state)))
self._warning_dialog.show()
def update_feedback_warnings(self):
'''
:type warnings: dict(Address of the ocu client: ServiceInfo)
'''
# get all warnings for each subsystem
warnings = dict()
if self._ocu_client is not None:
cw = self._ocu_client.get_warnings(self.subsystem_id, self.has_control())
warnings.update(cw)
# get insufficient authority reports to update handoff state button
insathority = dict()
cw = self._ocu_client.get_srvs_ins_authority(self.subsystem_id)
insathority.update(cw)
# update insufficient authority to activate handoff dialog
self.handoff_dialog.update_authority_problems(insathority)
self._feedback_warnings = warnings
self._update_warnings_button()
def set_warnings(self, warnings):
'''
:type warnings: list of strings
'''
self._warnings = warnings
self._update_warnings_button()
def _update_warnings_button(self):
has_warning = (len(self._warnings) + len(self._feedback_warnings)) > 0
if has_warning and self.has_control():
self._widget.button_control.setStyleSheet("QPushButton { background-color: #FE9A2E;}")
elif self.has_control():
self._widget.button_control.setStyleSheet("QPushButton { background-color: #98FB98;}")
self._widget.button_view.setStyleSheet("QPushButton { background-color: #98FB98;}")
elif self.has_view():
self._widget.button_control.setStyleSheet("QPushButton { background-color: None;}")
self._widget.button_view.setStyleSheet("QPushButton { background-color: #98FB98;}")
elif self.control_addr.subsystem_id != 0 and (self._ocu_client is None or self.control_addr.subsystem_id != self._ocu_client.subsystem_id):
self._widget.button_control.setStyleSheet("QPushButton { background-color: #A9A9A9;}")
self._widget.button_view.setStyleSheet("QPushButton { background-color: None;}")
else:
self._widget.button_control.setStyleSheet("QPushButton { background-color: None;}")
self._widget.button_view.setStyleSheet("QPushButton { background-color: None;}")
self._widget.button_warnings.setEnabled(has_warning)
def update_ident(self, ident):
if Address(ident.address) == Address(self._subsystem.ident.address):
self._last_update = rospy.Time.now()
if ident.system_type == 60001 or ident.request_type == 4:
if ident.address.subsystem_id == self._subsystem.ident.address.subsystem_id:
self._component_names[Address(ident.address)] = ident.name
return False
def _get_component_name(self, msg_address):
addr = Address(msg_address)
try:
return self._component_names[addr]
except Exception:
pass
return "Component"
def is_old(self):
return rospy.Time.now() - self._last_update > rospy.Duration(self.MAX_AGE)
def get_widget(self):
return self._widget
def _create_warning_dialog(self):
|
def _create_detailed_dialog(self):
diag = QDialog(self._widget)
ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'system_info.ui')
loadUi(ui_file, diag)
diag.treewidget_components.setHeaderLabel("%s [%d]" % (self.name, self.subsystem_id))
diag.resize(500, 300)
diag.setWindowTitle("subsystem %s[%d]" % (self.name, self.subsystem_id))
diag.setWindowIcon(QIcon.fromTheme("help-about"))
return diag
def access_state_to_str(self, state):
if state == 0:
return 'NOT_AVAILABLE'
if state == 1:
return 'NOT_CONTROLLED'
if state == 2:
return 'CONTROL_RELEASED'
if state == 3:
return 'CONTROL_ACCEPTED'
if state == 4:
return 'TIMEOUT'
if state == 5:
return 'INSUFFICIENT_AUTHORITY'
if state == 6:
return 'MONITORING'
return 'UNKNOWN'
| diag = QDialog(self._widget)
ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'warning_info.ui')
loadUi(ui_file, diag)
diag.resize(600, 250)
diag.setWindowTitle("Warning for %s[%d]" % (self.name, self.subsystem_id))
diag.setWindowIcon(QIcon.fromTheme("dialog-warning"))
return diag |
server.go | package main
import (
"net/http"
"fmt"
"html"
"log"
)
func | () {
http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
fmt.Fprintf(w, "Hello, %q", html.EscapeString(r.URL.Path))
})
log.Fatal(http.ListenAndServe(":8080", nil))
} | main |
span.js | "use strict";
import * as core from "./core.js";
import tag from "./tag.js";
/**
* opt : {tagoption}
*/
const defaultOption = {
tag: "span",
};
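// Illustrative: `new span({...})` forwards its options to the `tag` base class with
// `tag: "span"` merged in as the default, so a later `opt.tag` would override it.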
export default class | extends tag {
constructor(...opt) {
super(...opt);
}
get data() {
return super.data;
}
set data(opt) {
super.data = core.extend({}, defaultOption, opt);
}
}
| span |
requested_path.rs | use iron::Request;
use std::path::{PathBuf, Path};
use std::fs::{self, Metadata};
use std::convert::AsRef;
use url::percent_encoding::percent_decode;
pub struct | {
pub path: PathBuf,
}
#[inline]
fn decode_percents(string: &String) -> String {
String::from_utf8(percent_decode(string.as_bytes())).unwrap()
}
impl RequestedPath {
pub fn new<P: AsRef<Path>>(root_path: P, request: &Request) -> RequestedPath {
let mut path = root_path.as_ref().to_path_buf();
let decoded_req_path = request.url.path.iter().map(decode_percents);
path.extend(decoded_req_path);
RequestedPath { path: path }
}
pub fn should_redirect(&self, metadata: &Metadata, request: &Request) -> bool {
let last_url_element = request.url.path
.last()
.map(|s| s.as_ref());
// As per servo/rust-url/serialize_path, URLs ending in a slash have an
// empty string stored as the last component of their path. Rust-url
// even ensures that url.path is non-empty by appending a forward slash
// to URLs like http://example.com
// Some middleware may mutate the URL's path to violate this property,
// so the empty list case is handled as a redirect.
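// Illustrative example: a request for "/docs" that maps to a directory has no
// trailing slash, so this returns true and the caller should redirect to "/docs/".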
let has_trailing_slash = match last_url_element {
Some("") => true,
_ => false,
};
metadata.is_dir() && !has_trailing_slash
}
pub fn get_file(self, metadata: &Metadata) -> Option<PathBuf> {
if metadata.is_file() {
return Some(self.path);
}
let index_path = self.path.join("index.html");
match fs::metadata(&index_path) {
Ok(m) =>
if m.is_file() {
Some(index_path)
} else {
None
},
Err(_) => None,
}
}
}
| RequestedPath |
soap.rs | use hyper::{
header::{CONTENT_LENGTH, CONTENT_TYPE},
Body, Client, Request,
};
use crate::errors::RequestError;
#[derive(Clone, Debug)]
pub struct Action(String);
impl Action {
pub fn | (action: &str) -> Action {
Action(action.into())
}
}
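// Illustrative usage (action URN assumed, not from this crate):
// let xml = send_async(&url, Action::new("urn:schemas-upnp-org:service:WANIPConnection:1#GetExternalIPAddress"), &body).await?;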
const HEADER_NAME: &str = "SOAPAction";
pub async fn send_async(url: &str, action: Action, body: &str) -> Result<String, RequestError> {
let client = Client::new();
let req = Request::builder()
.uri(url)
.method("POST")
.header(HEADER_NAME, action.0)
.header(CONTENT_TYPE, "text/xml")
.header(CONTENT_LENGTH, body.len() as u64)
.body(Body::from(body.to_string()))?;
let resp = client.request(req).await?;
let body = hyper::body::to_bytes(resp.into_body()).await?;
let string = String::from_utf8(body.to_vec())?;
Ok(string)
}
| new |
baldi.js | var UnityLoader = UnityLoader || {
Compression: {
identity: {
require: function() {
return {};
},
decompress: function(e) {
return e;
}
},
gzip: {
require: function(e) {
var t = {
"inflate.js": function(e, t, r) {
"use strict";
function n(e) {
if (!(this instanceof n)) return new n(e);
this.options = s.assign(
{ chunkSize: 16384, windowBits: 0, to: "" },
e || {}
);
var t = this.options;
t.raw &&
t.windowBits >= 0 &&
t.windowBits < 16 &&
((t.windowBits = -t.windowBits),
0 === t.windowBits && (t.windowBits = -15)),
!(t.windowBits >= 0 && t.windowBits < 16) ||
(e && e.windowBits) ||
(t.windowBits += 32),
t.windowBits > 15 &&
t.windowBits < 48 &&
0 === (15 & t.windowBits) &&
(t.windowBits |= 15),
(this.err = 0),
(this.msg = ""),
(this.ended = !1),
(this.chunks = []),
(this.strm = new c()),
(this.strm.avail_out = 0);
var r = i.inflateInit2(this.strm, t.windowBits);
if (r !== l.Z_OK) throw new Error(u[r]);
(this.header = new f()),
i.inflateGetHeader(this.strm, this.header);
}
function o(e, t) {
var r = new n(t);
if ((r.push(e, !0), r.err)) throw r.msg || u[r.err];
return r.result;
}
function a(e, t) {
return (t = t || {}), (t.raw = !0), o(e, t);
}
var i = e("./zlib/inflate"),
s = e("./utils/common"),
d = e("./utils/strings"),
l = e("./zlib/constants"),
u = e("./zlib/messages"),
c = e("./zlib/zstream"),
f = e("./zlib/gzheader"),
h = Object.prototype.toString;
(n.prototype.push = function(e, t) {
var r,
n,
o,
a,
u,
c,
f = this.strm,
p = this.options.chunkSize,
w = this.options.dictionary,
m = !1;
if (this.ended) return !1;
(n = t === ~~t ? t : t === !0 ? l.Z_FINISH : l.Z_NO_FLUSH),
"string" == typeof e
? (f.input = d.binstring2buf(e))
: "[object ArrayBuffer]" === h.call(e)
? (f.input = new Uint8Array(e))
: (f.input = e),
(f.next_in = 0),
(f.avail_in = f.input.length);
do {
if (
(0 === f.avail_out &&
((f.output = new s.Buf8(p)),
(f.next_out = 0),
(f.avail_out = p)),
(r = i.inflate(f, l.Z_NO_FLUSH)),
r === l.Z_NEED_DICT &&
w &&
((c =
"string" == typeof w
? d.string2buf(w)
: "[object ArrayBuffer]" === h.call(w)
? new Uint8Array(w)
: w),
(r = i.inflateSetDictionary(this.strm, c))),
r === l.Z_BUF_ERROR && m === !0 && ((r = l.Z_OK), (m = !1)),
r !== l.Z_STREAM_END && r !== l.Z_OK)
)
return this.onEnd(r), (this.ended = !0), !1;
f.next_out &&
((0 !== f.avail_out &&
r !== l.Z_STREAM_END &&
(0 !== f.avail_in ||
(n !== l.Z_FINISH && n !== l.Z_SYNC_FLUSH))) ||
("string" === this.options.to
? ((o = d.utf8border(f.output, f.next_out)),
(a = f.next_out - o),
(u = d.buf2string(f.output, o)),
(f.next_out = a),
(f.avail_out = p - a),
a && s.arraySet(f.output, f.output, o, a, 0),
this.onData(u))
: this.onData(s.shrinkBuf(f.output, f.next_out)))),
0 === f.avail_in && 0 === f.avail_out && (m = !0);
} while (
(f.avail_in > 0 || 0 === f.avail_out) &&
r !== l.Z_STREAM_END
);
return (
r === l.Z_STREAM_END && (n = l.Z_FINISH),
n === l.Z_FINISH
? ((r = i.inflateEnd(this.strm)),
this.onEnd(r),
(this.ended = !0),
r === l.Z_OK)
: n !== l.Z_SYNC_FLUSH ||
(this.onEnd(l.Z_OK), (f.avail_out = 0), !0)
);
}),
(n.prototype.onData = function(e) {
this.chunks.push(e);
}),
(n.prototype.onEnd = function(e) {
e === l.Z_OK &&
("string" === this.options.to
? (this.result = this.chunks.join(""))
: (this.result = s.flattenChunks(this.chunks))),
(this.chunks = []),
(this.err = e),
(this.msg = this.strm.msg);
}),
(r.Inflate = n),
(r.inflate = o),
(r.inflateRaw = a),
(r.ungzip = o);
},
"utils/common.js": function(e, t, r) {
"use strict";
var n =
"undefined" != typeof Uint8Array &&
"undefined" != typeof Uint16Array &&
"undefined" != typeof Int32Array;
(r.assign = function(e) {
for (
var t = Array.prototype.slice.call(arguments, 1);
t.length;
) {
var r = t.shift();
if (r) {
if ("object" != typeof r)
throw new TypeError(r + "must be non-object");
for (var n in r) r.hasOwnProperty(n) && (e[n] = r[n]);
}
}
return e;
}),
(r.shrinkBuf = function(e, t) {
return e.length === t
? e
: e.subarray
? e.subarray(0, t)
: ((e.length = t), e);
});
var o = {
arraySet: function(e, t, r, n, o) {
if (t.subarray && e.subarray)
return void e.set(t.subarray(r, r + n), o);
for (var a = 0; a < n; a++) e[o + a] = t[r + a];
},
flattenChunks: function(e) {
var t, r, n, o, a, i;
for (n = 0, t = 0, r = e.length; t < r; t++) n += e[t].length;
for (
i = new Uint8Array(n), o = 0, t = 0, r = e.length;
t < r;
t++
)
(a = e[t]), i.set(a, o), (o += a.length);
return i;
}
},
a = {
arraySet: function(e, t, r, n, o) {
for (var a = 0; a < n; a++) e[o + a] = t[r + a];
},
flattenChunks: function(e) {
return [].concat.apply([], e);
}
};
(r.setTyped = function(e) {
e
? ((r.Buf8 = Uint8Array),
(r.Buf16 = Uint16Array),
(r.Buf32 = Int32Array),
r.assign(r, o))
: ((r.Buf8 = Array),
(r.Buf16 = Array),
(r.Buf32 = Array),
r.assign(r, a));
}),
r.setTyped(n);
},
"utils/strings.js": function(e, t, r) {
"use strict";
function n(e, t) {
if (t < 65537 && ((e.subarray && i) || (!e.subarray && a)))
return String.fromCharCode.apply(null, o.shrinkBuf(e, t));
for (var r = "", n = 0; n < t; n++)
r += String.fromCharCode(e[n]);
return r;
}
var o = e("./common"),
a = !0,
i = !0;
try {
String.fromCharCode.apply(null, [0]);
} catch (e) {
a = !1;
}
try {
String.fromCharCode.apply(null, new Uint8Array(1));
} catch (e) {
i = !1;
}
for (var s = new o.Buf8(256), d = 0; d < 256; d++)
s[d] =
d >= 252
? 6
: d >= 248
? 5
: d >= 240
? 4
: d >= 224
? 3
: d >= 192
? 2
: 1;
(s[254] = s[254] = 1),
(r.string2buf = function(e) {
var t,
r,
n,
a,
i,
s = e.length,
d = 0;
for (a = 0; a < s; a++)
(r = e.charCodeAt(a)),
55296 === (64512 & r) &&
a + 1 < s &&
((n = e.charCodeAt(a + 1)),
56320 === (64512 & n) &&
((r = 65536 + ((r - 55296) << 10) + (n - 56320)), a++)),
(d += r < 128 ? 1 : r < 2048 ? 2 : r < 65536 ? 3 : 4);
for (t = new o.Buf8(d), i = 0, a = 0; i < d; a++)
(r = e.charCodeAt(a)),
55296 === (64512 & r) &&
a + 1 < s &&
((n = e.charCodeAt(a + 1)),
56320 === (64512 & n) &&
((r = 65536 + ((r - 55296) << 10) + (n - 56320)), a++)),
r < 128
? (t[i++] = r)
: r < 2048
? ((t[i++] = 192 | (r >>> 6)), (t[i++] = 128 | (63 & r)))
: r < 65536
? ((t[i++] = 224 | (r >>> 12)),
(t[i++] = 128 | ((r >>> 6) & 63)),
(t[i++] = 128 | (63 & r)))
: ((t[i++] = 240 | (r >>> 18)),
(t[i++] = 128 | ((r >>> 12) & 63)),
(t[i++] = 128 | ((r >>> 6) & 63)),
(t[i++] = 128 | (63 & r)));
return t;
}),
(r.buf2binstring = function(e) {
return n(e, e.length);
}),
(r.binstring2buf = function(e) {
for (
var t = new o.Buf8(e.length), r = 0, n = t.length;
r < n;
r++
)
t[r] = e.charCodeAt(r);
return t;
}),
(r.buf2string = function(e, t) {
var r,
o,
a,
i,
d = t || e.length,
l = new Array(2 * d);
for (o = 0, r = 0; r < d; )
if (((a = e[r++]), a < 128)) l[o++] = a;
else if (((i = s[a]), i > 4)) (l[o++] = 65533), (r += i - 1);
else {
for (a &= 2 === i ? 31 : 3 === i ? 15 : 7; i > 1 && r < d; )
(a = (a << 6) | (63 & e[r++])), i--;
i > 1
? (l[o++] = 65533)
: a < 65536
? (l[o++] = a)
: ((a -= 65536),
(l[o++] = 55296 | ((a >> 10) & 1023)),
(l[o++] = 56320 | (1023 & a)));
}
return n(l, o);
}),
(r.utf8border = function(e, t) {
var r;
for (
t = t || e.length, t > e.length && (t = e.length), r = t - 1;
r >= 0 && 128 === (192 & e[r]);
)
r--;
return r < 0 ? t : 0 === r ? t : r + s[e[r]] > t ? r : t;
});
},
"zlib/inflate.js": function(e, t, r) {
"use strict";
function n(e) {
return (
((e >>> 24) & 255) +
((e >>> 8) & 65280) +
((65280 & e) << 8) +
((255 & e) << 24)
);
}
function o() {
(this.mode = 0),
(this.last = !1),
(this.wrap = 0),
(this.havedict = !1),
(this.flags = 0),
(this.dmax = 0),
(this.check = 0),
(this.total = 0),
(this.head = null),
(this.wbits = 0),
(this.wsize = 0),
(this.whave = 0),
(this.wnext = 0),
(this.window = null),
(this.hold = 0),
(this.bits = 0),
(this.length = 0),
(this.offset = 0),
(this.extra = 0),
(this.lencode = null),
(this.distcode = null),
(this.lenbits = 0),
(this.distbits = 0),
(this.ncode = 0),
(this.nlen = 0),
(this.ndist = 0),
(this.have = 0),
(this.next = null),
(this.lens = new y.Buf16(320)),
(this.work = new y.Buf16(288)),
(this.lendyn = null),
(this.distdyn = null),
(this.sane = 0),
(this.back = 0),
(this.was = 0);
}
function a(e) {
var t;
return e && e.state
? ((t = e.state),
(e.total_in = e.total_out = t.total = 0),
(e.msg = ""),
t.wrap && (e.adler = 1 & t.wrap),
(t.mode = P),
(t.last = 0),
(t.havedict = 0),
(t.dmax = 32768),
(t.head = null),
(t.hold = 0),
(t.bits = 0),
(t.lencode = t.lendyn = new y.Buf32(we)),
(t.distcode = t.distdyn = new y.Buf32(me)),
(t.sane = 1),
(t.back = -1),
O)
: R;
}
function i(e) {
var t;
return e && e.state
? ((t = e.state),
(t.wsize = 0),
(t.whave = 0),
(t.wnext = 0),
a(e))
: R;
}
function s(e, t) {
var r, n;
return e && e.state
? ((n = e.state),
t < 0
? ((r = 0), (t = -t))
: ((r = (t >> 4) + 1), t < 48 && (t &= 15)),
t && (t < 8 || t > 15)
? R
: (null !== n.window && n.wbits !== t && (n.window = null),
(n.wrap = r),
(n.wbits = t),
i(e)))
: R;
}
function d(e, t) {
var r, n;
return e
? ((n = new o()),
(e.state = n),
(n.window = null),
(r = s(e, t)),
r !== O && (e.state = null),
r)
: R;
}
function l(e) {
return d(e, ye);
}
function u(e) {
if (ge) {
var t;
for (
m = new y.Buf32(512), b = new y.Buf32(32), t = 0;
t < 144;
)
e.lens[t++] = 8;
for (; t < 256; ) e.lens[t++] = 9;
for (; t < 280; ) e.lens[t++] = 7;
for (; t < 288; ) e.lens[t++] = 8;
for (
U(E, e.lens, 0, 288, m, 0, e.work, { bits: 9 }), t = 0;
t < 32;
)
e.lens[t++] = 5;
U(k, e.lens, 0, 32, b, 0, e.work, { bits: 5 }), (ge = !1);
}
(e.lencode = m),
(e.lenbits = 9),
(e.distcode = b),
(e.distbits = 5);
}
function c(e, t, r, n) {
var o,
a = e.state;
return (
null === a.window &&
((a.wsize = 1 << a.wbits),
(a.wnext = 0),
(a.whave = 0),
(a.window = new y.Buf8(a.wsize))),
n >= a.wsize
? (y.arraySet(a.window, t, r - a.wsize, a.wsize, 0),
(a.wnext = 0),
(a.whave = a.wsize))
: ((o = a.wsize - a.wnext),
o > n && (o = n),
y.arraySet(a.window, t, r - n, o, a.wnext),
(n -= o),
n
? (y.arraySet(a.window, t, r - n, n, 0),
(a.wnext = n),
(a.whave = a.wsize))
: ((a.wnext += o),
a.wnext === a.wsize && (a.wnext = 0),
a.whave < a.wsize && (a.whave += o))),
0
);
}
function f(e, t) {
var r,
o,
a,
i,
s,
d,
l,
f,
h,
p,
w,
m,
b,
we,
me,
be,
ye,
ge,
ve,
Ae,
Ue,
xe,
Ee,
ke,
Be = 0,
Le = new y.Buf8(4),
We = [
16,
17,
18,
0,
8,
7,
9,
6,
10,
5,
11,
4,
12,
3,
13,
2,
14,
1,
15
];
if (!e || !e.state || !e.output || (!e.input && 0 !== e.avail_in))
return R;
(r = e.state),
r.mode === j && (r.mode = X),
(s = e.next_out),
(a = e.output),
(l = e.avail_out),
(i = e.next_in),
(o = e.input),
(d = e.avail_in),
(f = r.hold),
(h = r.bits),
(p = d),
(w = l),
(xe = O);
e: for (;;)
switch (r.mode) {
case P:
if (0 === r.wrap) {
r.mode = X;
break;
}
for (; h < 16; ) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
if (2 & r.wrap && 35615 === f) {
(r.check = 0),
(Le[0] = 255 & f),
(Le[1] = (f >>> 8) & 255),
(r.check = v(r.check, Le, 2, 0)),
(f = 0),
(h = 0),
(r.mode = T);
break;
}
if (
((r.flags = 0),
r.head && (r.head.done = !1),
!(1 & r.wrap) || (((255 & f) << 8) + (f >> 8)) % 31)
) {
(e.msg = "incorrect header check"), (r.mode = fe);
break;
}
if ((15 & f) !== S) {
(e.msg = "unknown compression method"), (r.mode = fe);
break;
}
if (
((f >>>= 4), (h -= 4), (Ue = (15 & f) + 8), 0 === r.wbits)
)
r.wbits = Ue;
else if (Ue > r.wbits) {
(e.msg = "invalid window size"), (r.mode = fe);
break;
}
(r.dmax = 1 << Ue),
(e.adler = r.check = 1),
(r.mode = 512 & f ? G : j),
(f = 0),
(h = 0);
break;
case T:
for (; h < 16; ) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
if (((r.flags = f), (255 & r.flags) !== S)) {
(e.msg = "unknown compression method"), (r.mode = fe);
break;
}
if (57344 & r.flags) {
(e.msg = "unknown header flags set"), (r.mode = fe);
break;
}
r.head && (r.head.text = (f >> 8) & 1),
512 & r.flags &&
((Le[0] = 255 & f),
(Le[1] = (f >>> 8) & 255),
(r.check = v(r.check, Le, 2, 0))),
(f = 0),
(h = 0),
(r.mode = D);
case D:
for (; h < 32; ) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
r.head && (r.head.time = f),
512 & r.flags &&
((Le[0] = 255 & f),
(Le[1] = (f >>> 8) & 255),
(Le[2] = (f >>> 16) & 255),
(Le[3] = (f >>> 24) & 255),
(r.check = v(r.check, Le, 4, 0))),
(f = 0),
(h = 0),
(r.mode = F);
case F:
for (; h < 16; ) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
r.head && ((r.head.xflags = 255 & f), (r.head.os = f >> 8)),
512 & r.flags &&
((Le[0] = 255 & f),
(Le[1] = (f >>> 8) & 255),
(r.check = v(r.check, Le, 2, 0))),
(f = 0),
(h = 0),
(r.mode = q);
case q:
if (1024 & r.flags) {
for (; h < 16; ) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
(r.length = f),
r.head && (r.head.extra_len = f),
512 & r.flags &&
((Le[0] = 255 & f),
(Le[1] = (f >>> 8) & 255),
(r.check = v(r.check, Le, 2, 0))),
(f = 0),
(h = 0);
} else r.head && (r.head.extra = null);
r.mode = V;
case V:
if (
1024 & r.flags &&
((m = r.length),
m > d && (m = d),
m &&
(r.head &&
((Ue = r.head.extra_len - r.length),
r.head.extra ||
(r.head.extra = new Array(r.head.extra_len)),
y.arraySet(r.head.extra, o, i, m, Ue)),
512 & r.flags && (r.check = v(r.check, o, m, i)),
(d -= m),
(i += m),
(r.length -= m)),
r.length)
)
break e;
(r.length = 0), (r.mode = z);
case z:
if (2048 & r.flags) {
if (0 === d) break e;
m = 0;
do
(Ue = o[i + m++]),
r.head &&
Ue &&
r.length < 65536 &&
(r.head.name += String.fromCharCode(Ue));
while (Ue && m < d);
if (
(512 & r.flags && (r.check = v(r.check, o, m, i)),
(d -= m),
(i += m),
Ue)
)
break e;
} else r.head && (r.head.name = null);
(r.length = 0), (r.mode = Z);
case Z:
if (4096 & r.flags) {
if (0 === d) break e;
m = 0;
do
(Ue = o[i + m++]),
r.head &&
Ue &&
r.length < 65536 &&
(r.head.comment += String.fromCharCode(Ue));
while (Ue && m < d);
if (
(512 & r.flags && (r.check = v(r.check, o, m, i)),
(d -= m),
(i += m),
Ue)
)
break e;
} else r.head && (r.head.comment = null);
r.mode = Y;
case Y:
if (512 & r.flags) {
for (; h < 16; ) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
if (f !== (65535 & r.check)) {
(e.msg = "header crc mismatch"), (r.mode = fe);
break;
}
(f = 0), (h = 0);
}
r.head &&
((r.head.hcrc = (r.flags >> 9) & 1), (r.head.done = !0)),
(e.adler = r.check = 0),
(r.mode = j);
break;
case G:
for (; h < 32; ) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
(e.adler = r.check = n(f)), (f = 0), (h = 0), (r.mode = J);
case J:
if (0 === r.havedict)
return (
(e.next_out = s),
(e.avail_out = l),
(e.next_in = i),
(e.avail_in = d),
(r.hold = f),
(r.bits = h),
N
);
(e.adler = r.check = 1), (r.mode = j);
case j:
if (t === L || t === W) break e;
case X:
if (r.last) {
(f >>>= 7 & h), (h -= 7 & h), (r.mode = le);
break;
}
for (; h < 3; ) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
switch (((r.last = 1 & f), (f >>>= 1), (h -= 1), 3 & f)) {
case 0:
r.mode = K;
break;
case 1:
if ((u(r), (r.mode = re), t === W)) {
(f >>>= 2), (h -= 2);
break e;
}
break;
case 2:
r.mode = $;
break;
case 3:
(e.msg = "invalid block type"), (r.mode = fe);
}
(f >>>= 2), (h -= 2);
break;
case K:
for (f >>>= 7 & h, h -= 7 & h; h < 32; ) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
if ((65535 & f) !== ((f >>> 16) ^ 65535)) {
(e.msg = "invalid stored block lengths"), (r.mode = fe);
break;
}
if (
((r.length = 65535 & f),
(f = 0),
(h = 0),
(r.mode = Q),
t === W)
)
break e;
case Q:
r.mode = _;
case _:
if ((m = r.length)) {
if ((m > d && (m = d), m > l && (m = l), 0 === m))
break e;
y.arraySet(a, o, i, m, s),
(d -= m),
(i += m),
(l -= m),
(s += m),
(r.length -= m);
break;
}
r.mode = j;
break;
case $:
for (; h < 14; ) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
if (
((r.nlen = (31 & f) + 257),
(f >>>= 5),
(h -= 5),
(r.ndist = (31 & f) + 1),
(f >>>= 5),
(h -= 5),
(r.ncode = (15 & f) + 4),
(f >>>= 4),
(h -= 4),
r.nlen > 286 || r.ndist > 30)
) {
(e.msg = "too many length or distance symbols"),
(r.mode = fe);
break;
}
(r.have = 0), (r.mode = ee);
case ee:
for (; r.have < r.ncode; ) {
for (; h < 3; ) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
(r.lens[We[r.have++]] = 7 & f), (f >>>= 3), (h -= 3);
}
for (; r.have < 19; ) r.lens[We[r.have++]] = 0;
if (
((r.lencode = r.lendyn),
(r.lenbits = 7),
(Ee = { bits: r.lenbits }),
(xe = U(x, r.lens, 0, 19, r.lencode, 0, r.work, Ee)),
(r.lenbits = Ee.bits),
xe)
) {
(e.msg = "invalid code lengths set"), (r.mode = fe);
break;
}
(r.have = 0), (r.mode = te);
case te:
for (; r.have < r.nlen + r.ndist; ) {
for (
;
(Be = r.lencode[f & ((1 << r.lenbits) - 1)]),
(me = Be >>> 24),
(be = (Be >>> 16) & 255),
(ye = 65535 & Be),
!(me <= h);
) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
if (ye < 16)
(f >>>= me), (h -= me), (r.lens[r.have++] = ye);
else {
if (16 === ye) {
for (ke = me + 2; h < ke; ) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
if (((f >>>= me), (h -= me), 0 === r.have)) {
(e.msg = "invalid bit length repeat"),
(r.mode = fe);
break;
}
(Ue = r.lens[r.have - 1]),
(m = 3 + (3 & f)),
(f >>>= 2),
(h -= 2);
} else if (17 === ye) {
for (ke = me + 3; h < ke; ) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
(f >>>= me),
(h -= me),
(Ue = 0),
(m = 3 + (7 & f)),
(f >>>= 3),
(h -= 3);
} else {
for (ke = me + 7; h < ke; ) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
(f >>>= me),
(h -= me),
(Ue = 0),
(m = 11 + (127 & f)),
(f >>>= 7),
(h -= 7);
}
if (r.have + m > r.nlen + r.ndist) {
(e.msg = "invalid bit length repeat"), (r.mode = fe);
break;
}
for (; m--; ) r.lens[r.have++] = Ue;
}
}
if (r.mode === fe) break;
if (0 === r.lens[256]) {
(e.msg = "invalid code -- missing end-of-block"),
(r.mode = fe);
break;
}
if (
((r.lenbits = 9),
(Ee = { bits: r.lenbits }),
(xe = U(E, r.lens, 0, r.nlen, r.lencode, 0, r.work, Ee)),
(r.lenbits = Ee.bits),
xe)
) {
(e.msg = "invalid literal/lengths set"), (r.mode = fe);
break;
}
if (
((r.distbits = 6),
(r.distcode = r.distdyn),
(Ee = { bits: r.distbits }),
(xe = U(
k,
r.lens,
r.nlen,
r.ndist,
r.distcode,
0,
r.work,
Ee
)),
(r.distbits = Ee.bits),
xe)
) {
(e.msg = "invalid distances set"), (r.mode = fe);
break;
}
if (((r.mode = re), t === W)) break e;
case re:
r.mode = ne;
case ne:
if (d >= 6 && l >= 258) {
(e.next_out = s),
(e.avail_out = l),
(e.next_in = i),
(e.avail_in = d),
(r.hold = f),
(r.bits = h),
A(e, w),
(s = e.next_out),
(a = e.output),
(l = e.avail_out),
(i = e.next_in),
(o = e.input),
(d = e.avail_in),
(f = r.hold),
(h = r.bits),
r.mode === j && (r.back = -1);
break;
}
for (
r.back = 0;
(Be = r.lencode[f & ((1 << r.lenbits) - 1)]),
(me = Be >>> 24),
(be = (Be >>> 16) & 255),
(ye = 65535 & Be),
!(me <= h);
) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
if (be && 0 === (240 & be)) {
for (
ge = me, ve = be, Ae = ye;
(Be =
r.lencode[Ae + ((f & ((1 << (ge + ve)) - 1)) >> ge)]),
(me = Be >>> 24),
(be = (Be >>> 16) & 255),
(ye = 65535 & Be),
!(ge + me <= h);
) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
(f >>>= ge), (h -= ge), (r.back += ge);
}
if (
((f >>>= me),
(h -= me),
(r.back += me),
(r.length = ye),
0 === be)
) {
r.mode = de;
break;
}
if (32 & be) {
(r.back = -1), (r.mode = j);
break;
}
if (64 & be) {
(e.msg = "invalid literal/length code"), (r.mode = fe);
break;
}
(r.extra = 15 & be), (r.mode = oe);
case oe:
if (r.extra) {
for (ke = r.extra; h < ke; ) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
(r.length += f & ((1 << r.extra) - 1)),
(f >>>= r.extra),
(h -= r.extra),
(r.back += r.extra);
}
(r.was = r.length), (r.mode = ae);
case ae:
for (
;
(Be = r.distcode[f & ((1 << r.distbits) - 1)]),
(me = Be >>> 24),
(be = (Be >>> 16) & 255),
(ye = 65535 & Be),
!(me <= h);
) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
if (0 === (240 & be)) {
for (
ge = me, ve = be, Ae = ye;
(Be =
r.distcode[
Ae + ((f & ((1 << (ge + ve)) - 1)) >> ge)
]),
(me = Be >>> 24),
(be = (Be >>> 16) & 255),
(ye = 65535 & Be),
!(ge + me <= h);
) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
(f >>>= ge), (h -= ge), (r.back += ge);
}
if (((f >>>= me), (h -= me), (r.back += me), 64 & be)) {
(e.msg = "invalid distance code"), (r.mode = fe);
break;
}
(r.offset = ye), (r.extra = 15 & be), (r.mode = ie);
case ie:
if (r.extra) {
for (ke = r.extra; h < ke; ) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
(r.offset += f & ((1 << r.extra) - 1)),
(f >>>= r.extra),
(h -= r.extra),
(r.back += r.extra);
}
if (r.offset > r.dmax) {
(e.msg = "invalid distance too far back"), (r.mode = fe);
break;
}
r.mode = se;
case se:
if (0 === l) break e;
if (((m = w - l), r.offset > m)) {
if (((m = r.offset - m), m > r.whave && r.sane)) {
(e.msg = "invalid distance too far back"),
(r.mode = fe);
break;
}
m > r.wnext
? ((m -= r.wnext), (b = r.wsize - m))
: (b = r.wnext - m),
m > r.length && (m = r.length),
(we = r.window);
} else (we = a), (b = s - r.offset), (m = r.length);
m > l && (m = l), (l -= m), (r.length -= m);
do a[s++] = we[b++];
while (--m);
0 === r.length && (r.mode = ne);
break;
case de:
if (0 === l) break e;
(a[s++] = r.length), l--, (r.mode = ne);
break;
case le:
if (r.wrap) {
for (; h < 32; ) {
if (0 === d) break e;
d--, (f |= o[i++] << h), (h += 8);
}
if (
((w -= l),
(e.total_out += w),
(r.total += w),
w &&
(e.adler = r.check = r.flags
? v(r.check, a, w, s - w)
: g(r.check, a, w, s - w)),
(w = l),
(r.flags ? f : n(f)) !== r.check)
) {
(e.msg = "incorrect data check"), (r.mode = fe);
break;
}
(f = 0), (h = 0);
}
r.mode = ue;
case ue:
if (r.wrap && r.flags) {
for (; h < 32; ) {
if (0 === d) break e;
d--, (f += o[i++] << h), (h += 8);
}
if (f !== (4294967295 & r.total)) {
(e.msg = "incorrect length check"), (r.mode = fe);
break;
}
(f = 0), (h = 0);
}
r.mode = ce;
case ce:
xe = M;
break e;
case fe:
xe = C;
break e;
case he:
return H;
case pe:
default:
return R;
}
return (
(e.next_out = s),
(e.avail_out = l),
(e.next_in = i),
(e.avail_in = d),
(r.hold = f),
(r.bits = h),
(r.wsize ||
(w !== e.avail_out &&
r.mode < fe &&
(r.mode < le || t !== B))) &&
c(e, e.output, e.next_out, w - e.avail_out)
? ((r.mode = he), H)
: ((p -= e.avail_in),
(w -= e.avail_out),
(e.total_in += p),
(e.total_out += w),
(r.total += w),
r.wrap &&
w &&
(e.adler = r.check = r.flags
? v(r.check, a, w, e.next_out - w)
: g(r.check, a, w, e.next_out - w)),
(e.data_type =
r.bits +
(r.last ? 64 : 0) +
(r.mode === j ? 128 : 0) +
(r.mode === re || r.mode === Q ? 256 : 0)),
((0 === p && 0 === w) || t === B) && xe === O && (xe = I),
xe)
);
}
function h(e) {
if (!e || !e.state) return R;
var t = e.state;
return t.window && (t.window = null), (e.state = null), O;
}
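          // Registers a gzip header object to be populated while inflate
          // parses the stream header; fails unless the stream uses a gzip
          // wrap (bit 2 of state.wrap).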
function | (e, t) {
var r;
return e && e.state
? ((r = e.state),
0 === (2 & r.wrap) ? R : ((r.head = t), (t.done = !1), O))
: R;
}
function w(e, t) {
var r,
n,
o,
a = t.length;
return e && e.state
? ((r = e.state),
0 !== r.wrap && r.mode !== J
? R
: r.mode === J &&
((n = 1), (n = g(n, t, a, 0)), n !== r.check)
? C
: (o = c(e, t, a, a))
? ((r.mode = he), H)
: ((r.havedict = 1), O))
: R;
}
var m,
b,
y = e("../utils/common"),
g = e("./adler32"),
v = e("./crc32"),
A = e("./inffast"),
U = e("./inftrees"),
x = 0,
E = 1,
k = 2,
B = 4,
L = 5,
W = 6,
O = 0,
M = 1,
N = 2,
R = -2,
C = -3,
H = -4,
I = -5,
S = 8,
P = 1,
T = 2,
D = 3,
F = 4,
q = 5,
V = 6,
z = 7,
Z = 8,
Y = 9,
G = 10,
J = 11,
j = 12,
X = 13,
K = 14,
Q = 15,
_ = 16,
$ = 17,
ee = 18,
te = 19,
re = 20,
ne = 21,
oe = 22,
ae = 23,
ie = 24,
se = 25,
de = 26,
le = 27,
ue = 28,
ce = 29,
fe = 30,
he = 31,
pe = 32,
we = 852,
me = 592,
be = 15,
ye = be,
ge = !0;
(r.inflateReset = i),
(r.inflateReset2 = s),
(r.inflateResetKeep = a),
(r.inflateInit = l),
(r.inflateInit2 = d),
(r.inflate = f),
(r.inflateEnd = h),
(r.inflateGetHeader = p),
(r.inflateSetDictionary = w),
(r.inflateInfo = "pako inflate (from Nodeca project)");
},
"zlib/constants.js": function(e, t, r) {
"use strict";
t.exports = {
Z_NO_FLUSH: 0,
Z_PARTIAL_FLUSH: 1,
Z_SYNC_FLUSH: 2,
Z_FULL_FLUSH: 3,
Z_FINISH: 4,
Z_BLOCK: 5,
Z_TREES: 6,
Z_OK: 0,
Z_STREAM_END: 1,
Z_NEED_DICT: 2,
Z_ERRNO: -1,
Z_STREAM_ERROR: -2,
Z_DATA_ERROR: -3,
Z_BUF_ERROR: -5,
Z_NO_COMPRESSION: 0,
Z_BEST_SPEED: 1,
Z_BEST_COMPRESSION: 9,
Z_DEFAULT_COMPRESSION: -1,
Z_FILTERED: 1,
Z_HUFFMAN_ONLY: 2,
Z_RLE: 3,
Z_FIXED: 4,
Z_DEFAULT_STRATEGY: 0,
Z_BINARY: 0,
Z_TEXT: 1,
Z_UNKNOWN: 2,
Z_DEFLATED: 8
};
},
"zlib/messages.js": function(e, t, r) {
"use strict";
t.exports = {
2: "need dictionary",
1: "stream end",
0: "",
"-1": "file error",
"-2": "stream error",
"-3": "data error",
"-4": "insufficient memory",
"-5": "buffer error",
"-6": "incompatible version"
};
},
"zlib/zstream.js": function(e, t, r) {
"use strict";
function n() {
(this.input = null),
(this.next_in = 0),
(this.avail_in = 0),
(this.total_in = 0),
(this.output = null),
(this.next_out = 0),
(this.avail_out = 0),
(this.total_out = 0),
(this.msg = ""),
(this.state = null),
(this.data_type = 2),
(this.adler = 0);
}
t.exports = n;
},
"zlib/gzheader.js": function(e, t, r) {
"use strict";
function n() {
(this.text = 0),
(this.time = 0),
(this.xflags = 0),
(this.os = 0),
(this.extra = null),
(this.extra_len = 0),
(this.name = ""),
(this.comment = ""),
(this.hcrc = 0),
(this.done = !1);
}
t.exports = n;
},
"zlib/adler32.js": function(e, t, r) {
"use strict";
function n(e, t, r, n) {
for (
var o = (65535 & e) | 0, a = ((e >>> 16) & 65535) | 0, i = 0;
0 !== r;
) {
(i = r > 2e3 ? 2e3 : r), (r -= i);
do (o = (o + t[n++]) | 0), (a = (a + o) | 0);
while (--i);
(o %= 65521), (a %= 65521);
}
return o | (a << 16) | 0;
}
t.exports = n;
},
"zlib/crc32.js": function(e, t, r) {
"use strict";
function n() {
for (var e, t = [], r = 0; r < 256; r++) {
e = r;
for (var n = 0; n < 8; n++)
e = 1 & e ? 3988292384 ^ (e >>> 1) : e >>> 1;
t[r] = e;
}
return t;
}
function o(e, t, r, n) {
var o = a,
i = n + r;
e ^= -1;
for (var s = n; s < i; s++) e = (e >>> 8) ^ o[255 & (e ^ t[s])];
return e ^ -1;
}
var a = n();
t.exports = o;
},
"zlib/inffast.js": function(e, t, r) {
"use strict";
var n = 30,
o = 12;
t.exports = function(e, t) {
var r,
a,
i,
s,
d,
l,
u,
c,
f,
h,
p,
w,
m,
b,
y,
g,
v,
A,
U,
x,
E,
k,
B,
L,
W;
(r = e.state),
(a = e.next_in),
(L = e.input),
(i = a + (e.avail_in - 5)),
(s = e.next_out),
(W = e.output),
(d = s - (t - e.avail_out)),
(l = s + (e.avail_out - 257)),
(u = r.dmax),
(c = r.wsize),
(f = r.whave),
(h = r.wnext),
(p = r.window),
(w = r.hold),
(m = r.bits),
(b = r.lencode),
(y = r.distcode),
(g = (1 << r.lenbits) - 1),
(v = (1 << r.distbits) - 1);
e: do {
m < 15 &&
((w += L[a++] << m), (m += 8), (w += L[a++] << m), (m += 8)),
(A = b[w & g]);
t: for (;;) {
if (
((U = A >>> 24),
(w >>>= U),
(m -= U),
(U = (A >>> 16) & 255),
0 === U)
)
W[s++] = 65535 & A;
else {
if (!(16 & U)) {
if (0 === (64 & U)) {
A = b[(65535 & A) + (w & ((1 << U) - 1))];
continue t;
}
if (32 & U) {
r.mode = o;
break e;
}
(e.msg = "invalid literal/length code"), (r.mode = n);
break e;
}
(x = 65535 & A),
(U &= 15),
U &&
(m < U && ((w += L[a++] << m), (m += 8)),
(x += w & ((1 << U) - 1)),
(w >>>= U),
(m -= U)),
m < 15 &&
((w += L[a++] << m),
(m += 8),
(w += L[a++] << m),
(m += 8)),
(A = y[w & v]);
r: for (;;) {
if (
((U = A >>> 24),
(w >>>= U),
(m -= U),
(U = (A >>> 16) & 255),
!(16 & U))
) {
if (0 === (64 & U)) {
A = y[(65535 & A) + (w & ((1 << U) - 1))];
continue r;
}
(e.msg = "invalid distance code"), (r.mode = n);
break e;
}
if (
((E = 65535 & A),
(U &= 15),
m < U &&
((w += L[a++] << m),
(m += 8),
m < U && ((w += L[a++] << m), (m += 8))),
(E += w & ((1 << U) - 1)),
E > u)
) {
(e.msg = "invalid distance too far back"), (r.mode = n);
break e;
}
if (((w >>>= U), (m -= U), (U = s - d), E > U)) {
if (((U = E - U), U > f && r.sane)) {
(e.msg = "invalid distance too far back"),
(r.mode = n);
break e;
}
if (((k = 0), (B = p), 0 === h)) {
if (((k += c - U), U < x)) {
x -= U;
do W[s++] = p[k++];
while (--U);
(k = s - E), (B = W);
}
} else if (h < U) {
if (((k += c + h - U), (U -= h), U < x)) {
x -= U;
do W[s++] = p[k++];
while (--U);
if (((k = 0), h < x)) {
(U = h), (x -= U);
do W[s++] = p[k++];
while (--U);
(k = s - E), (B = W);
}
}
} else if (((k += h - U), U < x)) {
x -= U;
do W[s++] = p[k++];
while (--U);
(k = s - E), (B = W);
}
for (; x > 2; )
(W[s++] = B[k++]),
(W[s++] = B[k++]),
(W[s++] = B[k++]),
(x -= 3);
x && ((W[s++] = B[k++]), x > 1 && (W[s++] = B[k++]));
} else {
k = s - E;
do
(W[s++] = W[k++]),
(W[s++] = W[k++]),
(W[s++] = W[k++]),
(x -= 3);
while (x > 2);
x && ((W[s++] = W[k++]), x > 1 && (W[s++] = W[k++]));
}
break;
}
}
break;
}
} while (a < i && s < l);
(x = m >> 3),
(a -= x),
(m -= x << 3),
(w &= (1 << m) - 1),
(e.next_in = a),
(e.next_out = s),
(e.avail_in = a < i ? 5 + (i - a) : 5 - (a - i)),
(e.avail_out = s < l ? 257 + (l - s) : 257 - (s - l)),
(r.hold = w),
(r.bits = m);
};
},
"zlib/inftrees.js": function(e, t, r) {
"use strict";
var n = e("../utils/common"),
o = 15,
a = 852,
i = 592,
s = 0,
d = 1,
l = 2,
u = [
3,
4,
5,
6,
7,
8,
9,
10,
11,
13,
15,
17,
19,
23,
27,
31,
35,
43,
51,
59,
67,
83,
99,
115,
131,
163,
195,
227,
258,
0,
0
],
c = [
16,
16,
16,
16,
16,
16,
16,
16,
17,
17,
17,
17,
18,
18,
18,
18,
19,
19,
19,
19,
20,
20,
20,
20,
21,
21,
21,
21,
16,
72,
78
],
f = [
1,
2,
3,
4,
5,
7,
9,
13,
17,
25,
33,
49,
65,
97,
129,
193,
257,
385,
513,
769,
1025,
1537,
2049,
3073,
4097,
6145,
8193,
12289,
16385,
24577,
0,
0
],
h = [
16,
16,
16,
16,
17,
17,
18,
18,
19,
19,
20,
20,
21,
21,
22,
22,
23,
23,
24,
24,
25,
25,
26,
26,
27,
27,
28,
28,
29,
29,
64,
64
];
t.exports = function(e, t, r, p, w, m, b, y) {
var g,
v,
A,
U,
x,
E,
k,
B,
L,
W = y.bits,
O = 0,
M = 0,
N = 0,
R = 0,
C = 0,
H = 0,
I = 0,
S = 0,
P = 0,
T = 0,
D = null,
F = 0,
q = new n.Buf16(o + 1),
V = new n.Buf16(o + 1),
z = null,
Z = 0;
for (O = 0; O <= o; O++) q[O] = 0;
for (M = 0; M < p; M++) q[t[r + M]]++;
for (C = W, R = o; R >= 1 && 0 === q[R]; R--);
if ((C > R && (C = R), 0 === R))
return (
(w[m++] = 20971520), (w[m++] = 20971520), (y.bits = 1), 0
);
for (N = 1; N < R && 0 === q[N]; N++);
for (C < N && (C = N), S = 1, O = 1; O <= o; O++)
if (((S <<= 1), (S -= q[O]), S < 0)) return -1;
if (S > 0 && (e === s || 1 !== R)) return -1;
for (V[1] = 0, O = 1; O < o; O++) V[O + 1] = V[O] + q[O];
for (M = 0; M < p; M++) 0 !== t[r + M] && (b[V[t[r + M]]++] = M);
if (
(e === s
? ((D = z = b), (E = 19))
: e === d
? ((D = u), (F -= 257), (z = c), (Z -= 257), (E = 256))
: ((D = f), (z = h), (E = -1)),
(T = 0),
(M = 0),
(O = N),
(x = m),
(H = C),
(I = 0),
(A = -1),
(P = 1 << C),
(U = P - 1),
(e === d && P > a) || (e === l && P > i))
)
return 1;
for (;;) {
(k = O - I),
b[M] < E
? ((B = 0), (L = b[M]))
: b[M] > E
? ((B = z[Z + b[M]]), (L = D[F + b[M]]))
: ((B = 96), (L = 0)),
(g = 1 << (O - I)),
(v = 1 << H),
(N = v);
do
(v -= g),
(w[x + (T >> I) + v] = (k << 24) | (B << 16) | L | 0);
while (0 !== v);
for (g = 1 << (O - 1); T & g; ) g >>= 1;
if (
(0 !== g ? ((T &= g - 1), (T += g)) : (T = 0),
M++,
0 === --q[O])
) {
if (O === R) break;
O = t[r + b[M]];
}
if (O > C && (T & U) !== A) {
for (
0 === I && (I = C), x += N, H = O - I, S = 1 << H;
H + I < R && ((S -= q[H + I]), !(S <= 0));
)
H++, (S <<= 1);
if (((P += 1 << H), (e === d && P > a) || (e === l && P > i)))
return 1;
(A = T & U), (w[A] = (C << 24) | (H << 16) | (x - m) | 0);
}
}
return (
0 !== T && (w[x + T] = ((O - I) << 24) | (64 << 16) | 0),
(y.bits = C),
0
);
};
}
};
for (var r in t) t[r].folder = r.substring(0, r.lastIndexOf("/") + 1);
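        // Minimal module loader: n() normalizes "./" and "../" segments,
        // o() walks parent folders looking in node_modules, and a() is the
        // require() entry point that caches each module's exports.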
var n = function(e) {
var r = [];
return (
(e = e.split("/").every(function(e) {
return ".." == e ? r.pop() : "." == e || "" == e || r.push(e);
})
? r.join("/")
: null),
e ? t[e] || t[e + ".js"] || t[e + "/index.js"] : null
);
},
o = function(e, t) {
return e
? n(e.folder + "node_modules/" + t) || o(e.parent, t)
: null;
},
a = function(e, t) {
var r = t.match(/^\//)
? null
: e
? t.match(/^\.\.?\//)
? n(e.folder + t)
: o(e, t)
: n(t);
if (!r) throw "module not found: " + t;
return (
r.exports ||
((r.parent = e), r(a.bind(null, r), r, (r.exports = {}))),
r.exports
);
};
return a(null, e);
},
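      // Inflates a gzip-compressed buffer; returns undefined instead of
      // throwing when the data cannot be decoded.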
decompress: function(e) {
this.exports || (this.exports = this.require("inflate.js"));
try {
return this.exports.inflate(e);
} catch (e) {}
},
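      // Sniffs the gzip header: magic bytes 0x1f 0x8b, optional EXTRA and
      // NAME fields are skipped, then the COMMENT field must equal the
      // Unity marker string.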
hasUnityMarker: function(e) {
var t = 10,
r = "UnityWeb Compressed Content (gzip)";
if (t > e.length || 31 != e[0] || 139 != e[1]) return !1;
var n = e[3];
if (4 & n) {
if (t + 2 > e.length) return !1;
if (((t += 2 + e[t] + (e[t + 1] << 8)), t > e.length)) return !1;
}
if (8 & n) {
for (; t < e.length && e[t]; ) t++;
if (t + 1 > e.length) return !1;
t++;
}
return (
16 & n &&
String.fromCharCode.apply(null, e.subarray(t, t + r.length + 1)) ==
r + "\0"
);
}
},
brotli: {
require: function(e) {
var t = {
"decompress.js": function(e, t, r) {
t.exports = e("./dec/decode").BrotliDecompressBuffer;
},
"dec/bit_reader.js": function(e, t, r) {
function n(e) {
(this.buf_ = new Uint8Array(a)), (this.input_ = e), this.reset();
}
const o = 4096,
a = 8224,
i = 8191,
s = new Uint32Array([
0,
1,
3,
7,
15,
31,
63,
127,
255,
511,
1023,
2047,
4095,
8191,
16383,
32767,
65535,
131071,
262143,
524287,
1048575,
2097151,
4194303,
8388607,
16777215
]);
(n.READ_SIZE = o),
(n.IBUF_MASK = i),
(n.prototype.reset = function() {
(this.buf_ptr_ = 0),
(this.val_ = 0),
(this.pos_ = 0),
(this.bit_pos_ = 0),
(this.bit_end_pos_ = 0),
(this.eos_ = 0),
this.readMoreInput();
for (var e = 0; e < 4; e++)
(this.val_ |= this.buf_[this.pos_] << (8 * e)), ++this.pos_;
return this.bit_end_pos_ > 0;
}),
(n.prototype.readMoreInput = function() {
if (!(this.bit_end_pos_ > 256))
if (this.eos_) {
if (this.bit_pos_ > this.bit_end_pos_)
throw new Error(
"Unexpected end of input " +
this.bit_pos_ +
" " +
this.bit_end_pos_
);
} else {
var e = this.buf_ptr_,
t = this.input_.read(this.buf_, e, o);
if (t < 0) throw new Error("Unexpected end of input");
if (t < o) {
this.eos_ = 1;
for (var r = 0; r < 32; r++) this.buf_[e + t + r] = 0;
}
if (0 === e) {
for (var r = 0; r < 32; r++)
this.buf_[8192 + r] = this.buf_[r];
this.buf_ptr_ = o;
} else this.buf_ptr_ = 0;
this.bit_end_pos_ += t << 3;
}
}),
(n.prototype.fillBitWindow = function() {
for (; this.bit_pos_ >= 8; )
(this.val_ >>>= 8),
(this.val_ |= this.buf_[this.pos_ & i] << 24),
++this.pos_,
(this.bit_pos_ = (this.bit_pos_ - 8) >>> 0),
(this.bit_end_pos_ = (this.bit_end_pos_ - 8) >>> 0);
}),
(n.prototype.readBits = function(e) {
32 - this.bit_pos_ < e && this.fillBitWindow();
var t = (this.val_ >>> this.bit_pos_) & s[e];
return (this.bit_pos_ += e), t;
}),
(t.exports = n);
},
"dec/context.js": function(e, t, r) {
(r.lookup = new Uint8Array([
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
4,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
8,
12,
16,
12,
12,
20,
12,
16,
24,
28,
12,
12,
32,
12,
36,
12,
44,
44,
44,
44,
44,
44,
44,
44,
44,
44,
32,
32,
24,
40,
28,
12,
12,
48,
52,
52,
52,
48,
52,
52,
52,
48,
52,
52,
52,
52,
52,
48,
52,
52,
52,
52,
52,
48,
52,
52,
52,
52,
52,
24,
12,
28,
12,
12,
12,
56,
60,
60,
60,
56,
60,
60,
60,
56,
60,
60,
60,
60,
60,
56,
60,
60,
60,
60,
60,
56,
60,
60,
60,
60,
60,
24,
12,
28,
12,
0,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
0,
1,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
2,
3,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
1,
1,
1,
1,
1,
1,
1,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
1,
1,
1,
1,
1,
1,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
1,
1,
1,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
0,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
2,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
3,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
7,
0,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
24,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
32,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
40,
48,
48,
48,
48,
48,
48,
48,
48,
48,
48,
48,
48,
48,
48,
48,
56,
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
0,
0,
0,
0,
1,
1,
1,
1,
2,
2,
2,
2,
3,
3,
3,
3,
4,
4,
4,
4,
5,
5,
5,
5,
6,
6,
6,
6,
7,
7,
7,
7,
8,
8,
8,
8,
9,
9,
9,
9,
10,
10,
10,
10,
11,
11,
11,
11,
12,
12,
12,
12,
13,
13,
13,
13,
14,
14,
14,
14,
15,
15,
15,
15,
16,
16,
16,
16,
17,
17,
17,
17,
18,
18,
18,
18,
19,
19,
19,
19,
20,
20,
20,
20,
21,
21,
21,
21,
22,
22,
22,
22,
23,
23,
23,
23,
24,
24,
24,
24,
25,
25,
25,
25,
26,
26,
26,
26,
27,
27,
27,
27,
28,
28,
28,
28,
29,
29,
29,
29,
30,
30,
30,
30,
31,
31,
31,
31,
32,
32,
32,
32,
33,
33,
33,
33,
34,
34,
34,
34,
35,
35,
35,
35,
36,
36,
36,
36,
37,
37,
37,
37,
38,
38,
38,
38,
39,
39,
39,
39,
40,
40,
40,
40,
41,
41,
41,
41,
42,
42,
42,
42,
43,
43,
43,
43,
44,
44,
44,
44,
45,
45,
45,
45,
46,
46,
46,
46,
47,
47,
47,
47,
48,
48,
48,
48,
49,
49,
49,
49,
50,
50,
50,
50,
51,
51,
51,
51,
52,
52,
52,
52,
53,
53,
53,
53,
54,
54,
54,
54,
55,
55,
55,
55,
56,
56,
56,
56,
57,
57,
57,
57,
58,
58,
58,
58,
59,
59,
59,
59,
60,
60,
60,
60,
61,
61,
61,
61,
62,
62,
62,
62,
63,
63,
63,
63,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
])),
(r.lookupOffsets = new Uint16Array([
1024,
1536,
1280,
1536,
0,
256,
768,
512
]));
},
"dec/decode.js": function(e, t, r) {
function n(e) {
var t;
return 0 === e.readBits(1)
? 16
: ((t = e.readBits(3)),
t > 0 ? 17 + t : ((t = e.readBits(3)), t > 0 ? 8 + t : 17));
}
function o(e) {
if (e.readBits(1)) {
var t = e.readBits(3);
return 0 === t ? 1 : e.readBits(t) + (1 << t);
}
return 0;
}
function a() {
(this.meta_block_length = 0),
(this.input_end = 0),
(this.is_uncompressed = 0),
(this.is_metadata = !1);
}
function i(e) {
var t,
r,
n,
o = new a();
if (((o.input_end = e.readBits(1)), o.input_end && e.readBits(1)))
return o;
if (((t = e.readBits(2) + 4), 7 === t)) {
if (((o.is_metadata = !0), 0 !== e.readBits(1)))
throw new Error("Invalid reserved bit");
if (((r = e.readBits(2)), 0 === r)) return o;
for (n = 0; n < r; n++) {
var i = e.readBits(8);
if (n + 1 === r && r > 1 && 0 === i)
throw new Error("Invalid size byte");
o.meta_block_length |= i << (8 * n);
}
} else
for (n = 0; n < t; ++n) {
var s = e.readBits(4);
if (n + 1 === t && t > 4 && 0 === s)
throw new Error("Invalid size nibble");
o.meta_block_length |= s << (4 * n);
}
return (
++o.meta_block_length,
o.input_end ||
o.is_metadata ||
(o.is_uncompressed = e.readBits(1)),
o
);
}
function s(e, t, r) {
var n;
return (
r.fillBitWindow(),
(t += (r.val_ >>> r.bit_pos_) & D),
(n = e[t].bits - T),
n > 0 &&
((r.bit_pos_ += T),
(t += e[t].value),
(t += (r.val_ >>> r.bit_pos_) & ((1 << n) - 1))),
(r.bit_pos_ += e[t].bits),
e[t].value
);
}
function d(e, t, r, n) {
for (
var o = 0, a = N, i = 0, s = 0, d = 32768, l = [], u = 0;
u < 32;
u++
)
l.push(new B(0, 0));
for (L(l, 0, 5, e, q); o < t && d > 0; ) {
var c,
f = 0;
if (
(n.readMoreInput(),
n.fillBitWindow(),
(f += (n.val_ >>> n.bit_pos_) & 31),
(n.bit_pos_ += l[f].bits),
(c = 255 & l[f].value),
c < R)
)
(i = 0),
(r[o++] = c),
0 !== c && ((a = c), (d -= 32768 >> c));
else {
var h,
p,
w = c - 14,
m = 0;
if (
(c === R && (m = a),
s !== m && ((i = 0), (s = m)),
(h = i),
i > 0 && ((i -= 2), (i <<= w)),
(i += n.readBits(w) + 3),
(p = i - h),
o + p > t)
)
throw new Error(
"[ReadHuffmanCodeLengths] symbol + repeat_delta > num_symbols"
);
for (var b = 0; b < p; b++) r[o + b] = s;
(o += p), 0 !== s && (d -= p << (15 - s));
}
}
if (0 !== d)
throw new Error("[ReadHuffmanCodeLengths] space = " + d);
for (; o < t; o++) r[o] = 0;
}
function l(e, t, r, n) {
var o,
a = 0,
i = new Uint8Array(e);
if ((n.readMoreInput(), (o = n.readBits(2)), 1 === o)) {
for (
var s,
l = e - 1,
u = 0,
c = new Int32Array(4),
f = n.readBits(2) + 1;
l;
)
(l >>= 1), ++u;
for (s = 0; s < f; ++s)
(c[s] = n.readBits(u) % e), (i[c[s]] = 2);
switch (((i[c[0]] = 1), f)) {
case 1:
break;
case 3:
if (c[0] === c[1] || c[0] === c[2] || c[1] === c[2])
throw new Error("[ReadHuffmanCode] invalid symbols");
break;
case 2:
if (c[0] === c[1])
throw new Error("[ReadHuffmanCode] invalid symbols");
i[c[1]] = 1;
break;
case 4:
if (
c[0] === c[1] ||
c[0] === c[2] ||
c[0] === c[3] ||
c[1] === c[2] ||
c[1] === c[3] ||
c[2] === c[3]
)
throw new Error("[ReadHuffmanCode] invalid symbols");
n.readBits(1)
? ((i[c[2]] = 3), (i[c[3]] = 3))
: (i[c[0]] = 2);
}
} else {
var s,
h = new Uint8Array(q),
p = 32,
w = 0,
m = [
new B(2, 0),
new B(2, 4),
new B(2, 3),
new B(3, 2),
new B(2, 0),
new B(2, 4),
new B(2, 3),
new B(4, 1),
new B(2, 0),
new B(2, 4),
new B(2, 3),
new B(3, 2),
new B(2, 0),
new B(2, 4),
new B(2, 3),
new B(4, 5)
];
for (s = o; s < q && p > 0; ++s) {
var b,
y = V[s],
g = 0;
n.fillBitWindow(),
(g += (n.val_ >>> n.bit_pos_) & 15),
(n.bit_pos_ += m[g].bits),
(b = m[g].value),
(h[y] = b),
0 !== b && ((p -= 32 >> b), ++w);
}
if (1 !== w && 0 !== p)
throw new Error(
"[ReadHuffmanCode] invalid num_codes or space"
);
d(h, e, i, n);
}
if (((a = L(t, r, T, i, e)), 0 === a))
throw new Error("[ReadHuffmanCode] BuildHuffmanTable failed: ");
return a;
}
function u(e, t, r) {
var n, o;
return (
(n = s(e, t, r)),
(o = O.kBlockLengthPrefixCode[n].nbits),
O.kBlockLengthPrefixCode[n].offset + r.readBits(o)
);
}
function c(e, t, r) {
var n;
return (
e < z
? ((r += Z[e]), (r &= 3), (n = t[r] + Y[e]))
: (n = e - z + 1),
n
);
}
function f(e, t) {
for (var r = e[t], n = t; n; --n) e[n] = e[n - 1];
e[0] = r;
}
function h(e, t) {
var r,
n = new Uint8Array(256);
for (r = 0; r < 256; ++r) n[r] = r;
for (r = 0; r < t; ++r) {
var o = e[r];
(e[r] = n[o]), o && f(n, o);
}
}
function p(e, t) {
(this.alphabet_size = e),
(this.num_htrees = t),
(this.codes = new Array(t + t * G[(e + 31) >>> 5])),
(this.htrees = new Uint32Array(t));
}
function w(e, t) {
var r,
n,
a,
i = { num_htrees: null, context_map: null },
d = 0;
t.readMoreInput();
var u = (i.num_htrees = o(t) + 1),
c = (i.context_map = new Uint8Array(e));
if (u <= 1) return i;
for (
r = t.readBits(1), r && (d = t.readBits(4) + 1), n = [], a = 0;
a < F;
a++
)
n[a] = new B(0, 0);
for (l(u + d, n, 0, t), a = 0; a < e; ) {
var f;
if ((t.readMoreInput(), (f = s(n, 0, t)), 0 === f))
(c[a] = 0), ++a;
else if (f <= d)
for (var p = 1 + (1 << f) + t.readBits(f); --p; ) {
if (a >= e)
throw new Error(
"[DecodeContextMap] i >= context_map_size"
);
(c[a] = 0), ++a;
}
else (c[a] = f - d), ++a;
}
return t.readBits(1) && h(c, e), i;
}
function m(e, t, r, n, o, a, i) {
var d,
l = 2 * r,
u = r,
c = s(t, r * F, i);
(d =
0 === c
? o[l + (1 & a[u])]
: 1 === c
? o[l + ((a[u] - 1) & 1)] + 1
: c - 2),
d >= e && (d -= e),
(n[r] = d),
(o[l + (1 & a[u])] = d),
++a[u];
}
function b(e, t, r, n, o, a) {
var i,
s = o + 1,
d = r & o,
l = a.pos_ & E.IBUF_MASK;
if (t < 8 || a.bit_pos_ + (t << 3) < a.bit_end_pos_)
for (; t-- > 0; )
a.readMoreInput(),
(n[d++] = a.readBits(8)),
d === s && (e.write(n, s), (d = 0));
else {
if (a.bit_end_pos_ < 32)
throw new Error(
"[CopyUncompressedBlockToOutput] br.bit_end_pos_ < 32"
);
for (; a.bit_pos_ < 32; )
(n[d] = a.val_ >>> a.bit_pos_), (a.bit_pos_ += 8), ++d, --t;
if (
((i = (a.bit_end_pos_ - a.bit_pos_) >> 3),
l + i > E.IBUF_MASK)
) {
for (var u = E.IBUF_MASK + 1 - l, c = 0; c < u; c++)
n[d + c] = a.buf_[l + c];
(i -= u), (d += u), (t -= u), (l = 0);
}
for (var c = 0; c < i; c++) n[d + c] = a.buf_[l + c];
if (((d += i), (t -= i), d >= s)) {
e.write(n, s), (d -= s);
for (var c = 0; c < d; c++) n[c] = n[s + c];
}
for (; d + t >= s; ) {
if (((i = s - d), a.input_.read(n, d, i) < i))
throw new Error(
"[CopyUncompressedBlockToOutput] not enough bytes"
);
e.write(n, s), (t -= i), (d = 0);
}
if (a.input_.read(n, d, t) < t)
throw new Error(
"[CopyUncompressedBlockToOutput] not enough bytes"
);
a.reset();
}
}
function y(e) {
var t = (e.bit_pos_ + 7) & -8,
r = e.readBits(t - e.bit_pos_);
return 0 == r;
}
function g(e) {
var t = new U(e),
r = new E(t);
n(r);
var o = i(r);
return o.meta_block_length;
}
function v(e, t) {
var r = new U(e);
null == t && (t = g(e));
var n = new Uint8Array(t),
o = new x(n);
return (
A(r, o),
o.pos < o.buffer.length &&
(o.buffer = o.buffer.subarray(0, o.pos)),
o.buffer
);
}
function A(e, t) {
var r,
a,
d,
f,
h,
g,
v,
A,
U,
x = 0,
L = 0,
N = 0,
R = 0,
T = [16, 15, 11, 4],
D = 0,
q = 0,
V = 0,
Z = [new p(0, 0), new p(0, 0), new p(0, 0)];
const Y = 128 + E.READ_SIZE;
(U = new E(e)),
(N = n(U)),
(a = (1 << N) - 16),
(d = 1 << N),
(f = d - 1),
(h = new Uint8Array(d + Y + k.maxDictionaryWordLength)),
(g = d),
(v = []),
(A = []);
for (var G = 0; G < 3240; G++)
(v[G] = new B(0, 0)), (A[G] = new B(0, 0));
for (; !L; ) {
var J,
j,
X,
K,
Q,
_,
$,
ee,
te,
re = 0,
ne = [1 << 28, 1 << 28, 1 << 28],
oe = [0],
ae = [1, 1, 1],
ie = [0, 1, 0, 1, 0, 1],
se = [0],
de = null,
le = null,
ue = null,
ce = 0,
fe = null,
he = 0,
pe = 0,
we = null,
me = 0,
be = 0,
ye = 0;
for (r = 0; r < 3; ++r)
(Z[r].codes = null), (Z[r].htrees = null);
U.readMoreInput();
var ge = i(U);
if (((re = ge.meta_block_length), x + re > t.buffer.length)) {
var ve = new Uint8Array(x + re);
ve.set(t.buffer), (t.buffer = ve);
}
if (
((L = ge.input_end), (J = ge.is_uncompressed), ge.is_metadata)
)
for (y(U); re > 0; --re) U.readMoreInput(), U.readBits(8);
else if (0 !== re)
if (J)
(U.bit_pos_ = (U.bit_pos_ + 7) & -8),
b(t, re, x, h, f, U),
(x += re);
else {
for (r = 0; r < 3; ++r)
(ae[r] = o(U) + 1),
ae[r] >= 2 &&
(l(ae[r] + 2, v, r * F, U),
l(I, A, r * F, U),
(ne[r] = u(A, r * F, U)),
(se[r] = 1));
for (
U.readMoreInput(),
j = U.readBits(2),
X = z + (U.readBits(4) << j),
K = (1 << j) - 1,
Q = X + (48 << j),
le = new Uint8Array(ae[0]),
r = 0;
r < ae[0];
++r
)
U.readMoreInput(), (le[r] = U.readBits(2) << 1);
var Ae = w(ae[0] << S, U);
(_ = Ae.num_htrees), (de = Ae.context_map);
var Ue = w(ae[2] << P, U);
for (
$ = Ue.num_htrees,
ue = Ue.context_map,
Z[0] = new p(C, _),
Z[1] = new p(H, ae[1]),
Z[2] = new p(Q, $),
r = 0;
r < 3;
++r
)
Z[r].decode(U);
for (
fe = 0,
we = 0,
ee = le[oe[0]],
be = W.lookupOffsets[ee],
ye = W.lookupOffsets[ee + 1],
te = Z[1].htrees[0];
re > 0;
) {
var xe, Ee, ke, Be, Le, We, Oe, Me, Ne, Re, Ce;
for (
U.readMoreInput(),
0 === ne[1] &&
(m(ae[1], v, 1, oe, ie, se, U),
(ne[1] = u(A, F, U)),
(te = Z[1].htrees[oe[1]])),
--ne[1],
xe = s(Z[1].codes, te, U),
Ee = xe >> 6,
Ee >= 2 ? ((Ee -= 2), (Oe = -1)) : (Oe = 0),
ke = O.kInsertRangeLut[Ee] + ((xe >> 3) & 7),
Be = O.kCopyRangeLut[Ee] + (7 & xe),
Le =
O.kInsertLengthPrefixCode[ke].offset +
U.readBits(O.kInsertLengthPrefixCode[ke].nbits),
We =
O.kCopyLengthPrefixCode[Be].offset +
U.readBits(O.kCopyLengthPrefixCode[Be].nbits),
q = h[(x - 1) & f],
V = h[(x - 2) & f],
Re = 0;
Re < Le;
++Re
)
U.readMoreInput(),
0 === ne[0] &&
(m(ae[0], v, 0, oe, ie, se, U),
(ne[0] = u(A, 0, U)),
(ce = oe[0] << S),
(fe = ce),
(ee = le[oe[0]]),
(be = W.lookupOffsets[ee]),
(ye = W.lookupOffsets[ee + 1])),
(Ne = W.lookup[be + q] | W.lookup[ye + V]),
(he = de[fe + Ne]),
--ne[0],
(V = q),
(q = s(Z[0].codes, Z[0].htrees[he], U)),
(h[x & f] = q),
(x & f) === f && t.write(h, d),
++x;
if (((re -= Le), re <= 0)) break;
if (Oe < 0) {
var Ne;
if (
(U.readMoreInput(),
0 === ne[2] &&
(m(ae[2], v, 2, oe, ie, se, U),
(ne[2] = u(A, 2160, U)),
(pe = oe[2] << P),
(we = pe)),
--ne[2],
(Ne = 255 & (We > 4 ? 3 : We - 2)),
(me = ue[we + Ne]),
(Oe = s(Z[2].codes, Z[2].htrees[me], U)),
Oe >= X)
) {
var He, Ie, Se;
(Oe -= X),
(Ie = Oe & K),
(Oe >>= j),
(He = (Oe >> 1) + 1),
(Se = ((2 + (1 & Oe)) << He) - 4),
(Oe = X + ((Se + U.readBits(He)) << j) + Ie);
}
}
if (((Me = c(Oe, T, D)), Me < 0))
throw new Error("[BrotliDecompress] invalid distance");
if (
((R = x < a && R !== a ? x : a), (Ce = x & f), Me > R)
) {
if (
!(
We >= k.minDictionaryWordLength &&
We <= k.maxDictionaryWordLength
)
)
throw new Error(
"Invalid backward reference. pos: " +
x +
" distance: " +
Me +
" len: " +
We +
" bytes left: " +
re
);
var Se = k.offsetsByLength[We],
Pe = Me - R - 1,
Te = k.sizeBitsByLength[We],
De = (1 << Te) - 1,
Fe = Pe & De,
qe = Pe >> Te;
if (((Se += Fe * We), !(qe < M.kNumTransforms)))
throw new Error(
"Invalid backward reference. pos: " +
x +
" distance: " +
Me +
" len: " +
We +
" bytes left: " +
re
);
var Ve = M.transformDictionaryWord(h, Ce, Se, We, qe);
if (((Ce += Ve), (x += Ve), (re -= Ve), Ce >= g)) {
t.write(h, d);
for (var ze = 0; ze < Ce - g; ze++) h[ze] = h[g + ze];
}
} else {
if ((Oe > 0 && ((T[3 & D] = Me), ++D), We > re))
throw new Error(
"Invalid backward reference. pos: " +
x +
" distance: " +
Me +
" len: " +
We +
" bytes left: " +
re
);
for (Re = 0; Re < We; ++Re)
(h[x & f] = h[(x - Me) & f]),
(x & f) === f && t.write(h, d),
++x,
--re;
}
(q = h[(x - 1) & f]), (V = h[(x - 2) & f]);
}
x &= 1073741823;
}
}
t.write(h, x & f);
}
var U = e("./streams").BrotliInput,
x = e("./streams").BrotliOutput,
E = e("./bit_reader"),
k = e("./dictionary"),
B = e("./huffman").HuffmanCode,
L = e("./huffman").BrotliBuildHuffmanTable,
W = e("./context"),
O = e("./prefix"),
M = e("./transform");
const N = 8,
R = 16,
C = 256,
H = 704,
I = 26,
S = 6,
P = 2,
T = 8,
D = 255,
F = 1080,
q = 18,
V = new Uint8Array([
1,
2,
3,
4,
0,
5,
17,
6,
16,
7,
8,
9,
10,
11,
12,
13,
14,
15
]),
z = 16,
Z = new Uint8Array([
3,
2,
1,
0,
3,
3,
3,
3,
3,
3,
2,
2,
2,
2,
2,
2
]),
Y = new Int8Array([
0,
0,
0,
0,
-1,
1,
-2,
2,
-3,
3,
-1,
1,
-2,
2,
-3,
3
]),
G = new Uint16Array([
256,
402,
436,
468,
500,
534,
566,
598,
630,
662,
694,
726,
758,
790,
822,
854,
886,
920,
952,
984,
1016,
1048,
1080
]);
(p.prototype.decode = function(e) {
var t,
r,
n = 0;
for (t = 0; t < this.num_htrees; ++t)
(this.htrees[t] = n),
(r = l(this.alphabet_size, this.codes, n, e)),
(n += r);
}),
(r.BrotliDecompressedSize = g),
(r.BrotliDecompressBuffer = v),
(r.BrotliDecompress = A),
k.init();
},
"dec/dictionary.js": function(e, t, r) {
var n = e("./dictionary-browser");
(r.init = function() {
r.dictionary = n.init();
}),
(r.offsetsByLength = new Uint32Array([
0,
0,
0,
0,
0,
4096,
9216,
21504,
35840,
44032,
53248,
63488,
74752,
87040,
93696,
100864,
104704,
106752,
108928,
113536,
115968,
118528,
119872,
121280,
122016
])),
(r.sizeBitsByLength = new Uint8Array([
0,
0,
0,
0,
10,
10,
11,
11,
10,
10,
10,
10,
10,
9,
9,
8,
7,
7,
8,
7,
7,
6,
6,
5,
5
])),
(r.minDictionaryWordLength = 4),
(r.maxDictionaryWordLength = 24);
},
"dec/dictionary.bin.js": function(e, t, r) {
t.exports =
"W5/fcQLn5gKf2XUbAiQ1XULX+TZz6ADToDsgqk6qVfeC0e4m6OO2wcQ1J76ZBVRV1fRkEsdu//62zQsFEZWSTCnMhcsQKlS2qOhuVYYMGCkV0fXWEoMFbESXrKEZ9wdUEsyw9g4bJlEt1Y6oVMxMRTEVbCIwZzJzboK5j8m4YH02qgXYhv1V+PM435sLVxyHJihaJREEhZGqL03txGFQLm76caGO/ovxKvzCby/3vMTtX/459f0igi7WutnKiMQ6wODSoRh/8Lx1V3Q99MvKtwB6bHdERYRY0hStJoMjNeTsNX7bn+Y7e4EQ3bf8xBc7L0BsyfFPK43dGSXpL6clYC/I328h54/VYrQ5i0648FgbGtl837svJ35L3Mot/+nPlNpWgKx1gGXQYqX6n+bbZ7wuyCHKcUok12Xjqub7NXZGzqBx0SD+uziNf87t7ve42jxSKQoW3nyxVrWIGlFShhCKxjpZZ5MeGna0+lBkk+kaN8F9qFBAFgEogyMBdcX/T1W/WnMOi/7ycWUQloEBKGeC48MkiwqJkJO+12eQiOFHMmck6q/IjWW3RZlany23TBm+cNr/84/oi5GGmGBZWrZ6j+zykVozz5fT/QH/Da6WTbZYYPynVNO7kxzuNN2kxKKWche5WveitPKAecB8YcAHz/+zXLjcLzkdDSktNIDwZE9J9X+tto43oJy65wApM3mDzYtCwX9lM+N5VR3kXYo0Z3t0TtXfgBFg7gU8oN0Dgl7fZlUbhNll+0uuohRVKjrEd8egrSndy5/Tgd2gqjA4CAVuC7ESUmL3DZoGnfhQV8uwnpi8EGvAVVsowNRxPudck7+oqAUDkwZopWqFnW1riss0t1z6iCISVKreYGNvQcXv+1L9+jbP8cd/dPUiqBso2q+7ZyFBvENCkkVr44iyPbtOoOoCecWsiuqMSML5lv+vN5MzUr+Dnh73G7Q1YnRYJVYXHRJaNAOByiaK6CusgFdBPE40r0rvqXV7tksKO2DrHYXBTv8P5ysqxEx8VDXUDDqkPH6NNOV/a2WH8zlkXRELSa8P+heNyJBBP7PgsG1EtWtNef6/i+lcayzQwQCsduidpbKfhWUDgAEmyhGu/zVTacI6RS0zTABrOYueemnVa19u9fT23N/Ta6RvTpof5DWygqreCqrDAgM4LID1+1T/taU6yTFVLqXOv+/MuQOFnaF8vLMKD7tKWDoBdALgxF33zQccCcdHx8fKIVdW69O7qHtXpeGr9jbbpFA+qRMWr5hp0s67FPc7HAiLV0g0/peZlW7hJPYEhZyhpSwahnf93/tZgfqZWXFdmdXBzqxGHLrQKxoAY6fRoBhgCRPmmGueYZ5JexTVDKUIXzkG/fqp/0U3hAgQdJ9zumutK6nqWbaqvm1pgu03IYR+G+8s0jDBBz8cApZFSBeuWasyqo2OMDKAZCozS+GWSvL/HsE9rHxooe17U3s/lTE+VZAk4j3dp6uIGaC0JMiqR5CUsabPyM0dOYDR7Ea7ip4USZlya38YfPtvrX/tBlhHilj55nZ1nfN24AOAi9BVtz/Mbn8AEDJCqJgsVUa6nQnSxv2Fs7l/NlCzpfYEjmPrNyib/+t0ei2eEMjvNhLkHCZlci4WhBe7ePZTmzYqlY9+1pxtS4GB+5lM1BHT9tS270EWUDYFq1I0yY/fNiAk4bk9yBgmef/f2k6AlYQZHsNFnW8wBQxCd68iWv7/35bXfz3JZmfGligWAKRjIs3IpzxQ27vAglHSiOzCYzJ9L9A1CdiyFvyR66ucA4jKifu5ehwER26yV7HjKqn5Mfozo7Coxxt8LWWPT47BeMxX8p0Pjb7hZn+6bw7z3Lw+7653j5sI8CLu5kThpMlj1m4c2ch3jGcP1FsT13vuK3qjecKTZk2kHcOZY40UX+qdaxstZqsqQqgXz+QGF99ZJLqr3VYu4aecl1Ab5GmqS8k/GV5b95zxQ5d4EfXUJ6kTS/CXF/aiqKDOT1T7Jz5z0PwDUcwr9clLN1OJGCiKfqvah+h3XzrBOiLOW8wvn8gW6qE8vPxi+Efv+UH55T7PQFVMh6cZ1pZQlzJpKZ7P7uWvwPGJ6DTlR6wbyj3Iv2HyefnRo/dv7dNx+qaa0N38iBsR++Uil7Wd4afwDNsrzDAK4fXZwvEY/jdKuIKXlfrQd2C39dW7ntnRbIp9OtGy9pPBn/V2ASoi/2UJZfS+xuGLH8bnLuPlzdTNS6zdyk8Dt/h6sfOW5myxh1f+zf3zZ3MX/mO9cQPp5pOx967ZA6/pqHvclNfnUFF+rq+Vd7alKr6KWPcIDhpn6v2K6NlUu6LrKo8b/pYpU/Gazfvtwhn7tEOUuXht5rUJdSf6sLjYf0VTYDgwJ81yaqKTUYej/tbHckSRb/HZicwGJqh1mAHB/IuNs9dc9yuvF3D5Xocm3elWFdq5oEy70dYFit79yaLiNjPj5UUcVmZUVhQEhW5V2Z6Cm4HVH/R8qlamRYwBileuh07CbEce3TXa2JmXWBf+ozt319psboobeZhVnwhMZzOeQJzhpTDbP71Tv8HuZxxUI/+ma3XW6DFDDs4+qmpERwHGBd2edxwUKlODRdUWZ/g0GOezrbzOZauFMai4QU6GVHV6aPNBiBndHSsV4IzpvUiiYyg6OyyrL4Dj5q/Lw3N5kAwftEVl9rNd7Jk5PDij2hTH6wIXnsyXkKePxbmHYgC8A6an5Fob/KH5GtC0l4eFso+VpxedtJHdHpNm+Bvy4C79yVOkrZsLrQ3OHCeB0Ra+kBIRldUGlDCEmq2RwXnfyh6Dz+alk6eftI2n6sastRrGwbwszBeDRS/Fa/KwRJkCzTsLr/JCs5hOPE/MPLYdZ1F1fv7D+VmysX6NpOC8aU9F4Qs6HvDyUy9PvFGDKZ/P5101TYHFl8pjj6wm/qyS75etZhhfg0UEL4OYmHk6m6dO192AzoIyPSV9QedDA4Ml23rRbqxMPMxf7FJnDc5FTElVS/PyqgePzmwVZ26NWhRDQ+oaT7ly7ell4s3DypS1s0g+tOr7XHrrkZj9+x/mJBttrLx98lFIaRZzHz4aC7r52/JQ4VjHahY2/YVXZn/QC2ztQb/sY3uRlyc5vQS8nLPGT/n27495i8HPA152z7Fh5aFpyn1GPJKHuPL8Iw94DuW3KjkURAWZXn4EQy89xiKEHN1mk/tkM4gYDBxwNoYvRfE6LFqsxWJtPrDGbsnLMap3Ka3MUoytW0cvieozOmdERmhcqzG+3HmZv2yZeiIeQTKGdRT4HHNxekm1tY+/n06rGmFleqLscSERzctTKM6G9P0Pc1RmVvrascIxaO1CQCiYPE15bD7c3xSeW7gXxYjgxcrUlcbIvO0r+Yplhx0kTt3qafDOmFyMjgGxXu73rddMHpV1wMubyAGcf/v5dLr5P72Ta9lBF+fzMJrMycwv+9vnU3ANIl1cH9tfW7af8u0/HG0vV47jNFXzFTtaha1xvze/s8KMtCYucXc1nzfd/MQydUXn/b72RBt5wO/3jRcMH9BdhC/yctKBIveRYPrNpDWqBs
jzmeax4exd/OKTUvkXg+fOJUHych9xt/9goJMrapSgvXrj8+8vk/N80f22Sewj6cyGqt1B6mztoeklVHHraouhvHJaG/OuBz6DHKMpFmQULU1bRWlyYE0RPXYYkUycIemN7TLtgNCJX6BqdyxDKkegO7nJK5xQ7OVYDZTMf9bVHidtk6DQX9Et+V9M7esgbsYBdEeUpsB0Xvw2kd9+rI7V+m47u+O/tq7mw7262HU1WlS9uFzsV6JxIHNmUCy0QS9e077JGRFbG65z3/dOKB/Zk+yDdKpUmdXjn/aS3N5nv4fK7bMHHmPlHd4E2+iTbV5rpzScRnxk6KARuDTJ8Q1LpK2mP8gj1EbuJ9RIyY+EWK4hCiIDBAS1Tm2IEXAFfgKPgdL9O6mAa06wjCcUAL6EsxPQWO9VNegBPm/0GgkZbDxCynxujX/92vmGcjZRMAY45puak2sFLCLSwXpEsyy5fnF0jGJBhm+fNSHKKUUfy+276A7/feLOFxxUuHRNJI2Osenxyvf8DAGObT60pfTTlhEg9u/KKkhJqm5U1/+BEcSkpFDA5XeCqxwXmPac1jcuZ3JWQ+p0NdWzb/5v1ZvF8GtMTFFEdQjpLO0bwPb0BHNWnip3liDXI2fXf05jjvfJ0NpjLCUgfTh9CMFYVFKEd4Z/OG/2C+N435mnK+9t1gvCiVcaaH7rK4+PjCvpVNiz+t2QyqH1O8x3JKZVl6Q+Lp/XK8wMjVMslOq9FdSw5FtUs/CptXH9PW+wbWHgrV17R5jTVOtGtKFu3nb80T+E0tv9QkzW3J2dbaw/8ddAKZ0pxIaEqLjlPrji3VgJ3GvdFvlqD8075woxh4fVt0JZE0KVFsAvqhe0dqN9b35jtSpnYMXkU+vZq+IAHad3IHc2s/LYrnD1anfG46IFiMIr9oNbZDWvwthqYNqOigaKd/XlLU4XHfk/PXIjPsLy/9/kAtQ+/wKH+hI/IROWj5FPvTZAT9f7j4ZXQyG4M0TujMAFXYkKvEHv1xhySekgXGGqNxWeWKlf8dDAlLuB1cb/qOD+rk7cmwt+1yKpk9cudqBanTi6zTbXRtV8qylNtjyOVKy1HTz0GW9rjt6sSjAZcT5R+KdtyYb0zyqG9pSLuCw5WBwAn7fjBjKLLoxLXMI+52L9cLwIR2B6OllJZLHJ8vDxmWdtF+QJnmt1rsHPIWY20lftk8fYePkAIg6Hgn532QoIpegMxiWgAOfe5/U44APR8Ac0NeZrVh3gEhs12W+tVSiWiUQekf/YBECUy5fdYbA08dd7VzPAP9aiVcIB9k6tY7WdJ1wNV+bHeydNtmC6G5ICtFC1ZwmJU/j8hf0I8TRVKSiz5oYIa93EpUI78X8GYIAZabx47/n8LDAAJ0nNtP1rpROprqKMBRecShca6qXuTSI3jZBLOB3Vp381B5rCGhjSvh/NSVkYp2qIdP/Bg=";
},
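    // Lazily base64-decodes the dictionary string embedded above and
    // brotli-decompresses it into the raw static dictionary bytes.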
"dec/dictionary-browser.js": function(e, t, r) {
var n = e("base64-js");
r.init = function() {
var t = e("./decode").BrotliDecompressBuffer,
r = n.toByteArray(e("./dictionary.bin.js"));
return t(r);
};
},
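    // Canonical Huffman table construction for the brotli decoder: fills a
    // root table of 1 << rootBits entries plus second-level tables for codes
    // longer than the root width (maximum code length 15).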
"dec/huffman.js": function(e, t, r) {
function n(e, t) {
(this.bits = e), (this.value = t);
}
function o(e, t) {
for (var r = 1 << (t - 1); e & r; ) r >>= 1;
return (e & (r - 1)) + r;
}
function a(e, t, r, o, a) {
do (o -= r), (e[t + o] = new n(a.bits, a.value));
while (o > 0);
}
function i(e, t, r) {
for (var n = 1 << (t - r); t < s && ((n -= e[t]), !(n <= 0)); )
++t, (n <<= 1);
return t - r;
}
r.HuffmanCode = n;
const s = 15;
r.BrotliBuildHuffmanTable = function(e, t, r, d, l) {
var u,
c,
f,
h,
p,
w,
m,
b,
y,
g,
v,
A = t,
U = new Int32Array(16),
x = new Int32Array(16);
for (v = new Int32Array(l), f = 0; f < l; f++) U[d[f]]++;
for (x[1] = 0, c = 1; c < s; c++) x[c + 1] = x[c] + U[c];
for (f = 0; f < l; f++) 0 !== d[f] && (v[x[d[f]]++] = f);
if (((b = r), (y = 1 << b), (g = y), 1 === x[s])) {
for (h = 0; h < g; ++h) e[t + h] = new n(0, 65535 & v[0]);
return g;
}
for (h = 0, f = 0, c = 1, p = 2; c <= r; ++c, p <<= 1)
for (; U[c] > 0; --U[c])
(u = new n(255 & c, 65535 & v[f++])),
a(e, t + h, p, y, u),
(h = o(h, c));
for (m = g - 1, w = -1, c = r + 1, p = 2; c <= s; ++c, p <<= 1)
for (; U[c] > 0; --U[c])
(h & m) !== w &&
((t += y),
(b = i(U, c, r)),
(y = 1 << b),
(g += y),
(w = h & m),
(e[A + w] = new n((b + r) & 255, (t - A - w) & 65535))),
(u = new n((c - r) & 255, 65535 & v[f++])),
a(e, t + (h >> r), p, y, u),
(h = o(h, c));
return g;
};
},
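    // Prefix-code tables: each entry maps a symbol to a base value plus the
    // number of extra bits to read (block lengths, insert and copy lengths).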
"dec/prefix.js": function(e, t, r) {
function n(e, t) {
(this.offset = e), (this.nbits = t);
}
(r.kBlockLengthPrefixCode = [
new n(1, 2),
new n(5, 2),
new n(9, 2),
new n(13, 2),
new n(17, 3),
new n(25, 3),
new n(33, 3),
new n(41, 3),
new n(49, 4),
new n(65, 4),
new n(81, 4),
new n(97, 4),
new n(113, 5),
new n(145, 5),
new n(177, 5),
new n(209, 5),
new n(241, 6),
new n(305, 6),
new n(369, 7),
new n(497, 8),
new n(753, 9),
new n(1265, 10),
new n(2289, 11),
new n(4337, 12),
new n(8433, 13),
new n(16625, 24)
]),
(r.kInsertLengthPrefixCode = [
new n(0, 0),
new n(1, 0),
new n(2, 0),
new n(3, 0),
new n(4, 0),
new n(5, 0),
new n(6, 1),
new n(8, 1),
new n(10, 2),
new n(14, 2),
new n(18, 3),
new n(26, 3),
new n(34, 4),
new n(50, 4),
new n(66, 5),
new n(98, 5),
new n(130, 6),
new n(194, 7),
new n(322, 8),
new n(578, 9),
new n(1090, 10),
new n(2114, 12),
new n(6210, 14),
new n(22594, 24)
]),
(r.kCopyLengthPrefixCode = [
new n(2, 0),
new n(3, 0),
new n(4, 0),
new n(5, 0),
new n(6, 0),
new n(7, 0),
new n(8, 0),
new n(9, 0),
new n(10, 1),
new n(12, 1),
new n(14, 2),
new n(18, 2),
new n(22, 3),
new n(30, 3),
new n(38, 4),
new n(54, 4),
new n(70, 5),
new n(102, 5),
new n(134, 6),
new n(198, 7),
new n(326, 8),
new n(582, 9),
new n(1094, 10),
new n(2118, 24)
]),
(r.kInsertRangeLut = [0, 0, 8, 8, 0, 16, 8, 16, 16]),
(r.kCopyRangeLut = [0, 8, 0, 8, 16, 0, 16, 8, 16]);
},
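    // Byte-array input/output streams consumed by the decoder.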
"dec/streams.js": function(e, t, r) {
function n(e) {
(this.buffer = e), (this.pos = 0);
}
function o(e) {
(this.buffer = e), (this.pos = 0);
}
(n.prototype.read = function(e, t, r) {
this.pos + r > this.buffer.length &&
(r = this.buffer.length - this.pos);
for (var n = 0; n < r; n++) e[t + n] = this.buffer[this.pos + n];
return (this.pos += r), r;
}),
(r.BrotliInput = n),
(o.prototype.write = function(e, t) {
if (this.pos + t > this.buffer.length)
throw new Error("Output buffer is not large enough");
return (
this.buffer.set(e.subarray(0, t), this.pos),
(this.pos += t),
t
);
}),
(r.BrotliOutput = o);
},
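    // Static dictionary word transforms: each entry pairs a prefix and a
    // suffix with a transform id (identity, omit-first/omit-last, uppercase
    // first or all) that is applied to the matched dictionary word.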
"dec/transform.js": function(e, t, r) {
function n(e, t, r) {
(this.prefix = new Uint8Array(e.length)),
(this.transform = t),
(this.suffix = new Uint8Array(r.length));
for (var n = 0; n < e.length; n++)
this.prefix[n] = e.charCodeAt(n);
for (var n = 0; n < r.length; n++)
this.suffix[n] = r.charCodeAt(n);
}
function o(e, t) {
return e[t] < 192
? (e[t] >= 97 && e[t] <= 122 && (e[t] ^= 32), 1)
: e[t] < 224
? ((e[t + 1] ^= 32), 2)
: ((e[t + 2] ^= 5), 3);
}
var a = e("./dictionary");
const i = 0,
s = 1,
d = 2,
l = 3,
u = 4,
c = 5,
f = 6,
h = 7,
p = 8,
w = 9,
m = 10,
b = 11,
y = 12,
g = 13,
v = 14,
A = 15,
U = 16,
x = 17,
E = 18,
k = 20;
var B = [
new n("", i, ""),
new n("", i, " "),
new n(" ", i, " "),
new n("", y, ""),
new n("", m, " "),
new n("", i, " the "),
new n(" ", i, ""),
new n("s ", i, " "),
new n("", i, " of "),
new n("", m, ""),
new n("", i, " and "),
new n("", g, ""),
new n("", s, ""),
new n(", ", i, " "),
new n("", i, ", "),
new n(" ", m, " "),
new n("", i, " in "),
new n("", i, " to "),
new n("e ", i, " "),
new n("", i, '"'),
new n("", i, "."),
new n("", i, '">'),
new n("", i, "\n"),
new n("", l, ""),
new n("", i, "]"),
new n("", i, " for "),
new n("", v, ""),
new n("", d, ""),
new n("", i, " a "),
new n("", i, " that "),
new n(" ", m, ""),
new n("", i, ". "),
new n(".", i, ""),
new n(" ", i, ", "),
new n("", A, ""),
new n("", i, " with "),
new n("", i, "'"),
new n("", i, " from "),
new n("", i, " by "),
new n("", U, ""),
new n("", x, ""),
new n(" the ", i, ""),
new n("", u, ""),
new n("", i, ". The "),
new n("", b, ""),
new n("", i, " on "),
new n("", i, " as "),
new n("", i, " is "),
new n("", h, ""),
new n("", s, "ing "),
new n("", i, "\n\t"),
new n("", i, ":"),
new n(" ", i, ". "),
new n("", i, "ed "),
new n("", k, ""),
new n("", E, ""),
new n("", f, ""),
new n("", i, "("),
new n("", m, ", "),
new n("", p, ""),
new n("", i, " at "),
new n("", i, "ly "),
new n(" the ", i, " of "),
new n("", c, ""),
new n("", w, ""),
new n(" ", m, ", "),
new n("", m, '"'),
new n(".", i, "("),
new n("", b, " "),
new n("", m, '">'),
new n("", i, '="'),
new n(" ", i, "."),
new n(".com/", i, ""),
new n(" the ", i, " of the "),
new n("", m, "'"),
new n("", i, ". This "),
new n("", i, ","),
new n(".", i, " "),
new n("", m, "("),
new n("", m, "."),
new n("", i, " not "),
new n(" ", i, '="'),
new n("", i, "er "),
new n(" ", b, " "),
new n("", i, "al "),
new n(" ", b, ""),
new n("", i, "='"),
new n("", b, '"'),
new n("", m, ". "),
new n(" ", i, "("),
new n("", i, "ful "),
new n(" ", m, ". "),
new n("", i, "ive "),
new n("", i, "less "),
new n("", b, "'"),
new n("", i, "est "),
new n(" ", m, "."),
new n("", b, '">'),
new n(" ", i, "='"),
new n("", m, ","),
new n("", i, "ize "),
new n("", b, "."),
new n("\xc2\xa0", i, ""),
new n(" ", i, ","),
new n("", m, '="'),
new n("", b, '="'),
new n("", i, "ous "),
new n("", b, ", "),
new n("", m, "='"),
new n(" ", m, ","),
new n(" ", b, '="'),
new n(" ", b, ", "),
new n("", b, ","),
new n("", b, "("),
new n("", b, ". "),
new n(" ", b, "."),
new n("", b, "='"),
new n(" ", b, ". "),
new n(" ", m, '="'),
new n(" ", b, "='"),
new n(" ", m, "='")
];
(r.kTransforms = B),
(r.kNumTransforms = B.length),
(r.transformDictionaryWord = function(e, t, r, n, i) {
var s,
d = B[i].prefix,
l = B[i].suffix,
u = B[i].transform,
c = u < y ? 0 : u - 11,
f = 0,
h = t;
c > n && (c = n);
for (var p = 0; p < d.length; ) e[t++] = d[p++];
for (r += c, n -= c, u <= w && (n -= u), f = 0; f < n; f++)
e[t++] = a.dictionary[r + f];
if (((s = t - n), u === m)) o(e, s);
else if (u === b)
for (; n > 0; ) {
var g = o(e, s);
(s += g), (n -= g);
}
for (var v = 0; v < l.length; ) e[t++] = l[v++];
return t - h;
});
},
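    // Vendored copy of base64-js; toByteArray() is what decodes the embedded
    // dictionary string above.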
"node_modules/base64-js/index.js": function(e, t, r) {
"use strict";
function n(e) {
var t = e.length;
if (t % 4 > 0)
throw new Error(
"Invalid string. Length must be a multiple of 4"
);
return "=" === e[t - 2] ? 2 : "=" === e[t - 1] ? 1 : 0;
}
function o(e) {
return (3 * e.length) / 4 - n(e);
}
function a(e) {
var t,
r,
o,
a,
i,
s,
d = e.length;
(i = n(e)), (s = new c((3 * d) / 4 - i)), (o = i > 0 ? d - 4 : d);
var l = 0;
for (t = 0, r = 0; t < o; t += 4, r += 3)
(a =
(u[e.charCodeAt(t)] << 18) |
(u[e.charCodeAt(t + 1)] << 12) |
(u[e.charCodeAt(t + 2)] << 6) |
u[e.charCodeAt(t + 3)]),
(s[l++] = (a >> 16) & 255),
(s[l++] = (a >> 8) & 255),
(s[l++] = 255 & a);
return (
2 === i
? ((a =
(u[e.charCodeAt(t)] << 2) |
(u[e.charCodeAt(t + 1)] >> 4)),
(s[l++] = 255 & a))
: 1 === i &&
((a =
(u[e.charCodeAt(t)] << 10) |
(u[e.charCodeAt(t + 1)] << 4) |
(u[e.charCodeAt(t + 2)] >> 2)),
(s[l++] = (a >> 8) & 255),
(s[l++] = 255 & a)),
s
);
}
function i(e) {
return (
l[(e >> 18) & 63] +
l[(e >> 12) & 63] +
l[(e >> 6) & 63] +
l[63 & e]
);
}
function s(e, t, r) {
for (var n, o = [], a = t; a < r; a += 3)
(n = (e[a] << 16) + (e[a + 1] << 8) + e[a + 2]), o.push(i(n));
return o.join("");
}
function d(e) {
for (
var t,
r = e.length,
n = r % 3,
o = "",
a = [],
i = 16383,
d = 0,
u = r - n;
d < u;
d += i
)
a.push(s(e, d, d + i > u ? u : d + i));
return (
1 === n
? ((t = e[r - 1]),
(o += l[t >> 2]),
(o += l[(t << 4) & 63]),
(o += "=="))
: 2 === n &&
((t = (e[r - 2] << 8) + e[r - 1]),
(o += l[t >> 10]),
(o += l[(t >> 4) & 63]),
(o += l[(t << 2) & 63]),
(o += "=")),
a.push(o),
a.join("")
);
}
(r.byteLength = o), (r.toByteArray = a), (r.fromByteArray = d);
for (
var l = [],
u = [],
c = "undefined" != typeof Uint8Array ? Uint8Array : Array,
f =
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",
h = 0,
p = f.length;
h < p;
++h
)
(l[h] = f[h]), (u[f.charCodeAt(h)] = h);
(u["-".charCodeAt(0)] = 62), (u["_".charCodeAt(0)] = 63);
}
};
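  // Minimal CommonJS-style resolver over the inlined module map: normalizes
  // "./" and "../" segments, falls back to "<name>.js" / "<name>/index.js",
  // walks node_modules/ upward for bare imports, and caches module exports.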
for (var r in t) t[r].folder = r.substring(0, r.lastIndexOf("/") + 1);
var n = function(e) {
var r = [];
return (
(e = e.split("/").every(function(e) {
return ".." == e ? r.pop() : "." == e || "" == e || r.push(e);
})
? r.join("/")
: null),
e ? t[e] || t[e + ".js"] || t[e + "/index.js"] : null
);
},
o = function(e, t) {
return e
? n(e.folder + "node_modules/" + t) || o(e.parent, t)
: null;
},
a = function(e, t) {
var r = t.match(/^\//)
? null
: e
? t.match(/^\.\.?\//)
? n(e.folder + t)
: o(e, t)
: n(t);
if (!r) throw "module not found: " + t;
return (
r.exports ||
((r.parent = e), r(a.bind(null, r), r, (r.exports = {}))),
r.exports
);
};
return a(null, e);
},
decompress: function(e) {
this.exports || (this.exports = this.require("decompress.js"));
try {
return this.exports(e);
} catch (e) {}
},
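  // Detects the UnityWeb marker inside a brotli stream: parses the header
  // bits to locate the embedded comment and compares it byte-for-byte.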
hasUnityMarker: function(e) {
var t = "UnityWeb Compressed Content (brotli)";
if (!e.length) return !1;
    var r = 1 & e[0] ? (14 & e[0] ? 4 : 7) : 1,
      n = e[0] & ((1 << r) - 1),
      o = 1 + ((Math.log(t.length - 1) / Math.log(2)) >> 3),
      // declared locally; previously this leaked an implicit global
      commentOffset = (r + 1 + 2 + 1 + 2 + (o << 3) + 7) >> 3;
    if (17 == n || commentOffset > e.length) return !1;
for (
var a = n + ((6 + (o << 4) + ((t.length - 1) << 6)) << r), i = 0;
i < commentOffset;
i++, a >>>= 8
)
if (e[i] != (255 & a)) return !1;
return (
String.fromCharCode.apply(
null,
e.subarray(commentOffset, commentOffset + t.length)
) == t
);
}
},
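  // Picks gzip, brotli or identity based on the UnityWeb marker; when a
  // callback is supplied, the work is offloaded to a lazily created Worker.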
decompress: function(e, t) {
var r = this.gzip.hasUnityMarker(e)
? this.gzip
: this.brotli.hasUnityMarker(e)
? this.brotli
: this.identity;
if (
(this.serverSetupWarningEnabled &&
r != this.identity &&
(console.log(
"You can reduce your startup time if you configure your web server to host .unityweb files using " +
(r == this.gzip ? "gzip" : "brotli") +
" compression."
),
(this.serverSetupWarningEnabled = !1)),
"function" != typeof t)
)
return r.decompress(e);
if (!r.worker) {
var n = URL.createObjectURL(
new Blob(
[
"this.require = ",
r.require.toString(),
"; this.decompress = ",
r.decompress.toString(),
"; this.onmessage = ",
function(e) {
var t = {
id: e.data.id,
decompressed: this.decompress(e.data.compressed)
};
postMessage(t, t.decompressed ? [t.decompressed.buffer] : []);
}.toString(),
"; postMessage({ ready: true });"
],
{ type: "text/javascript" }
)
);
(r.worker = new Worker(n)),
(r.worker.onmessage = function(e) {
return e.data.ready
? void URL.revokeObjectURL(n)
: (this.callbacks[e.data.id](e.data.decompressed),
void delete this.callbacks[e.data.id]);
}),
(r.worker.callbacks = {}),
(r.worker.nextCallbackId = 0);
}
var o = r.worker.nextCallbackId++;
(r.worker.callbacks[o] = t),
r.worker.postMessage({ id: o, compressed: e }, [e.buffer]);
},
serverSetupWarningEnabled: !0
},
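  // asm.js implementations of CRC32, MD5 and SHA-1, each working over a
  // 16 MB heap; loadCode() below uses the MD5 to key downloaded code blobs.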
Cryptography: {
crc32: function(e) {
var t = UnityLoader.Cryptography.crc32.module;
if (!t) {
var r = new ArrayBuffer(16777216),
n = (function(e, t, r) {
"use asm";
var n = new e.Uint8Array(r);
var o = new e.Uint32Array(r);
function a(e, t) {
e = e | 0;
t = t | 0;
var r = 0;
for (r = o[1024 >> 2] | 0; t; e = (e + 1) | 0, t = (t - 1) | 0)
r = o[(((r & 255) ^ n[e]) << 2) >> 2] ^ (r >>> 8) ^ 4278190080;
o[1024 >> 2] = r;
}
return { process: a };
})({ Uint8Array: Uint8Array, Uint32Array: Uint32Array }, null, r);
t = UnityLoader.Cryptography.crc32.module = {
buffer: r,
HEAPU8: new Uint8Array(r),
HEAPU32: new Uint32Array(r),
process: n.process,
crc32: 1024,
data: 1028
};
for (var o = 0; o < 256; o++) {
for (var a = 255 ^ o, i = 0; i < 8; i++)
a = (a >>> 1) ^ (1 & a ? 3988292384 : 0);
t.HEAPU32[o] = a;
}
}
t.HEAPU32[t.crc32 >> 2] = 0;
      for (var s = 0; s < e.length; ) {
        var d = Math.min(t.HEAPU8.length - t.data, e.length - s);
        // process() accumulates the CRC inside the heap and returns nothing;
        // the previous assignment to an implicit global `crc` was dropped
        t.HEAPU8.set(e.subarray(s, s + d), t.data),
          t.process(t.data, d),
          (s += d);
      }
var l = t.HEAPU32[t.crc32 >> 2];
return new Uint8Array([l >> 24, l >> 16, l >> 8, l]);
},
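    // MD5: processes whole 64-byte blocks, then pads the tail with 0x80,
    // zeros, and the 64-bit little-endian bit length.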
md5: function(e) {
var t = UnityLoader.Cryptography.md5.module;
if (!t) {
var r = new ArrayBuffer(16777216),
n = (function(e, t, r) {
"use asm";
var n = new e.Uint32Array(r);
function o(e, t) {
e = e | 0;
t = t | 0;
var r = 0,
o = 0,
a = 0,
i = 0,
s = 0,
d = 0,
l = 0,
u = 0,
c = 0,
f = 0,
h = 0,
p = 0;
(r = n[128] | 0),
(o = n[129] | 0),
(a = n[130] | 0),
(i = n[131] | 0);
for (; t; e = (e + 64) | 0, t = (t - 1) | 0) {
s = r;
d = o;
l = a;
u = i;
for (f = 0; (f | 0) < 512; f = (f + 8) | 0) {
p = n[f >> 2] | 0;
r =
(r +
(n[(f + 4) >> 2] | 0) +
(n[(e + (p >>> 14)) >> 2] | 0) +
((f | 0) < 128
? i ^ (o & (a ^ i))
: (f | 0) < 256
? a ^ (i & (o ^ a))
: (f | 0) < 384
? o ^ a ^ i
: a ^ (o | ~i))) |
0;
h = (((r << (p & 31)) | (r >>> (32 - (p & 31)))) + o) | 0;
r = i;
i = a;
a = o;
o = h;
}
r = (r + s) | 0;
o = (o + d) | 0;
a = (a + l) | 0;
i = (i + u) | 0;
}
n[128] = r;
n[129] = o;
n[130] = a;
n[131] = i;
}
return { process: o };
})({ Uint32Array: Uint32Array }, null, r);
(t = UnityLoader.Cryptography.md5.module = {
buffer: r,
HEAPU8: new Uint8Array(r),
HEAPU32: new Uint32Array(r),
process: n.process,
md5: 512,
data: 576
}),
t.HEAPU32.set(
new Uint32Array([
7,
3614090360,
65548,
3905402710,
131089,
606105819,
196630,
3250441966,
262151,
4118548399,
327692,
1200080426,
393233,
2821735955,
458774,
4249261313,
524295,
1770035416,
589836,
2336552879,
655377,
4294925233,
720918,
2304563134,
786439,
1804603682,
851980,
4254626195,
917521,
2792965006,
983062,
1236535329,
65541,
4129170786,
393225,
3225465664,
720910,
643717713,
20,
3921069994,
327685,
3593408605,
655369,
38016083,
983054,
3634488961,
262164,
3889429448,
589829,
568446438,
917513,
3275163606,
196622,
4107603335,
524308,
1163531501,
851973,
2850285829,
131081,
4243563512,
458766,
1735328473,
786452,
2368359562,
327684,
4294588738,
524299,
2272392833,
720912,
1839030562,
917527,
4259657740,
65540,
2763975236,
262155,
1272893353,
458768,
4139469664,
655383,
3200236656,
851972,
681279174,
11,
3936430074,
196624,
3572445317,
393239,
76029189,
589828,
3654602809,
786443,
3873151461,
983056,
530742520,
131095,
3299628645,
6,
4096336452,
458762,
1126891415,
917519,
2878612391,
327701,
4237533241,
786438,
1700485571,
196618,
2399980690,
655375,
4293915773,
65557,
2240044497,
524294,
1873313359,
983050,
4264355552,
393231,
2734768916,
851989,
1309151649,
262150,
4149444226,
720906,
3174756917,
131087,
718787259,
589845,
3951481745
])
);
}
t.HEAPU32.set(
new Uint32Array([1732584193, 4023233417, 2562383102, 271733878]),
t.md5 >> 2
);
for (var o = 0; o < e.length; ) {
var a = Math.min(t.HEAPU8.length - t.data, e.length - o) & -64;
if (
(t.HEAPU8.set(e.subarray(o, o + a), t.data),
(o += a),
t.process(t.data, a >> 6),
e.length - o < 64)
) {
if (
((a = e.length - o),
t.HEAPU8.set(e.subarray(e.length - a, e.length), t.data),
(o += a),
(t.HEAPU8[t.data + a++] = 128),
a > 56)
) {
for (var i = a; i < 64; i++) t.HEAPU8[t.data + i] = 0;
t.process(t.data, 1), (a = 0);
}
for (var i = a; i < 64; i++) t.HEAPU8[t.data + i] = 0;
for (
var s = e.length, d = 0, i = 56;
i < 64;
i++, d = (224 & s) >> 5, s /= 256
)
t.HEAPU8[t.data + i] = ((31 & s) << 3) + d;
t.process(t.data, 1);
}
}
return new Uint8Array(t.HEAPU8.subarray(t.md5, t.md5 + 16));
},
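    // SHA-1: same blocking/padding scheme as md5() above, but with
    // big-endian words, a big-endian length, and a 160-bit state.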
sha1: function(e) {
var t = UnityLoader.Cryptography.sha1.module;
if (!t) {
var r = new ArrayBuffer(16777216),
n = (function(e, t, r) {
"use asm";
var n = new e.Uint32Array(r);
function o(e, t) {
e = e | 0;
t = t | 0;
var r = 0,
o = 0,
a = 0,
i = 0,
s = 0,
d = 0,
l = 0,
u = 0,
c = 0,
f = 0,
h = 0,
p = 0;
(r = n[80] | 0),
(o = n[81] | 0),
(a = n[82] | 0),
(i = n[83] | 0),
(s = n[84] | 0);
for (; t; e = (e + 64) | 0, t = (t - 1) | 0) {
d = r;
l = o;
u = a;
c = i;
f = s;
for (
p = 0;
(p | 0) < 320;
p = (p + 4) | 0,
s = i,
i = a,
a = (o << 30) | (o >>> 2),
o = r,
r = h
) {
if ((p | 0) < 64) {
h = n[(e + p) >> 2] | 0;
h =
((h << 24) & 4278190080) |
((h << 8) & 16711680) |
((h >>> 8) & 65280) |
((h >>> 24) & 255);
} else {
h =
n[(p - 12) >> 2] ^
n[(p - 32) >> 2] ^
n[(p - 56) >> 2] ^
n[(p - 64) >> 2];
h = (h << 1) | (h >>> 31);
}
n[p >> 2] = h;
h =
(h +
(((r << 5) | (r >>> 27)) + s) +
((p | 0) < 80
? (((o & a) | (~o & i) | 0) + 1518500249) | 0
: (p | 0) < 160
? ((o ^ a ^ i) + 1859775393) | 0
: (p | 0) < 240
? (((o & a) | (o & i) | (a & i)) + 2400959708) | 0
: ((o ^ a ^ i) + 3395469782) | 0)) |
0;
}
r = (r + d) | 0;
o = (o + l) | 0;
a = (a + u) | 0;
i = (i + c) | 0;
s = (s + f) | 0;
}
n[80] = r;
n[81] = o;
n[82] = a;
n[83] = i;
n[84] = s;
}
return { process: o };
})({ Uint32Array: Uint32Array }, null, r);
t = UnityLoader.Cryptography.sha1.module = {
buffer: r,
HEAPU8: new Uint8Array(r),
HEAPU32: new Uint32Array(r),
process: n.process,
sha1: 320,
data: 384
};
}
t.HEAPU32.set(
new Uint32Array([
1732584193,
4023233417,
2562383102,
271733878,
3285377520
]),
t.sha1 >> 2
);
for (var o = 0; o < e.length; ) {
var a = Math.min(t.HEAPU8.length - t.data, e.length - o) & -64;
if (
(t.HEAPU8.set(e.subarray(o, o + a), t.data),
(o += a),
t.process(t.data, a >> 6),
e.length - o < 64)
) {
if (
((a = e.length - o),
t.HEAPU8.set(e.subarray(e.length - a, e.length), t.data),
(o += a),
(t.HEAPU8[t.data + a++] = 128),
a > 56)
) {
for (var i = a; i < 64; i++) t.HEAPU8[t.data + i] = 0;
t.process(t.data, 1), (a = 0);
}
for (var i = a; i < 64; i++) t.HEAPU8[t.data + i] = 0;
for (
var s = e.length, d = 0, i = 63;
i >= 56;
i--, d = (224 & s) >> 5, s /= 256
)
t.HEAPU8[t.data + i] = ((31 & s) << 3) + d;
t.process(t.data, 1);
}
}
for (var l = new Uint8Array(20), i = 0; i < l.length; i++)
l[i] = t.HEAPU8[t.sha1 + (i & -4) + 3 - (3 & i)];
return l;
}
},
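  // Global error handling: maps blob: stack frames back to their module,
  // downloads the wasm/asm symbol map to demangle frames, and shows a
  // one-time alert with a friendlier message for common failure modes.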
Error: {
init: (function() {
return (
(Error.stackTraceLimit = 50),
window.addEventListener("error", function(e) {
var t = UnityLoader.Error.getModule(e);
if (!t) return UnityLoader.Error.handler(e);
var r = t.useWasm ? t.wasmSymbolsUrl : t.asmSymbolsUrl;
if (!r) return UnityLoader.Error.handler(e, t);
var n = new XMLHttpRequest();
n.open("GET", t.resolveBuildUrl(r)),
(n.responseType = "arraybuffer"),
(n.onload = function() {
UnityLoader.loadCode(
UnityLoader.Compression.decompress(new Uint8Array(n.response)),
function(r) {
(t.demangleSymbol = UnityLoader[r]()),
UnityLoader.Error.handler(e, t);
}
);
}),
n.send();
}),
!0
);
})(),
stackTraceFormat:
navigator.userAgent.indexOf("Chrome") != -1
? "(\\s+at\\s+)(([\\w\\d_\\.]*?)([\\w\\d_$]+)(/[\\w\\d_\\./]+|))(\\s+\\[.*\\]|)\\s*\\((blob:.*)\\)"
: "(\\s*)(([\\w\\d_\\.]*?)([\\w\\d_$]+)(/[\\w\\d_\\./]+|))(\\s+\\[.*\\]|)\\s*@(blob:.*)",
stackTraceFormatWasm:
navigator.userAgent.indexOf("Chrome") != -1
? "((\\s+at\\s*)\\s\\(<WASM>\\[(\\d+)\\]\\+\\d+\\))()"
: "((\\s*)wasm-function\\[(\\d+)\\])@(blob:.*)",
blobParseRegExp: new RegExp("^(blob:.*)(:\\d+:\\d+)$"),
getModule: function(e) {
var t = e.message.match(new RegExp(this.stackTraceFormat, "g"));
for (var r in t) {
var n = t[r].match(new RegExp("^" + this.stackTraceFormat + "$")),
o = n[7].match(this.blobParseRegExp);
if (o && UnityLoader.Blobs[o[1]] && UnityLoader.Blobs[o[1]].Module)
return UnityLoader.Blobs[o[1]].Module;
}
},
demangle: function(e, t) {
var r = e.message;
return t
? ((r = r.replace(
new RegExp(this.stackTraceFormat, "g"),
function(e) {
var r = e.match(new RegExp("^" + this.stackTraceFormat + "$")),
n = r[7].match(this.blobParseRegExp),
o = t.demangleSymbol ? t.demangleSymbol(r[4]) : r[4],
a =
n && UnityLoader.Blobs[n[1]] && UnityLoader.Blobs[n[1]].url
? UnityLoader.Blobs[n[1]].url
: "blob";
return (
r[1] +
o +
(r[2] != o ? " [" + r[2] + "]" : "") +
" (" +
(n ? a.substr(a.lastIndexOf("/") + 1) + n[2] : r[7]) +
")"
);
}.bind(this)
)),
t.useWasm &&
(r = r.replace(
new RegExp(this.stackTraceFormatWasm, "g"),
function(e) {
var r = e.match(
new RegExp("^" + this.stackTraceFormatWasm + "$")
),
n = t.demangleSymbol ? t.demangleSymbol(r[3]) : r[3],
o = r[4].match(this.blobParseRegExp),
a =
o && UnityLoader.Blobs[o[1]] && UnityLoader.Blobs[o[1]].url
? UnityLoader.Blobs[o[1]].url
: "blob";
return (
(n == r[3] ? r[1] : r[2] + n + " [wasm:" + r[3] + "]") +
(r[4]
? " (" +
(o ? a.substr(a.lastIndexOf("/") + 1) + o[2] : r[4]) +
")"
: "")
);
}.bind(this)
)),
r)
: r;
},
handler: function(e, t) {
var r = t ? this.demangle(e, t) : e.message;
if (
!(
(t && t.errorhandler && t.errorhandler(r, e.filename, e.lineno)) ||
(console.log("Invoking error handler due to\n" + r),
"function" == typeof dump &&
dump("Invoking error handler due to\n" + r),
r.indexOf("UnknownError") != -1 ||
r.indexOf("Program terminated with exit(0)") != -1 ||
this.didShowErrorMessage)
)
) {
        r =
          "An error occurred running the Unity content on this page. See your browser JavaScript console for more info. The error was:\n" +
          r;
r.indexOf("DISABLE_EXCEPTION_CATCHING") != -1
? (r =
"An exception has occurred, but exception handling has been disabled in this build. If you are the developer of this content, enable exceptions in your project WebGL player settings to be able to catch the exception or see the stack trace.")
: r.indexOf("Cannot enlarge memory arrays") != -1
? (r =
"Out of memory. If you are the developer of this content, try allocating more memory to your WebGL build in the WebGL player settings.")
: (r.indexOf("Invalid array buffer length") == -1 &&
r.indexOf("Invalid typed array length") == -1 &&
r.indexOf("out of memory") == -1 &&
r.indexOf("could not allocate memory") == -1) ||
(r =
"The browser could not allocate enough memory for the WebGL content. If you are the developer of this content, try allocating less memory to your WebGL build in the WebGL player settings."),
alert(r),
(this.didShowErrorMessage = !0);
}
},
popup: function(e, t, r) {
r = r || [{ text: "OK" }];
var n = document.createElement("div");
n.style.cssText =
"position: absolute; top: 50%; left: 50%; -webkit-transform: translate(-50%, -50%); transform: translate(-50%, -50%); text-align: center; border: 1px solid black; padding: 5px; background: #E8E8E8";
var o = document.createElement("span");
(o.textContent = t),
n.appendChild(o),
n.appendChild(document.createElement("br"));
for (var a = 0; a < r.length; a++) {
var i = document.createElement("button");
r[a].text && (i.textContent = r[a].text),
r[a].callback && (i.onclick = r[a].callback),
(i.style.margin = "5px"),
i.addEventListener("click", function() {
e.container.removeChild(n);
}),
n.appendChild(i);
}
e.container.appendChild(n);
}
},
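  // Tiny dependency-driven job scheduler: a job's callback runs once every
  // dependency has a result, and its completion wakes all dependants.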
Job: {
schedule: function(e, t, r, n, o) {
o = o || {};
var a = e.Jobs[t];
if (
(a || (a = e.Jobs[t] = { dependencies: {}, dependants: {} }),
a.callback)
)
throw "[UnityLoader.Job.schedule] job '" +
t +
"' has been already scheduled";
if ("function" != typeof n)
throw "[UnityLoader.Job.schedule] job '" + t + "' has invalid callback";
if ("object" != typeof o)
throw "[UnityLoader.Job.schedule] job '" +
t +
"' has invalid parameters";
(a.callback = function(e, t) {
(a.starttime = performance.now()), n(e, t);
}),
(a.parameters = o),
(a.complete = function(r) {
(a.endtime = performance.now()), (a.result = { value: r });
for (var n in a.dependants) {
var o = e.Jobs[n];
o.dependencies[t] = a.dependants[n] = !1;
var i = "function" != typeof o.callback;
for (var s in o.dependencies) i = i || o.dependencies[s];
if (!i) {
if (o.executed)
throw "[UnityLoader.Job.schedule] job '" +
t +
"' has already been executed";
(o.executed = !0), setTimeout(o.callback.bind(null, e, o), 0);
}
}
});
var i = !1;
r.forEach(function(r) {
var n = e.Jobs[r];
n || (n = e.Jobs[r] = { dependencies: {}, dependants: {} }),
(a.dependencies[r] = n.dependants[t] = !n.result) && (i = !0);
}),
i || ((a.executed = !0), setTimeout(a.callback.bind(null, e, a), 0));
},
result: function(e, t) {
var r = e.Jobs[t];
if (!r) throw "[UnityLoader.Job.result] job '" + t + "' does not exist";
if ("object" != typeof r.result)
throw "[UnityLoader.Job.result] job '" + t + "' has invalid result";
return r.result.value;
}
},
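  // Splash-screen progress: per-style artwork, DOM for the logo and bar,
  // and per-download accounting that reports 90% of the weighted total.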
Progress: {
Styles: {
Dark: {
progressLogoUrl:
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAJoAAACCCAYAAAC+etHhAAAACXBIWXMAAAsSAAALEgHS3X78AAAI2UlEQVR42u2d7VXjSgyGpZwtwHRgOjAVYCrAVLDZCjZUsKGCsBWEDhIqiKkg6SB0QDqY+yOTe3J9iePRfMkz0jkcfkDsGfuJpHk1H6iUAjEx3zaRRyAWxJRS//6IjeJ9VUqpmVJqpY42s33vIX7wHDBElDfJD6wSAGoAuNe/y86/tIj4QAEtpAlo/MAqOmBVV18i4cWFBu2HvFoe4RAAmjO4TD9fI2LLuY8CWrxweA5WYXnJRwAQ0AQsVXTAKh3foub+DCRH8wdXrT3NoDzLgd0g4kFytDzyrHO4QlsDAG8SOtOVHR4d5Vm2di+gpSc7NB7yrKTzNMnRrudZJ69VjaDJt4j4KTnaePKsk9camzUA8CoejW+e5Ut2CG1rRHzi6NGyBU0ptRqp1+qzAyLecAQty2lCSqkmQcgAAAod/tnZJEPICgBYJNzFRkDjYbMEcrE+u5fBAI/kfwvxxVXfdrUcJTmaX/vDBLKD5+vXEjrjebMaAKYRwVoDwDMA3OnfWYXPnATbP4HBagHgA45TrXedwcgmN4+WBWhKqWmAh38Ca30O1oXBiO/wXSmlyqHlKBkMuIGs0AOA0hNY7dBp1Howsg/U9V+I+MZlMJCDR3MlZxiD9Y2F1O9YTRtK2qNZyhk7Dde7i4UfejCyCdj93nKUeDS3tjCAbNfxWgcPbaHYGo5TlEy9cqGUqq7kiwLaWRL/0+ThwvB5Y77B6vaDWoN81iPmKXH0uePyMlluiaCUmiq3tldKLZRSjR4gBBuMKKW+iG2e62s0xM+vhrz3ED8sQXMI2Ze+VhmxLwuLL0ZxBivJBLQwnqyK3JfSou3TzrW2xOvUHECbcAuXALB0qCPFzk+ofWm/0cDeideqJUfz58mmDJ5rbdH+2uH1thI6E4VM92lPbP+y55rUQUWRPWiJQjazGLwUPdddEa/bZJ2jecjJ3hhAVgB9psjfK3oeNU97zDZHS9GT2coZHkex+yxDZ8KQ2cgZzcB7UHO/MqvQmWK4dCRnrAf+75p4jzr2tzCYR0vVkzmQM0qD+zgpRyUbOlOGzDKkLQj3Io1okwfNMWRLhpB5kTN67rexLckll6M5zsneEPEXM8hs5IwX4vQkqszRxHxQ3jxa6p5M93HpsjQ08J4V8Z6b5EJnJpBVFn2qLe9NygmTCp2ph8szI0/PdrAOoSW+myjhcyKQkfvZELWpA7hZqf5B/Nx9rAfmLHTmEC4dyBlzV4MQm9xwtDlaZpDNbadnO2oHddZtMcocLaOc7CRn/A4sZzjN02LIHBOBjDQAoHil1kNdlqqnlaPK0RyHyy1zwGzljMpTmyizbsvRhE7HnmwHAA/A36hyxpvHhTKm4fMlyi5DFI/m2pOFXNBrI2eErGcatGtGGYywH3VmClkRW87oaZvJZMvpdw6GHWg5QmYrZzDS9DaXIhkr0DKGrLRY5lYHauPCdDASGrQfQ8Olw8T/ZCvFbGOZHimAKme0gdr4AccNBy/Za+xV+1c34vMEWQ52G2p0p6PD14U/H3RbDl2PxkawFcjI9hpSQtAQtT1yxiH2A5kIZM7tAAAvEe773WyOHSKyOL9zIpA5t+dIHuS7ZXjPXB7K/3I0gczKdoh4F3GE/HU2cOmtG0fN0fT6QoGMbn8j3/88T3vn9GAmnaTyEwB+CS9k+x35/iWjtvTnaHoqi8BGsyrW4mYdjc5F2ZrTQuvJheGywEa3RaSqR82oLcNAE9isrIB+ld6XPV5oyx8OD0UqA/7sNqRo2xlxdu2uW4IKPeocdBaUB9h24P8UXpcJdkkZASLiQyDIKjieeTW4LcHrzDJ743qSHWs1ukEb5yZz0brvXeaj8YFtwXw+2pDdhf4z0ze3GbarkYBmc57TLEDbjGf7jmIBcU6LhR302feaAdO1DOVoQMsYNurK8IXHNplum7UZFWg5wma5T62vdZ2URTPNqLZEcCzqTrnDpqdmU3fFXniAjCq9VDG+pdabvGS2wYv3swQM2kLdO7eW3YQS303IcTsoZ0N9jS5HyxU2LguKbSSl0e9hmxFsUeUOi4HJLAnQMoNtE6tPFtWKMhnQcoEtptxB1PT2o6oMRIJtzhS2JbE/mwgj32WSoHmAbZpYHXQa+Jk2yYKWCWxBN0+28KJF0qBlAlswuYPoQbeXhHqV2gnEKu3zOm12hCwN7lO5AFqlfAKx49rokhNs+gThlvBR0wUk1DJWG/ubKGequ+uX90PIiNrdV997Ty50ZgIbVUjdDLg29VieVbagpQqbT7nDIg+cZQ1awrB5OfratuyUNWgJw+Zc7iBec38tN88GNA+w1QxAs6mDlj7KTtnIGwGlj5WvOfoG/WktJIWFQ1mDxz5pXDyaB8/2FRs25XCVO3E2rbqU82UbOj3C1kTuC7UOunVddhLQ/OdsSgud89D5mwu5wyLfm3MBbdBuQjFhA4CfxI8X0L+srIXjluneTzhR9N2YDgBwq0tUlK0VHi71TXHctmqsptX2oR7MK3g6jFFyxlfdB9PPHhDxps+jCWgOJQYAoM5kdQqeZVsotkbEJy6gsc3RHPZvySXHc9gWUtlJcjTPEgMA+NinzNjj6bZsgXZanqn1bm0qHo2XxODc4wVqy97kvYtHcygxaK8WcofJbz2ebssWaJuzDLXe43lkMMBTYnAOnobMZ1ue9IxfAS0SbFSJYWx2c+2EPcXpYNgE7TmDPu44HASbNWiWMyrGYu8cG5WbRwNI/9ihVkDj4dU+4VjWSdEOvuu2ApqZvcB4jggavTfLFjREPBWc7zR0qeRtH2yfeU7yxjXTkyTvgTZbgoMNPlFPdDQ+0BVwnKd/Aq9k3uRPRLw16J+AxhS8sgMetwPTrpadBLRxgldr4E7gxbarZScBLY0wW0fO725MKgICWjphtg6Y3+0Q8c6wjQJaguBVHfBc53cviDgX0MR853cPphUBAU3yO6ernQQ0MVf5Xe9qJy6gZbFmYOz5nd5vbXVhxfvM9r3LmgGxvvzuUYfZwWUnNqFTTMyXTeQRiAloYsnYP6b+7B7jJdwAAAAAAElFTkSuQmCC",
progressEmptyUrl:
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAI0AAAASCAYAAABmbl0zAAAACXBIWXMAAAsSAAALEgHS3X78AAAATUlEQVRo3u3aIQ4AIAwEQUr4/5cPiyMVBDOj0M2mCKgkGdAwjYCudZzLOLiITYPrCdEgGkSDaEA0iAbRIBpEA6JBNHx1vnL7V4NNwxsbCNMGI3YImu0AAAAASUVORK5CYII=",
progressFullUrl:
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAI0AAAASCAYAAABmbl0zAAAACXBIWXMAAAsSAAALEgHS3X78AAAAO0lEQVRo3u3SQREAAAjDMMC/56EB3omEXjtJCg5GAkyDaTANpsE0YBpMg2kwDaYB02AaTINpMA2Yhr8FO18EIBpZMeQAAAAASUVORK5CYII="
},
Light: {
progressLogoUrl:
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAJoAAACCCAYAAAC+etHhAAAACXBIWXMAAAsSAAALEgHS3X78AAAIhUlEQVR42u2dzW3bSBTH/yFcgNIBg5wDMKccPa5ATAVxKkhUga0KbFdgdmCpglDHnFZAzsGyBHWgPYjcMIQlkm++3sy8P7AInI3tGfKnN+9rZt4cj0eIRLaVySMQudBV/4v3Hz7JE+GvAoACcA2gBLAC8Dj3h/z+9dMfaCKWyntgqfbrvpYU0LxaNBELLQZgFSP/XgW3dIq8LodlD665UgBqAU302nLYB2uh+fOWApqoWw7LC36WrtgvnwKaPanW0kzxs0wsvQsABwEtnbTD0pOFKQFUAlq8aYelIT9LV9cCWnxph9KCnxW1nyagjb+8zmoVzMeat/81Alo4flZntUJTCaZVgtRBy3G5vBOargU0fnoJ1GoF6ael2iZURghZF7AUAhqfl/EQ+YdIQGOg7xH4YmN+moDGwPn/FvkcFfwnj5MH7Y7JSzg4gE1A8/hJv/UI1gantuuP7Z9JLZ8ppTfuHINVA9i1f+4HwciP1CxaKqDdOnj4HVibAVivBSO2l+8CzMpRKYC2sGTN+harnhGMuLKsCoy6OVIAzVQ6gwLWUC7zd9cCmjvloKcz9i1QW5jpx1dwm0wtAXwV0NzoYYY/tB9YrYOFsVC06flcc12GYsRfFNB6TvwXwsPlANZwHtQa5Kr1626JVlRAm/Byng3+vKa1Di7AGsJPtWbrdtxbImhs2oauIofs0FqE2mOoT61GND1IqD4imwJ7FjFkAHDTRl6+IMvbqJdqzQ69Dwx1CVQCml3IvjLwT6hzqV9JTWwFNJ6QVZ7nozRe8voMfBQtBbR4IdOxZtUZqKgBTAEGHSuZQGZF1GpEF7xcWlKDXD4zgcxKOoNaz3wasVpUP22ZMmgxQgbopTPuJwQJYtEEMq10xmoijA1xXHlqoMUKmU4AUONUtZiiDfF3qJRAixkypfEy53RZ7EL00zKBzLs1e5y5HIpFcwRZxRAynXTGmrjUUqLhImbQTEP2lRlkOumMfj1zjqhpjjJW0GKHDJjXXNnXHvQWnpr4fdcxgpYCZAXoe0V19nbuQUtzqNhASwGyzppRtIH+PgTq95exgJYKZCXRQozVM6eKmua4jgG0VCDTsWZPMNOIGVSaIxPISLoHLZ3RwFwPP7Xr1kvbUCaQzdYC9L2i1HRG8H5aJpCRlswFEYrK8Fio+bQ8NNBMQrYPADJf6YxL8B6IH+hgQDMN2Q34ixoAVLC3UWbu8rmGh11hGSPIDswh853OOKc5aQ6TwYh10FKETGe3+ZPl+c1Jc6x9PetMIJskandGg/H2bF01E5dCG8GIFdBShSzXSGe4Cm6mWLWVz4d45QGyTi8IQ7lGOqN2NMYdLu9VeITnXftXniArEL9cpmrqkWBk7fthZB4gS0Fz27N1dbgAm7cAYCpoAhn9pfuwILszvjCL89Eygcy4Vp4syIZbADAGmkCmF01XHn93H/DKYTAyG7RcINPSk+ff3wdry+nBDEFrwL+wzVm+b87LGY1ldOmsBDaydLo7TEDWTxspj2OZHAwIbHRR+9V0pRiNZTJoAhtdC9BPFNLR8sxY7riDJrDRdQf3XazqzN9/B4NKzJQSVBeum4xGh6E4Z+VEaJ7hrplzbMPJAzw3lk4tqtuA7TPC6d74l2hhFNzkssoJY7lFIG1CJpfRAqdbeBcBgNaAXsZxlZOcsinYa2Awt/HRNGyhJIephencQWCwwLQWc19BCgk007CVgcCm0/dPPTxZNwjgEqSQQTMN220gsFWgNQ/aTjHMPTL0OSTQUoWNatVsphgU4d8Ht1M9Ndhq0A9XsXGfek5cCovQQEsRNqpVs2FJSo0PTHCgpQZbA3oHrWmrRjnr7BAyaKnBRt0TkMPsPk+KRat9PDDTB/GlApvOvoBvMJPuUMTv28UAWkqwVaCf929iCaXehLKJBbSUYFtrzEk38qNYtAae7pfPLH/iTcJ2zxC0GvRCtY5Vy4mg1r4elO0LLUzCdgdGrck9UbfXKY35UP2zbaygmYbtmSFsB9B3P1HroNQj3OuYQUsBtnvQ0x2UjgpKWsNrs6nLaxRjh41aMfiGeWUk6vHtXvd5ur4YNmbYqNfuzO3uCKbs5BO02GGjWrXbGQ5+MGUn36DFDJvO6T1TrNoCtIiz9v1gMo+/O1bYqG3fasIcFHFMu5RBixU2nTro2AYSalpjkzposcJG7e4Y20BCCQQaeCo7cQPNBmyKwZyo8zm3gSQHrZu25vCCuYBmGrYX+D8GoNZ4yQ+GrBnA5Jw0TqCZhG2B0wZl37BR5/LadUDBlZ04g2YDttLjXBqYa/umuANszjjhCJpp2F4AHFvo7j34b4/El90/1E8hwLJTX1fgq6r984sGZMMTEBX+JEZrnPJLOr7U1HTHCrTmzYc2NUHtpq25vMw3x+Px/y/ef/iEyPRjhgWzDd4/RJ/xsZ1DQQD87bn/+fvXTwHNoFQLG9UamARPZywUbXA6GowFaBniVg16q3W3zP4w5OPpjIWiHacXEbtFA+gH6dmweHm7hLo4p+wdLlQExKLxSjGYtngN3Fx60YBB2Sk10HRSDDbAc3HzXc3tBaQCms5BeqbBK2D/9rsttxeQgo9mIsUQmt6OWXDx0exqlcAcWR6tnxpocyLEULXlOKjUQAPivwmmFtB4qAGT658tBT0CGiOxuNA+FWuWMmhdwfljC10sftuO68CukLb2+PvugBKnTlaFMNMgGwEtnBfVvazFALw8AN+zEdDCXF4r/Om4yAfgcbswjfXynwlPs6PVz61/d8PMv9tyfnhi0fQsSN1bZpVn/64W0NJYZvv+XT4Az7Z/x/5GZwHN3jLb9++KAXim/bst9wcioLlRl0bpKhJqAF7Uy6aAFod/dxDQRC78uzqESQpo4ft3OwFNZNO/W7YQbkKYxF+t3CKRLUllQCSgieLRf80sS5fCDVbiAAAAAElFTkSuQmCC",
progressEmptyUrl:
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAI0AAAASCAYAAABmbl0zAAAACXBIWXMAAAsSAAALEgHS3X78AAAAUUlEQVRo3u3aMQ4AEAxAUcRJzGb3v1mt3cQglvcmc/NTA3XMFQUuNCPgVk/nahwchE2D6wnRIBpEg2hANIgG0SAaRAOiQTR8lV+5/avBpuGNDcz6A6oq1CgNAAAAAElFTkSuQmCC",
progressFullUrl:
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAI0AAAASCAYAAABmbl0zAAAACXBIWXMAAAsSAAALEgHS3X78AAAAQElEQVRo3u3SMREAMAgAsVIpnTvj3xlogDmR8PfxftaBgSsBpsE0mAbTYBowDabBNJgG04BpMA2mwTSYBkzDXgP/hgGnr4PpeAAAAABJRU5ErkJggg=="
}
},
handler: function(e, t) {
if (e.Module) {
var r = UnityLoader.Progress.Styles[e.Module.splashScreenStyle],
n = e.Module.progressLogoUrl
? e.Module.resolveBuildUrl(e.Module.progressLogoUrl)
: r.progressLogoUrl,
o = e.Module.progressEmptyUrl
? e.Module.resolveBuildUrl(e.Module.progressEmptyUrl)
: r.progressEmptyUrl,
a = e.Module.progressFullUrl
? e.Module.resolveBuildUrl(e.Module.progressFullUrl)
: r.progressFullUrl,
i =
"position: absolute; left: 50%; top: 50%; -webkit-transform: translate(-50%, -50%); transform: translate(-50%, -50%);";
e.logo ||
((e.logo = document.createElement("div")),
(e.logo.style.cssText =
i +
"background: url('" +
n +
"') no-repeat center / contain; width: 154px; height: 130px;"),
e.container.appendChild(e.logo)),
e.progress ||
((e.progress = document.createElement("div")),
(e.progress.style.cssText =
i + " height: 18px; width: 141px; margin-top: 90px;"),
(e.progress.empty = document.createElement("div")),
(e.progress.empty.style.cssText =
"background: url('" +
o +
"') no-repeat right / cover; float: right; width: 100%; height: 100%; display: inline-block;"),
e.progress.appendChild(e.progress.empty),
(e.progress.full = document.createElement("div")),
(e.progress.full.style.cssText =
"background: url('" +
a +
"') no-repeat left / cover; float: left; width: 0%; height: 100%; display: inline-block;"),
e.progress.appendChild(e.progress.full),
e.container.appendChild(e.progress)),
(e.progress.full.style.width = 100 * t + "%"),
(e.progress.empty.style.width = 100 * (1 - t) + "%"),
1 == t && (e.logo.style.display = e.progress.style.display = "none");
}
},
update: function(e, t, r) {
var n = e.buildDownloadProgress[t];
n ||
(n = e.buildDownloadProgress[t] = {
started: !1,
finished: !1,
lengthComputable: !1,
total: 0,
loaded: 0
}),
"object" != typeof r ||
("progress" != r.type && "load" != r.type) ||
(n.started ||
((n.started = !0),
(n.lengthComputable = r.lengthComputable),
(n.total = r.total)),
(n.loaded = r.loaded),
"load" == r.type && (n.finished = !0));
var o = 0,
a = 0,
i = 0,
s = 0,
d = 0;
for (var t in e.buildDownloadProgress) {
var n = e.buildDownloadProgress[t];
if (!n.started) return 0;
i++,
n.lengthComputable
? ((o += n.loaded), (a += n.total), s++)
: n.finished || d++;
}
var l = i ? (i - d - (a ? (s * (a - o)) / a : 0)) / i : 0;
e.gameInstance.onProgress(e.gameInstance, 0.9 * l);
}
},
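  // One-shot user-agent sniffing: browser name and version, OS and OS
  // version, plus feature probes for WebGL level, pointer lock, fullscreen,
  // SharedArrayBuffer threads and WebAssembly.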
SystemInfo: (function() {
var e,
t,
r,
n = "-",
o = navigator.appVersion,
a = navigator.userAgent,
i = navigator.appName,
s = navigator.appVersion,
d = parseInt(navigator.appVersion, 10);
(t = a.indexOf("Opera")) != -1
? ((i = "Opera"),
(s = a.substring(t + 6)),
(t = a.indexOf("Version")) != -1 && (s = a.substring(t + 8)))
: (t = a.indexOf("MSIE")) != -1
? ((i = "Microsoft Internet Explorer"), (s = a.substring(t + 5)))
: (t = a.indexOf("Edge")) != -1
? ((i = "Edge"), (s = a.substring(t + 5)))
: (t = a.indexOf("Chrome")) != -1
? ((i = "Chrome"), (s = a.substring(t + 7)))
: (t = a.indexOf("Safari")) != -1
? ((i = "Safari"),
(s = a.substring(t + 7)),
(t = a.indexOf("Version")) != -1 && (s = a.substring(t + 8)))
: (t = a.indexOf("Firefox")) != -1
? ((i = "Firefox"), (s = a.substring(t + 8)))
: a.indexOf("Trident/") != -1
? ((i = "Microsoft Internet Explorer"),
(s = a.substring(a.indexOf("rv:") + 3)))
: (e = a.lastIndexOf(" ") + 1) < (t = a.lastIndexOf("/")) &&
((i = a.substring(e, t)),
(s = a.substring(t + 1)),
i.toLowerCase() == i.toUpperCase() && (i = navigator.appName)),
(r = s.indexOf(";")) != -1 && (s = s.substring(0, r)),
(r = s.indexOf(" ")) != -1 && (s = s.substring(0, r)),
(r = s.indexOf(")")) != -1 && (s = s.substring(0, r)),
(d = parseInt("" + s, 10)),
isNaN(d)
? ((s = "" + parseFloat(navigator.appVersion)),
(d = parseInt(navigator.appVersion, 10)))
: (s = "" + parseFloat(s));
var l = /Mobile|mini|Fennec|Android|iP(ad|od|hone)/.test(o),
u = n,
c = [
{ s: "Windows 3.11", r: /Win16/ },
{ s: "Windows 95", r: /(Windows 95|Win95|Windows_95)/ },
{ s: "Windows ME", r: /(Win 9x 4.90|Windows ME)/ },
{ s: "Windows 98", r: /(Windows 98|Win98)/ },
{ s: "Windows CE", r: /Windows CE/ },
{ s: "Windows 2000", r: /(Windows NT 5.0|Windows 2000)/ },
{ s: "Windows XP", r: /(Windows NT 5.1|Windows XP)/ },
{ s: "Windows Server 2003", r: /Windows NT 5.2/ },
{ s: "Windows Vista", r: /Windows NT 6.0/ },
{ s: "Windows 7", r: /(Windows 7|Windows NT 6.1)/ },
{ s: "Windows 8.1", r: /(Windows 8.1|Windows NT 6.3)/ },
{ s: "Windows 8", r: /(Windows 8|Windows NT 6.2)/ },
{ s: "Windows 10", r: /(Windows 10|Windows NT 10.0)/ },
{
s: "Windows NT 4.0",
r: /(Windows NT 4.0|WinNT4.0|WinNT|Windows NT)/
},
{ s: "Windows ME", r: /Windows ME/ },
{ s: "Android", r: /Android/ },
{ s: "Open BSD", r: /OpenBSD/ },
{ s: "Sun OS", r: /SunOS/ },
{ s: "Linux", r: /(Linux|X11)/ },
{ s: "iOS", r: /(iPhone|iPad|iPod)/ },
{ s: "Mac OS X", r: /Mac OS X/ },
{ s: "Mac OS", r: /(MacPPC|MacIntel|Mac_PowerPC|Macintosh)/ },
{ s: "QNX", r: /QNX/ },
{ s: "UNIX", r: /UNIX/ },
{ s: "BeOS", r: /BeOS/ },
{ s: "OS/2", r: /OS\/2/ },
{
s: "Search Bot",
r: /(nuhk|Googlebot|Yammybot|Openbot|Slurp|MSNBot|Ask Jeeves\/Teoma|ia_archiver)/
}
];
for (var f in c) {
var h = c[f];
if (h.r.test(a)) {
u = h.s;
break;
}
}
var p = n;
switch (
(/Windows/.test(u) && ((p = /Windows (.*)/.exec(u)[1]), (u = "Windows")),
u)
) {
case "Mac OS X":
p = /Mac OS X (10[\.\_\d]+)/.exec(a)[1];
break;
case "Android":
p = /Android ([\.\_\d]+)/.exec(a)[1];
break;
case "iOS":
(p = /OS (\d+)_(\d+)_?(\d+)?/.exec(o)),
(p = p[1] + "." + p[2] + "." + (0 | p[3]));
}
return {
width: screen.width ? screen.width : 0,
height: screen.height ? screen.height : 0,
browser: i,
browserVersion: s,
mobile: l,
os: u,
osVersion: p,
gpu: (function() {
          var e = document.createElement("canvas"),
            t = e.getContext("webgl") || e.getContext("experimental-webgl");
if (t) {
var r = t.getExtension("WEBGL_debug_renderer_info");
if (r) return t.getParameter(r.UNMASKED_RENDERER_WEBGL);
}
return n;
})(),
language: window.navigator.userLanguage || window.navigator.language,
hasWebGL: (function() {
if (!window.WebGLRenderingContext) return 0;
var e = document.createElement("canvas"),
t = e.getContext("webgl2");
return t
? 2
: ((t = e.getContext("experimental-webgl2")),
t
? 2
: ((t = e.getContext("webgl")),
t || (t = e.getContext("experimental-webgl")) ? 1 : 0));
})(),
hasCursorLock: (function() {
var e = document.createElement("canvas");
return e.requestPointerLock ||
e.mozRequestPointerLock ||
e.webkitRequestPointerLock ||
e.msRequestPointerLock
? 1
: 0;
})(),
hasFullscreen: (function() {
var e = document.createElement("canvas");
return (e.requestFullScreen ||
e.mozRequestFullScreen ||
e.msRequestFullscreen ||
e.webkitRequestFullScreen) &&
(i.indexOf("Safari") == -1 || s >= 10.1)
? 1
: 0;
})(),
hasThreads: "undefined" != typeof SharedArrayBuffer,
hasWasm:
"object" == typeof WebAssembly &&
"function" == typeof WebAssembly.validate &&
"function" == typeof WebAssembly.compile
};
})(),
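  // Warns (but allows continuing) on mobile devices and unlisted browsers;
  // blocks entirely when WebGL is unavailable.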
compatibilityCheck: function(e, t, r) {
UnityLoader.SystemInfo.hasWebGL
? UnityLoader.SystemInfo.mobile
? e.popup(
"Please note that Unity WebGL is not currently supported on mobiles. Press OK if you wish to continue anyway.",
[{ text: "OK", callback: t }]
)
: ["Edge", "Firefox", "Chrome", "Safari"].indexOf(
UnityLoader.SystemInfo.browser
) == -1
? e.popup(
"Please note that your browser is not currently supported for this Unity WebGL content. Press OK if you wish to continue anyway.",
[{ text: "OK", callback: t }]
)
: t()
: e.popup("Your browser does not support WebGL", [
{ text: "OK", callback: r }
]);
},
Blobs: {},
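  // Evaluates downloaded build code by injecting it as a blob <script>; the
  // result is registered on UnityLoader under the MD5 hash of the code.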
loadCode: function(e, t, r) {
var n = [].slice
.call(UnityLoader.Cryptography.md5(e))
.map(function(e) {
return ("0" + e.toString(16)).substr(-2);
})
.join(""),
o = document.createElement("script"),
a = URL.createObjectURL(
new Blob(['UnityLoader["' + n + '"]=', e], { type: "text/javascript" })
);
(UnityLoader.Blobs[a] = r),
(o.src = a),
(o.onload = function() {
URL.revokeObjectURL(a), t(n), delete o.onload;
}),
document.body.appendChild(o);
},
setupIndexedDBJob: function(e, t) {
function r(n) {
r.called || ((r.called = !0), (e.indexedDB = n), t.complete());
}
try {
var n =
window.indexedDB ||
window.mozIndexedDB ||
window.webkitIndexedDB ||
window.msIndexedDB,
o = n.open("/idbfs-test");
(o.onerror = function(e) {
e.preventDefault(), r();
}),
(o.onsuccess = function() {
o.result.close(), r(n);
}),
setTimeout(r, 1e3);
} catch (e) {
r();
}
},
processWasmCodeJob: function(e, t) {
(e.wasmBinary = UnityLoader.Job.result(e, "downloadWasmCode")),
t.complete();
},
processWasmFrameworkJob: function(e, t) {
var r = UnityLoader.Job.result(e, "downloadWasmFramework");
UnityLoader.loadCode(
r,
function(n) {
var o = new Blob([r], { type: "application/javascript" });
(e.mainScriptUrlOrBlob = o), UnityLoader[n](e), t.complete();
},
{ Module: e, url: e.wasmFrameworkUrl }
);
},
processAsmCodeJob: function(e, t) {
var r = UnityLoader.Job.result(e, "downloadAsmCode");
UnityLoader.loadCode(
Math.fround ? r : UnityLoader.Utils.optimizeMathFround(r),
function(r) {
(e.asm = UnityLoader[r]), t.complete();
},
{ Module: e, url: e.asmCodeUrl }
);
},
processAsmFrameworkJob: function(e, t) {
var r = UnityLoader.Job.result(e, "downloadAsmFramework");
UnityLoader.loadCode(
r,
function(n) {
var o = new Blob([r], { type: "application/javascript" });
(e.mainScriptUrlOrBlob = o), UnityLoader[n](e), t.complete();
},
{ Module: e, url: e.asmFrameworkUrl }
);
},
processMemoryInitializerJob: function(e, t) {
(e.memoryInitializerRequest.status = 200),
(e.memoryInitializerRequest.response = UnityLoader.Job.result(
e,
"downloadMemoryInitializer"
)),
e.memoryInitializerRequest.callback &&
e.memoryInitializerRequest.callback(),
t.complete();
},
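  // Parses the UnityWebData1.0 container: a header followed by a table of
  // (offset, size, name) entries, each materialized into the Emscripten FS.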
processDataJob: function(e, t) {
var r = UnityLoader.Job.result(e, "downloadData"),
n = new DataView(r.buffer, r.byteOffset, r.byteLength),
o = 0,
a = "UnityWebData1.0\0";
      // `!x == a` compared a boolean against the magic string and could
      // never throw; use a real inequality so bad headers are rejected
      if (String.fromCharCode.apply(null, r.subarray(o, o + a.length)) != a)
        throw "unknown data format";
o += a.length;
var i = n.getUint32(o, !0);
for (o += 4; o < i; ) {
var s = n.getUint32(o, !0);
o += 4;
var d = n.getUint32(o, !0);
o += 4;
var l = n.getUint32(o, !0);
o += 4;
var u = String.fromCharCode.apply(null, r.subarray(o, o + l));
o += l;
for (
var c = 0, f = u.indexOf("/", c) + 1;
f > 0;
c = f, f = u.indexOf("/", c) + 1
)
e.FS_createPath(u.substring(0, c), u.substring(c, f - 1), !0, !0);
e.FS_createDataFile(u, null, r.subarray(s, s + d), !0, !0, !0);
}
e.removeRunDependency("processDataJob"), t.complete();
},
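  // Downloads one build file (optionally through the UnityCache XHR wrapper
  // below) and decompresses the response before completing the job.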
downloadJob: function(e, t) {
var r = t.parameters.objParameters
? new UnityLoader.UnityCache.XMLHttpRequest(t.parameters.objParameters)
: new XMLHttpRequest();
r.open("GET", t.parameters.url),
(r.responseType = "arraybuffer"),
(r.onload = function() {
UnityLoader.Compression.decompress(new Uint8Array(r.response), function(
e
) {
t.complete(e);
});
}),
t.parameters.onprogress &&
r.addEventListener("progress", t.parameters.onprogress),
t.parameters.onload && r.addEventListener("load", t.parameters.onload),
r.send();
},
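  // Registers a download job for one build URL, feeding progress events into
  // the tracker; cacheControl settings enable the UnityCache wrapper.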
scheduleBuildDownloadJob: function(e, t, r) {
UnityLoader.Progress.update(e, t),
UnityLoader.Job.schedule(e, t, [], UnityLoader.downloadJob, {
url: e.resolveBuildUrl(e[r]),
onprogress: function(r) {
UnityLoader.Progress.update(e, t, r);
},
onload: function(r) {
UnityLoader.Progress.update(e, t, r);
},
objParameters:
e.companyName &&
e.productName &&
e.cacheControl &&
(e.cacheControl[r] || e.cacheControl.default)
? {
companyName: e.companyName,
productName: e.productName,
cacheControl: e.cacheControl[r] || e.cacheControl.default
}
: null
});
},
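  // Wires up the full job graph for either the wasm or the asm.js build:
  // code, optional memory initializer, framework, data file and IndexedDB.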
loadModule: function(e) {
if (
((e.useWasm = e.wasmCodeUrl && UnityLoader.SystemInfo.hasWasm), e.useWasm)
)
UnityLoader.scheduleBuildDownloadJob(
e,
"downloadWasmCode",
"wasmCodeUrl"
),
UnityLoader.Job.schedule(
e,
"processWasmCode",
["downloadWasmCode"],
UnityLoader.processWasmCodeJob
),
e.wasmMemoryUrl &&
(UnityLoader.scheduleBuildDownloadJob(
e,
"downloadMemoryInitializer",
"wasmMemoryUrl"
),
UnityLoader.Job.schedule(
e,
"processMemoryInitializer",
["downloadMemoryInitializer"],
UnityLoader.processMemoryInitializerJob
),
(e.memoryInitializerRequest = {
addEventListener: function(t, r) {
e.memoryInitializerRequest.callback = r;
}
})),
UnityLoader.scheduleBuildDownloadJob(
e,
"downloadWasmFramework",
"wasmFrameworkUrl"
),
UnityLoader.Job.schedule(
e,
"processWasmFramework",
["downloadWasmFramework", "processWasmCode", "setupIndexedDB"],
UnityLoader.processWasmFrameworkJob
);
else {
if (!e.asmCodeUrl)
throw "WebAssembly support is not detected in this browser.";
UnityLoader.scheduleBuildDownloadJob(e, "downloadAsmCode", "asmCodeUrl"),
UnityLoader.Job.schedule(
e,
"processAsmCode",
["downloadAsmCode"],
UnityLoader.processAsmCodeJob
),
UnityLoader.scheduleBuildDownloadJob(
e,
"downloadMemoryInitializer",
"asmMemoryUrl"
),
UnityLoader.Job.schedule(
e,
"processMemoryInitializer",
["downloadMemoryInitializer"],
UnityLoader.processMemoryInitializerJob
),
(e.memoryInitializerRequest = {
addEventListener: function(t, r) {
e.memoryInitializerRequest.callback = r;
}
}),
e.asmLibraryUrl &&
(e.dynamicLibraries = [e.asmLibraryUrl].map(e.resolveBuildUrl)),
UnityLoader.scheduleBuildDownloadJob(
e,
"downloadAsmFramework",
"asmFrameworkUrl"
),
UnityLoader.Job.schedule(
e,
"processAsmFramework",
["downloadAsmFramework", "processAsmCode", "setupIndexedDB"],
UnityLoader.processAsmFrameworkJob
);
}
UnityLoader.scheduleBuildDownloadJob(e, "downloadData", "dataUrl"),
UnityLoader.Job.schedule(
e,
"setupIndexedDB",
[],
UnityLoader.setupIndexedDBJob
),
e.preRun.push(function() {
e.addRunDependency("processDataJob"),
UnityLoader.Job.schedule(
e,
"processData",
["downloadData"],
UnityLoader.processDataJob
);
});
},
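  // Public entry point: prepares the container and canvas, fetches the build
  // description JSON, verifies graphics API support and then hands off to
  // loadModule().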
instantiate: function(e, t, r) {
function n(e, r) {
if ("string" == typeof e && !(e = document.getElementById(e))) return !1;
(e.innerHTML = ""),
(e.style.border = e.style.margin = e.style.padding = 0),
"static" == getComputedStyle(e).getPropertyValue("position") &&
(e.style.position = "relative"),
(e.style.width = r.width || e.style.width),
(e.style.height = r.height || e.style.height),
(r.container = e);
var n = r.Module;
return (
(n.canvas = document.createElement("canvas")),
(n.canvas.style.width = "100%"),
(n.canvas.style.height = "100%"),
n.canvas.addEventListener("contextmenu", function(e) {
e.preventDefault();
}),
(n.canvas.id = "#canvas"),
e.appendChild(n.canvas),
r.compatibilityCheck(
r,
function() {
var t = new XMLHttpRequest();
t.open("GET", r.url, !0),
(t.responseType = "text"),
(t.onerror = function() {
n.print("Could not download " + r.url),
0 == document.URL.indexOf("file:") &&
alert(
"It seems your browser does not support running Unity WebGL content from file:// urls. Please upload it to an http server, or try a different browser."
);
}),
(t.onload = function() {
var o = JSON.parse(t.responseText);
for (var a in o) "undefined" == typeof n[a] && (n[a] = o[a]);
for (var i = !1, s = 0; s < n.graphicsAPI.length; s++) {
var d = n.graphicsAPI[s];
"WebGL 2.0" == d && 2 == UnityLoader.SystemInfo.hasWebGL
? (i = !0)
: "WebGL 1.0" == d && UnityLoader.SystemInfo.hasWebGL >= 1
? (i = !0)
: n.print("Warning: Unsupported graphics API " + d);
}
return i
? ((e.style.background = n.backgroundUrl
? "center/cover url('" +
n.resolveBuildUrl(n.backgroundUrl) +
"')"
: n.backgroundColor
? " " + n.backgroundColor
: ""),
r.onProgress(r, 0),
void UnityLoader.loadModule(n))
: void r.popup(
"Your browser does not support any of the required graphics API for this content: " +
n.graphicsAPI,
[{ text: "OK" }]
);
}),
t.send();
},
function() {
n.printErr(
"Instantiation of the '" +
t +
"' terminated due to the failed compatibility check."
);
}
),
!0
);
}
function o(e) {
return (
(o.link = o.link || document.createElement("a")),
(o.link.href = e),
o.link.href
);
}
var a = {
url: t,
onProgress: UnityLoader.Progress.handler,
compatibilityCheck: UnityLoader.compatibilityCheck,
Module: {
graphicsAPI: ["WebGL 2.0", "WebGL 1.0"],
onAbort: function(e) {
throw (void 0 !== e
? (this.print(e), this.printErr(e), (e = JSON.stringify(e)))
: (e = ""),
"abort(" + e + ") at " + this.stackTrace());
},
preRun: [],
postRun: [],
print: function(e) {
console.log(e);
},
printErr: function(e) {
console.error(e);
},
Jobs: {},
buildDownloadProgress: {},
resolveBuildUrl: function(e) {
return e.match(/(http|https|ftp|file):\/\//)
? e
: t.substring(0, t.lastIndexOf("/") + 1) + e;
},
streamingAssetsUrl: function() {
return o(this.resolveBuildUrl("../StreamingAssets"));
},
pthreadMainPrefixURL: "Build/"
},
SetFullscreen: function() {
if (a.Module.SetFullscreen)
return a.Module.SetFullscreen.apply(a.Module, arguments);
},
SendMessage: function() {
if (a.Module.SendMessage)
return a.Module.SendMessage.apply(a.Module, arguments);
}
};
(a.Module.gameInstance = a),
(a.popup = function(e, t) {
return UnityLoader.Error.popup(a, e, t);
}),
a.Module.postRun.push(function() {
a.onProgress(a, 1);
});
for (var i in r)
if ("Module" == i) for (var s in r[i]) a.Module[s] = r[i][s];
else a[i] = r[i];
return (
n(e, a) ||
document.addEventListener("DOMContentLoaded", function() {
n(e, a);
}),
a
);
},
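  // Utils.optimizeMathFround rewrites downloaded asm.js in place, blanking
  // out the local alias of Math.fround at call sites for browsers without
  // native fround support.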
Utils: {
assert: function(e, t) {
e || abort("Assertion failed: " + t);
},
optimizeMathFround: function(e, t) {
console.log("optimizing out Math.fround calls");
for (
var r = {
LOOKING_FOR_MODULE: 0,
SCANNING_MODULE_VARIABLES: 1,
SCANNING_MODULE_FUNCTIONS: 2
},
n = [
"EMSCRIPTEN_START_ASM",
"EMSCRIPTEN_START_FUNCS",
"EMSCRIPTEN_END_FUNCS"
],
o = "var",
a = "global.Math.fround;",
i = 0,
s = t ? r.LOOKING_FOR_MODULE : r.SCANNING_MODULE_VARIABLES,
d = 0,
l = 0;
s <= r.SCANNING_MODULE_FUNCTIONS && i < e.length;
i++
)
if (
47 == e[i] &&
47 == e[i + 1] &&
32 == e[i + 2] &&
String.fromCharCode.apply(
null,
e.subarray(i + 3, i + 3 + n[s].length)
) === n[s]
)
s++;
else if (
s != r.SCANNING_MODULE_VARIABLES ||
l ||
61 != e[i] ||
String.fromCharCode.apply(
null,
e.subarray(i + 1, i + 1 + a.length)
) !== a
) {
if (l && 40 == e[i]) {
for (var u = 0; u < l && e[i - 1 - u] == e[d - u]; ) u++;
if (u == l) {
var c = e[i - 1 - u];
if (
c < 36 ||
(36 < c && c < 48) ||
(57 < c && c < 65) ||
(90 < c && c < 95) ||
(95 < c && c < 97) ||
122 < c
)
for (; u; u--) e[i - u] = 32;
}
}
} else {
for (d = i - 1; 32 != e[d - l]; ) l++;
(l &&
String.fromCharCode.apply(
null,
e.subarray(d - l - o.length, d - l)
) === o) ||
(d = l = 0);
}
return e;
}
},
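  // IndexedDB-backed cache for build files: stores arraybuffer XHR responses
  // keyed by URL and revalidates them via Last-Modified/ETag (or serves them
  // immediately when cacheControl is "immutable").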
UnityCache: (function() {
function e(e) {
console.log("[UnityCache] " + e);
}
function t(e) {
return (
(t.link = t.link || document.createElement("a")),
(t.link.href = e),
t.link.href
);
}
function r(e) {
var t = window.location.href.match(/^[a-z]+:\/\/[^\/]+/);
return !t || e.lastIndexOf(t[0], 0);
}
function n() {
function t(t) {
if ("undefined" == typeof n.database)
for (
n.database = t,
n.database || e("indexedDB database could not be opened");
n.queue.length;
) {
var r = n.queue.shift();
n.database
? n.execute.apply(n, r)
: "function" == typeof r.onerror &&
r.onerror(new Error("operation cancelled"));
}
}
function r() {
var e = o.open(i.name, i.version);
(e.onupgradeneeded = function(e) {
var t = e.target.result;
t.objectStoreNames.contains(d.name) || t.createObjectStore(d.name);
}),
(e.onsuccess = function(e) {
t(e.target.result);
}),
(e.onerror = function() {
t(null);
});
}
var n = this;
n.queue = [];
try {
var o =
window.indexedDB ||
window.mozIndexedDB ||
window.webkitIndexedDB ||
window.msIndexedDB,
a = o.open(i.name);
(a.onupgradeneeded = function(e) {
var t = e.target.result.createObjectStore(s.name, { keyPath: "url" });
[
"version",
"company",
"product",
"updated",
"revalidated",
"accessed"
].forEach(function(e) {
t.createIndex(e, e);
});
}),
(a.onsuccess = function(e) {
var n = e.target.result;
n.version < i.version ? (n.close(), r()) : t(n);
}),
(a.onerror = function() {
t(null);
}),
setTimeout(a.onerror, 1e3);
} catch (e) {
t(null);
}
}
function o(e, t, r, n, o) {
var a = {
url: e,
version: s.version,
company: t,
product: r,
updated: n,
revalidated: n,
accessed: n,
responseHeaders: {},
xhr: {}
};
return (
o &&
(["Last-Modified", "ETag"].forEach(function(e) {
a.responseHeaders[e] = o.getResponseHeader(e);
}),
["responseURL", "status", "statusText", "response"].forEach(function(
e
) {
a.xhr[e] = o[e];
})),
a
);
}
function a(t) {
(this.cache = { enabled: !1 }),
t &&
((this.cache.control = t.cacheControl),
(this.cache.company = t.companyName),
(this.cache.product = t.productName)),
(this.xhr = new XMLHttpRequest(t)),
this.xhr.addEventListener(
"load",
function() {
var t = this.xhr,
r = this.cache;
r.enabled &&
!r.revalidated &&
(304 == t.status
? ((r.result.revalidated = r.result.accessed),
(r.revalidated = !0),
l.execute(s.name, "put", [r.result]),
e(
"'" +
r.result.url +
"' successfully revalidated and served from the indexedDB cache"
))
: 200 == t.status
? ((r.result = o(
r.result.url,
r.company,
r.product,
r.result.accessed,
t
)),
(r.revalidated = !0),
l.execute(
s.name,
"put",
[r.result],
function(t) {
e(
"'" +
r.result.url +
"' successfully downloaded and stored in the indexedDB cache"
);
},
function(t) {
e(
"'" +
r.result.url +
"' successfully downloaded but not stored in the indexedDB cache due to the error: " +
t
);
}
))
: e(
"'" +
r.result.url +
"' request failed with status: " +
t.status +
" " +
t.statusText
));
}.bind(this)
);
}
var i = { name: "UnityCache", version: 2 },
s = { name: "XMLHttpRequest", version: 1 },
d = { name: "WebAssembly", version: 1 };
n.prototype.execute = function(e, t, r, n, o) {
if (this.database)
try {
var a = this.database
.transaction(
[e],
["put", "delete", "clear"].indexOf(t) != -1
? "readwrite"
: "readonly"
)
.objectStore(e);
"openKeyCursor" == t && ((a = a.index(r[0])), (r = r.slice(1)));
var i = a[t].apply(a, r);
"function" == typeof n &&
(i.onsuccess = function(e) {
n(e.target.result);
}),
(i.onerror = o);
} catch (e) {
"function" == typeof o && o(e);
}
else
"undefined" == typeof this.database
? this.queue.push(arguments)
: "function" == typeof o && o(new Error("indexedDB access denied"));
};
var l = new n();
(a.prototype.send = function(t) {
var n = this.xhr,
o = this.cache,
a = arguments;
return (
(o.enabled = o.enabled && "arraybuffer" == n.responseType && !t),
o.enabled
? void l.execute(
s.name,
"get",
[o.result.url],
function(t) {
if (!t || t.version != s.version)
return void n.send.apply(n, a);
if (
((o.result = t),
(o.result.accessed = Date.now()),
"immutable" == o.control)
)
(o.revalidated = !0),
l.execute(s.name, "put", [o.result]),
n.dispatchEvent(new Event("load")),
e(
"'" +
o.result.url +
"' served from the indexedDB cache without revalidation"
);
else if (
r(o.result.url) &&
(o.result.responseHeaders["Last-Modified"] ||
o.result.responseHeaders.ETag)
) {
var i = new XMLHttpRequest();
i.open("HEAD", o.result.url),
(i.onload = function() {
(o.revalidated = ["Last-Modified", "ETag"].every(function(
e
) {
return (
!o.result.responseHeaders[e] ||
o.result.responseHeaders[e] == i.getResponseHeader(e)
);
})),
o.revalidated
? ((o.result.revalidated = o.result.accessed),
l.execute(s.name, "put", [o.result]),
n.dispatchEvent(new Event("load")),
e(
"'" +
o.result.url +
"' successfully revalidated and served from the indexedDB cache"
))
: n.send.apply(n, a);
}),
i.send();
} else
o.result.responseHeaders["Last-Modified"]
? (n.setRequestHeader(
"If-Modified-Since",
o.result.responseHeaders["Last-Modified"]
),
n.setRequestHeader("Cache-Control", "no-cache"))
: o.result.responseHeaders.ETag &&
(n.setRequestHeader(
"If-None-Match",
o.result.responseHeaders.ETag
),
n.setRequestHeader("Cache-Control", "no-cache")),
n.send.apply(n, a);
},
function(e) {
n.send.apply(n, a);
}
)
: n.send.apply(n, a)
);
}),
(a.prototype.open = function(e, r, n, a, i) {
return (
(this.cache.result = o(
t(r),
this.cache.company,
this.cache.product,
Date.now()
)),
(this.cache.enabled =
["must-revalidate", "immutable"].indexOf(this.cache.control) !=
-1 &&
"GET" == e &&
this.cache.result.url.match("^https?://") &&
("undefined" == typeof n || n) &&
"undefined" == typeof a &&
"undefined" == typeof i),
(this.cache.revalidated = !1),
this.xhr.open.apply(this.xhr, arguments)
);
}),
(a.prototype.setRequestHeader = function(e, t) {
return (
(this.cache.enabled = !1),
this.xhr.setRequestHeader.apply(this.xhr, arguments)
);
});
var u = new XMLHttpRequest();
for (var c in u)
a.prototype.hasOwnProperty(c) ||
!(function(e) {
Object.defineProperty(
a.prototype,
e,
"function" == typeof u[e]
? {
value: function() {
return this.xhr[e].apply(this.xhr, arguments);
}
}
: {
get: function() {
return this.cache.revalidated &&
this.cache.result.xhr.hasOwnProperty(e)
? this.cache.result.xhr[e]
: this.xhr[e];
},
set: function(t) {
this.xhr[e] = t;
}
}
);
})(c);
return {
XMLHttpRequest: a,
WebAssembly: {
get: function(e, r) {
var n = { url: t(e), version: d.version, module: null, md5: null };
l.execute(
d.name,
"get",
[n.url],
function(e) {
r(e && e.version == d.version ? e : n);
},
function() {
r(n);
}
);
},
put: function(e, t, r) {
l.execute(d.name, "put", [e, e.url], t, r);
}
}
};
})()
};
| p |
reporter.py | import os
import traceback
from uuid import uuid4
import cv2
from cvlog import log
from cvlog.config import Config
from cvtest.csv_reporter import CsvReporter
class Reporter:
def __new__(cls):
if not hasattr(cls, 'instance') or not cls.instance:
cls.instance = super().__new__(cls)
cls.instance.__initialised = False
return cls.instance
def __init__(self):
if not self.__initialised:
self.__initialised = True
report_path = Config().log_path() + "/report"
self.image_path = report_path + '/images/'
self.reporter = CsvReporter(report_path + "/report.csv")
def result(self, input_image, key_pressed, output_img):
message = ""
if (key_pressed == ord("y")): | else:
result = "Fail"
message = self.__save_image(output_img)
self.reporter.log_report([input_image, result.upper(), message])
def __save_image(self, img):
if not os.path.exists(self.image_path):
os.makedirs(self.image_path)
output_path = self.image_path + str(uuid4()) + '.png'
cv2.imwrite(output_path, img)
return output_path
def error(self, input_image, ex):
self.reporter.log_report([input_image, "ERROR", self.__stack_trace(ex)])
def __stack_trace(self, ex):
stacks = traceback.extract_tb(ex.__traceback__)[1:]
stack_trace = ""
for x in stacks[:10]:
stack_trace += x.filename + ":" + str(x.lineno) + ";"
return stack_trace
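# Usage sketch (image paths hypothetical): Reporter is a singleton, so the
# calls below log into the same CSV report. result() records a PASS when the
# reviewer pressed "y" and a FAIL (plus a saved copy of the output image)
# otherwise:
#   r = Reporter()
#   r.result("imgs/ok.png", ord("y"), processed_img)   # -> PASS row
#   r.result("imgs/bad.png", ord("n"), processed_img)  # -> FAIL row + saved image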
def report(input_image_path, processing_method):
for image_path in input_image_path:
try:
img = processing_method(image_path)
key_pressed = log.show_image(image_path, img)
except Exception as e:
Reporter().error(image_path, e)
else:
Reporter().result(image_path, key_pressed, img) | result = "Pass" |
env.rs | //! MAX_STORE_SIZE: 100 MB
//!
//! MAX_POST_SIZE: 32 KB
//!
//! MAX_EXPIRATION: 7 days
//!
//! CLEAN_DURATION: 5000 ms
//!
//! ADDR: "localhost:8088"
//!
//! CRYPT_KEY: "magic"
//!
//! REDIS_URL: None
use crate::time::SecTime;
use std::env;
use std::str::FromStr;
fn parse<T: FromStr>(var: &'static str, default: T) -> T {
env::var(var)
.ok()
.and_then(|s| s.parse::<T>().ok())
.unwrap_or(default)
}
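// Example (sketch): with `PASTEBIN_MAX_POST_SIZE=65536` exported in the
// environment, `parse("PASTEBIN_MAX_POST_SIZE", 32 * 1024)` returns 65536;
// if the variable is unset or fails to parse as the target type, the
// default (32 * 1024 here) is returned instead.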
const DEFAULT_ADDR: &str = "localhost:8088";
const DEFAULT_CRYPT_KEY: &str = "magic";
lazy_static! {
pub static ref MAX_STORE_SIZE: usize = { parse("PASTEBIN_MAX_STORE_SIZE", 100 * 1024 * 1024) };
pub static ref MAX_POST_SIZE: usize = { parse("PASTEBIN_MAX_POST_SIZE", 32 * 1024) };
pub static ref MAX_EXPIRATION: SecTime = { parse("PASTEBIN_MAX_EXPIRATION", 7 * 24 * 60 * 60) };
pub static ref CLEAN_DURATION: u64 = { parse("PASTEBIN_CLEAN_DURATION", 5000) };
pub static ref ADDR: String = { env::var("PASTEBIN_ADDR").unwrap_or(DEFAULT_ADDR.into()) };
pub static ref CRYPT_KEY: String =
{ env::var("PASTEBIN_CRYPT_KEY").unwrap_or(DEFAULT_CRYPT_KEY.into()) };
pub static ref REDIS_URL: Option<String> = { env::var("PASTEBIN_REDIS_URL").ok() };
}
pub fn info_env() {
info!("ADDR: {}", *ADDR);
info!("MAX_POST_SIZE: {} bytes", *MAX_POST_SIZE);
// info!("CRYPT_KEY: {}", *CRYPT_KEY);
match *REDIS_URL {
Some(ref redis_url) => |
None => {
info!("MAX_STORE_SIZE: {} bytes", *MAX_STORE_SIZE);
info!("MAX_EXPIRATION: {} s", *MAX_EXPIRATION);
info!("CLEAN_DURATION: {} ms", *CLEAN_DURATION);
}
}
}
| {
info!("REDIS_URL: {}", redis_url);
} |
stake_pool.rs | use crate::common::{
file_utils, jcli_wrapper::certificate::wrapper::JCLICertificateWrapper,
startup::create_new_key_pair,
};
use chain_crypto::{bech32::Bech32, Curve25519_2HashDH, Ed25519, SumEd25519_12};
use chain_impl_mockchain::{
certificate::PoolPermissions,
rewards::{Ratio as RatioLib, TaxType},
testing::{builders::StakePoolBuilder, data::StakePool as StakePoolLib},
value::Value as ValueLib,
};
use jormungandr_lib::{crypto::key::KeyPair, wallet::Wallet};
use std::num::NonZeroU64;
use std::path::PathBuf;
// temporary struct which should be replaced by one from chain-libs or jormungandr-lib
#[derive(Clone, Debug)]
pub struct StakePool {
leader: KeyPair<Ed25519>,
owner: Wallet,
inner: StakePoolLib,
stake_pool_signcert_file: PathBuf,
stake_pool_id: String,
}
impl StakePool {
pub fn new(owner: &Wallet) -> Self {
let leader = create_new_key_pair::<Ed25519>();
let stake_key = owner.signing_key_as_str();
let stake_key_pub = owner.identifier().to_bech32_str();
let stake_key_file = file_utils::create_file_in_temp("stake_key.sk", &stake_key);
let jcli_certificate = JCLICertificateWrapper::new();
let mut stake_pool = StakePoolBuilder::new()
.with_owners(vec![owner.identifier().into_public_key()])
.with_pool_permissions(PoolPermissions::new(1))
.with_reward_account(false)
.with_tax_type(TaxType {
fixed: ValueLib(100),
ratio: RatioLib {
numerator: 1,
denominator: NonZeroU64::new(10).unwrap(),
},
max_limit: None,
})
.build();
let stake_pool_signcert_file = jcli_certificate.assert_new_signed_stake_pool_cert(
&stake_pool.kes().public_key().to_bech32_str(),
&stake_pool.vrf().public_key().to_bech32_str(),
&stake_key_file,
0,
stake_pool.info().permissions.management_threshold().into(),
&stake_key_pub,
Some(stake_pool.info().rewards.into()),
);
StakePool {
owner: owner.clone(),
            leader,
inner: stake_pool,
stake_pool_signcert_file: stake_pool_signcert_file.clone(),
stake_pool_id: jcli_certificate.assert_get_stake_pool_id(&stake_pool_signcert_file),
}
}
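    // Usage sketch (owner wallet construction elided): building a pool for an
    // owner and reading back its registration artifacts:
    //   let pool = StakePool::new(&owner);
    //   let cert = pool.stake_pool_signcert_file(); // signed registration certificate
    //   let id = pool.id();                         // stake pool id derived from the cert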
pub fn leader(&self) -> &KeyPair<Ed25519> {
&self.leader
}
pub fn stake_pool_signcert_file(&self) -> &PathBuf {
&self.stake_pool_signcert_file
}
pub fn owner(&self) -> &Wallet {
&self.owner
}
pub fn id(&self) -> &str {
&self.stake_pool_id
}
pub fn kes(&self) -> KeyPair<SumEd25519_12> {
KeyPair::<SumEd25519_12>(self.inner.kes())
}
pub fn vrf(&self) -> KeyPair<Curve25519_2HashDH> |
}
impl Into<StakePoolLib> for StakePool {
fn into(self) -> StakePoolLib {
self.inner
}
}
| {
KeyPair::<Curve25519_2HashDH>(self.inner.vrf())
} |
dll_test.go | package main
import "testing"
func TestDLL(t *testing.T) {
tests := []struct {
name string
source string
expected int
}{
{
name: "none",
expected: 0,
source: `
package main
func main() {
println("Hello!")
}
`,
},
{
name: "for",
expected: 1,
source: `
package main
func main() {
for i := 0; i < 5; i++ {
defer println("defer")
}
}
`,
},
{
name: "range",
expected: 1,
source: `
package main
func main() {
list := []int{1, 2, 3, 4, 5, 6, 7}
for _, x := range list {
defer println(x)
}
}
`,
},
{
name: "nested",
expected: 1,
source: `
package main
func main() {
list := []int{1, 2, 3, 4, 5, 6, 7}
for _, i := range list {
for j := 0; j < i; j++ {
defer println(j)
}
}
}
`,
},
{
name: "block",
expected: 1,
source: `
package main
func main() {
for i := 0; i < 5; i++ {
{
defer println("defer")
}
}
}
`,
},
{
name: "if",
expected: 1,
source: `
package main
func main() {
for i := 0; i < 5; i++ {
if true {
defer println("defer")
}
}
}
`,
},
{
name: "funclit",
expected: 0,
source: `
package main
func main() {
for i := 0; i < 5; i++ {
func() {
defer println("defer")
}()
}
}
`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
reports, err := gather(tt.source, false)
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
if len(reports) != tt.expected {
t.Fatalf("expected %d reports, got %d", tt.expected, len(reports))
}
})
}
}
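// Note on the helper under test (behavior inferred from the table above):
// gather(src, false) parses the source and returns one report per defer
// statement that appears inside a for-loop body, so `expected` counts
// offending defer sites, not loop iterations; function literals are exempt.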
func | (t *testing.T) {
source := `
package main
func main() {
s = "missing quote
}
`
_, err := gather(source, false)
if err == nil {
t.Error("expected error but got nil")
}
}
func Test_splitArrayIntoParts(t *testing.T) {
getStringArray := func(amount int) []string {
strings := make([]string, 0, amount)
for i := 0; i < amount; i++ {
strings = append(strings, "foo")
}
return strings
}
tests := []struct {
name string
strings []string
parts int
expectedParts int
}{
{
name: "should split the array with one string into one part",
strings: getStringArray(1),
parts: 1,
expectedParts: 1,
},
{
			name:          "should split the array with one string into zero parts",
strings: getStringArray(1),
parts: 0,
expectedParts: 1,
},
{
name: "should split the array with two strings into one part",
strings: getStringArray(2),
parts: 1,
expectedParts: 1,
},
{
			name:          "should split the array with two strings into four parts",
strings: getStringArray(2),
parts: 4,
expectedParts: 2,
},
{
			name:          "should split the array with one string into two parts",
strings: getStringArray(1),
parts: 2,
expectedParts: 1,
},
{
			name:          "should split the array with four strings into two parts",
strings: getStringArray(4),
parts: 2,
expectedParts: 2,
},
{
			name:          "should split the array with two strings into three parts",
strings: getStringArray(2),
parts: 3,
expectedParts: 2,
},
{
			name:          "should split the array with ten strings into three parts",
strings: getStringArray(10),
parts: 3,
expectedParts: 3,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
got := splitArrayIntoParts(test.strings, test.parts)
if len(got) != test.expectedParts {
t.Fatalf("Expect to split the array into '%d' but got '%d'", test.expectedParts, len(got))
}
for _, files := range got {
if len(files) < 1 {
t.Fatalf("Expected to contain at least on string but got none")
}
}
})
}
t.Run("should split the empty array into one part", func(t *testing.T) {
strings := []string{}
parts := 1
got := len(splitArrayIntoParts(strings, parts))
want := 1
if got != want {
t.Fatalf("Expected a length of %d but got %d", want, got)
}
})
}
| TestErrorParsing |
wiki.go | // Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package repo
import (
"encoding/base64"
"fmt"
"net/http"
"net/url"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/convert"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/web"
wiki_service "code.gitea.io/gitea/services/wiki"
)
// NewWikiPage response for wiki create request
func NewWikiPage(ctx *context.APIContext) {
// swagger:operation POST /repos/{owner}/{repo}/wiki/new repository repoCreateWikiPage
// ---
// summary: Create a wiki page
// consumes:
// - application/json
// parameters:
// - name: owner
// in: path
// description: owner of the repo
// type: string
// required: true
// - name: repo
// in: path
// description: name of the repo
// type: string
// required: true
// - name: body
// in: body
// schema:
// "$ref": "#/definitions/CreateWikiPageOptions"
// responses:
// "201":
// "$ref": "#/responses/WikiPage"
// "400":
// "$ref": "#/responses/error"
// "403":
// "$ref": "#/responses/forbidden"
form := web.GetForm(ctx).(*api.CreateWikiPageOptions)
if util.IsEmptyString(form.Title) {
ctx.Error(http.StatusBadRequest, "emptyTitle", nil)
return
}
wikiName := wiki_service.NormalizeWikiName(form.Title)
if len(form.Message) == 0 {
form.Message = fmt.Sprintf("Add '%s'", form.Title)
}
content, err := base64.StdEncoding.DecodeString(form.ContentBase64)
if err != nil {
ctx.Error(http.StatusBadRequest, "invalid base64 encoding of content", err)
return
}
form.ContentBase64 = string(content)
if err := wiki_service.AddWikiPage(ctx, ctx.User, ctx.Repo.Repository, wikiName, form.ContentBase64, form.Message); err != nil {
if models.IsErrWikiReservedName(err) {
ctx.Error(http.StatusBadRequest, "IsErrWikiReservedName", err)
} else if models.IsErrWikiAlreadyExist(err) {
ctx.Error(http.StatusBadRequest, "IsErrWikiAlreadyExists", err)
} else {
ctx.Error(http.StatusInternalServerError, "AddWikiPage", err)
}
return
}
wikiPage := getWikiPage(ctx, wikiName)
if !ctx.Written() {
ctx.JSON(http.StatusCreated, wikiPage)
}
}
// EditWikiPage response for wiki modify request
func EditWikiPage(ctx *context.APIContext) {
// swagger:operation PATCH /repos/{owner}/{repo}/wiki/page/{pageName} repository repoEditWikiPage
// ---
// summary: Edit a wiki page
// consumes:
// - application/json
// parameters:
// - name: owner
// in: path
// description: owner of the repo
// type: string
// required: true
// - name: repo
// in: path
// description: name of the repo
// type: string
// required: true
// - name: pageName
// in: path
// description: name of the page
// type: string
// required: true
// - name: body
// in: body
// schema:
// "$ref": "#/definitions/CreateWikiPageOptions"
// responses:
// "200":
// "$ref": "#/responses/WikiPage"
// "400":
// "$ref": "#/responses/error"
// "403":
// "$ref": "#/responses/forbidden"
form := web.GetForm(ctx).(*api.CreateWikiPageOptions)
oldWikiName := wiki_service.NormalizeWikiName(ctx.Params(":pageName"))
newWikiName := wiki_service.NormalizeWikiName(form.Title)
if len(newWikiName) == 0 {
newWikiName = oldWikiName
}
if len(form.Message) == 0 {
form.Message = fmt.Sprintf("Update '%s'", newWikiName)
}
content, err := base64.StdEncoding.DecodeString(form.ContentBase64)
if err != nil {
ctx.Error(http.StatusBadRequest, "invalid base64 encoding of content", err)
return
}
form.ContentBase64 = string(content)
if err := wiki_service.EditWikiPage(ctx, ctx.User, ctx.Repo.Repository, oldWikiName, newWikiName, form.ContentBase64, form.Message); err != nil {
ctx.Error(http.StatusInternalServerError, "EditWikiPage", err)
return
}
wikiPage := getWikiPage(ctx, newWikiName)
if !ctx.Written() {
ctx.JSON(http.StatusOK, wikiPage)
}
}
func getWikiPage(ctx *context.APIContext, title string) *api.WikiPage {
title = wiki_service.NormalizeWikiName(title)
wikiRepo, commit := findWikiRepoCommit(ctx)
if wikiRepo != nil {
defer wikiRepo.Close()
}
if ctx.Written() {
return nil
}
	// look up the filename in the wiki - get file content and real filename
content, pageFilename := wikiContentsByName(ctx, commit, title, false)
if ctx.Written() {
return nil
}
sidebarContent, _ := wikiContentsByName(ctx, commit, "_Sidebar", true)
if ctx.Written() {
return nil
}
footerContent, _ := wikiContentsByName(ctx, commit, "_Footer", true)
if ctx.Written() {
return nil
}
// get commit count - wiki revisions
commitsCount, _ := wikiRepo.FileCommitsCount("master", pageFilename)
// Get last change information.
lastCommit, err := wikiRepo.GetCommitByPath(pageFilename)
if err != nil {
ctx.Error(http.StatusInternalServerError, "GetCommitByPath", err)
return nil
}
return &api.WikiPage{
WikiPageMetaData: convert.ToWikiPageMetaData(title, lastCommit, ctx.Repo.Repository),
ContentBase64: content,
CommitCount: commitsCount,
Sidebar: sidebarContent,
Footer: footerContent,
}
}
// DeleteWikiPage delete wiki page
func DeleteWikiPage(ctx *context.APIContext) {
// swagger:operation DELETE /repos/{owner}/{repo}/wiki/page/{pageName} repository repoDeleteWikiPage
// ---
// summary: Delete a wiki page
// parameters:
// - name: owner
// in: path
// description: owner of the repo
// type: string
// required: true
// - name: repo
// in: path
// description: name of the repo
// type: string
// required: true
// - name: pageName
// in: path
// description: name of the page
// type: string
// required: true
// responses:
// "204":
// "$ref": "#/responses/empty"
// "403":
// "$ref": "#/responses/forbidden"
// "404":
// "$ref": "#/responses/notFound"
wikiName := wiki_service.NormalizeWikiName(ctx.Params(":pageName"))
if err := wiki_service.DeleteWikiPage(ctx, ctx.User, ctx.Repo.Repository, wikiName); err != nil {
if err.Error() == "file does not exist" {
ctx.NotFound(err)
return
}
ctx.Error(http.StatusInternalServerError, "DeleteWikiPage", err)
return
}
ctx.Status(http.StatusNoContent)
}
// ListWikiPages get wiki pages list
func ListWikiPages(ctx *context.APIContext) {
// swagger:operation GET /repos/{owner}/{repo}/wiki/pages repository repoGetWikiPages
// ---
// summary: Get all wiki pages
// produces:
// - application/json
// parameters:
// - name: owner
// in: path
// description: owner of the repo
// type: string
// required: true
// - name: repo
// in: path
// description: name of the repo
// type: string
// required: true
// - name: page
// in: query
// description: page number of results to return (1-based)
// type: integer
// - name: limit
// in: query
// description: page size of results
// type: integer
// responses:
// "200":
// "$ref": "#/responses/WikiPageList"
// "404":
// "$ref": "#/responses/notFound"
wikiRepo, commit := findWikiRepoCommit(ctx)
if wikiRepo != nil {
defer wikiRepo.Close()
}
if ctx.Written() {
return
}
page := ctx.FormInt("page")
if page <= 1 {
page = 1
}
limit := ctx.FormInt("limit")
if limit <= 1 {
limit = setting.API.DefaultPagingNum
}
skip := (page - 1) * limit
max := page * limit
entries, err := commit.ListEntries()
if err != nil {
ctx.ServerError("ListEntries", err)
return
}
pages := make([]*api.WikiPageMetaData, 0, len(entries))
for i, entry := range entries {
if i < skip || i >= max || !entry.IsRegular() {
continue
}
c, err := wikiRepo.GetCommitByPath(entry.Name())
if err != nil {
ctx.Error(http.StatusInternalServerError, "GetCommit", err)
return
}
wikiName, err := wiki_service.FilenameToName(entry.Name())
if err != nil {
if models.IsErrWikiInvalidFileName(err) {
continue
}
ctx.Error(http.StatusInternalServerError, "WikiFilenameToName", err)
return
}
pages = append(pages, convert.ToWikiPageMetaData(wikiName, c, ctx.Repo.Repository))
}
ctx.SetTotalCountHeader(int64(len(entries)))
ctx.JSON(http.StatusOK, pages)
}
// GetWikiPage get single wiki page
func | (ctx *context.APIContext) {
// swagger:operation GET /repos/{owner}/{repo}/wiki/page/{pageName} repository repoGetWikiPage
// ---
// summary: Get a wiki page
// produces:
// - application/json
// parameters:
// - name: owner
// in: path
// description: owner of the repo
// type: string
// required: true
// - name: repo
// in: path
// description: name of the repo
// type: string
// required: true
// - name: pageName
// in: path
// description: name of the page
// type: string
// required: true
// responses:
// "200":
// "$ref": "#/responses/WikiPage"
// "404":
// "$ref": "#/responses/notFound"
// get requested pagename
pageName := wiki_service.NormalizeWikiName(ctx.Params(":pageName"))
wikiPage := getWikiPage(ctx, pageName)
if !ctx.Written() {
ctx.JSON(http.StatusOK, wikiPage)
}
}
// ListPageRevisions renders file revision list of wiki page
func ListPageRevisions(ctx *context.APIContext) {
// swagger:operation GET /repos/{owner}/{repo}/wiki/revisions/{pageName} repository repoGetWikiPageRevisions
// ---
// summary: Get revisions of a wiki page
// produces:
// - application/json
// parameters:
// - name: owner
// in: path
// description: owner of the repo
// type: string
// required: true
// - name: repo
// in: path
// description: name of the repo
// type: string
// required: true
// - name: pageName
// in: path
// description: name of the page
// type: string
// required: true
// - name: page
// in: query
// description: page number of results to return (1-based)
// type: integer
// responses:
// "200":
// "$ref": "#/responses/WikiCommitList"
// "404":
// "$ref": "#/responses/notFound"
wikiRepo, commit := findWikiRepoCommit(ctx)
if wikiRepo != nil {
defer wikiRepo.Close()
}
if ctx.Written() {
return
}
// get requested pagename
pageName := wiki_service.NormalizeWikiName(ctx.Params(":pageName"))
if len(pageName) == 0 {
pageName = "Home"
}
	// look up the filename in the wiki - get file content and real filename
_, pageFilename := wikiContentsByName(ctx, commit, pageName, false)
if ctx.Written() {
return
}
// get commit count - wiki revisions
commitsCount, _ := wikiRepo.FileCommitsCount("master", pageFilename)
page := ctx.FormInt("page")
if page <= 1 {
page = 1
}
	// get the commit history for the requested page
commitsHistory, err := wikiRepo.CommitsByFileAndRangeNoFollow("master", pageFilename, page)
if err != nil {
ctx.Error(http.StatusInternalServerError, "CommitsByFileAndRangeNoFollow", err)
return
}
ctx.SetTotalCountHeader(commitsCount)
ctx.JSON(http.StatusOK, convert.ToWikiCommitList(commitsHistory, commitsCount))
}
// findEntryForFile finds the tree entry for a target filepath.
func findEntryForFile(commit *git.Commit, target string) (*git.TreeEntry, error) {
entry, err := commit.GetTreeEntryByPath(target)
if err != nil {
return nil, err
}
if entry != nil {
return entry, nil
}
	// Fall back to the unescaped path
var unescapedTarget string
if unescapedTarget, err = url.QueryUnescape(target); err != nil {
return nil, err
}
return commit.GetTreeEntryByPath(unescapedTarget)
}
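// Example (sketch): a page stored as "Home%3F.md" is first looked up under
// its escaped name; only if that lookup returns no entry and no error does
// the url.QueryUnescape fallback try the literal "Home?.md" path.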
// findWikiRepoCommit opens the wiki repo and returns the latest commit, writing to context on error.
// The caller is responsible for closing the returned repo again
func findWikiRepoCommit(ctx *context.APIContext) (*git.Repository, *git.Commit) {
wikiRepo, err := git.OpenRepositoryCtx(ctx, ctx.Repo.Repository.WikiPath())
if err != nil {
if git.IsErrNotExist(err) || err.Error() == "no such file or directory" {
ctx.NotFound(err)
} else {
ctx.Error(http.StatusInternalServerError, "OpenRepository", err)
}
return nil, nil
}
commit, err := wikiRepo.GetBranchCommit("master")
if err != nil {
if git.IsErrNotExist(err) {
ctx.NotFound(err)
} else {
ctx.Error(http.StatusInternalServerError, "GetBranchCommit", err)
}
return wikiRepo, nil
}
return wikiRepo, commit
}
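// Usage sketch: callers must close the returned repo even when the commit
// lookup failed, e.g.:
//   wikiRepo, commit := findWikiRepoCommit(ctx)
//   if wikiRepo != nil {
//   	defer wikiRepo.Close()
//   }
//   if ctx.Written() {
//   	return
//   }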
// wikiContentsByEntry returns the contents of the wiki page referenced by the
// given tree entry, encoded with base64. Writes to ctx if an error occurs.
func wikiContentsByEntry(ctx *context.APIContext, entry *git.TreeEntry) string {
blob := entry.Blob()
if blob.Size() > setting.API.DefaultMaxBlobSize {
return ""
}
content, err := blob.GetBlobContentBase64()
if err != nil {
ctx.Error(http.StatusInternalServerError, "GetBlobContentBase64", err)
return ""
}
return content
}
// wikiContentsByName returns the base64-encoded contents of a wiki page, along
// with the real page filename. Writes to ctx if an error occurs.
func wikiContentsByName(ctx *context.APIContext, commit *git.Commit, wikiName string, isSidebarOrFooter bool) (string, string) {
pageFilename := wiki_service.NameToFilename(wikiName)
entry, err := findEntryForFile(commit, pageFilename)
if err != nil {
if git.IsErrNotExist(err) {
if !isSidebarOrFooter {
ctx.NotFound()
}
} else {
ctx.ServerError("findEntryForFile", err)
}
return "", ""
}
return wikiContentsByEntry(ctx, entry), pageFilename
}
| GetWikiPage |
peerclient.go | /*
Copyright IBM Corp. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/
package common
import (
"fmt"
"io/ioutil"
"time"
"github.com/hyperledger/fabric/core/comm"
pb "github.com/hyperledger/fabric/protos/peer"
"github.com/pkg/errors"
)
// PeerClient represents a client for communicating with a peer
type PeerClient struct {
commonClient
}
// NewPeerClientFromEnv creates an instance of a PeerClient from the global
// Viper instance
func NewPeerClientFromEnv() (*PeerClient, error) {
address, override, clientConfig, err := configFromEnv("peer")
if err != nil {
return nil, errors.WithMessage(err, "failed to load config for PeerClient")
}
return newPeerClientForClientConfig(address, override, clientConfig)
}
// NewPeerClientForAddress creates an instance of a PeerClient using the
// provided peer address and, if TLS is enabled, the TLS root cert file
func NewPeerClientForAddress(address, tlsRootCertFile string) (*PeerClient, error) {
if address == "" {
return nil, errors.New("peer address must be set")
}
	_, override, clientConfig, err := configFromEnv("peer")
	if err != nil {
		return nil, errors.WithMessage(err, "failed to load config for PeerClient")
	}
	if clientConfig.SecOpts.UseTLS {
if tlsRootCertFile == "" {
return nil, errors.New("tls root cert file must be set")
}
		caPEM, err := ioutil.ReadFile(tlsRootCertFile)
		if err != nil {
			return nil, errors.WithMessage(err, fmt.Sprintf("unable to load TLS root cert file from %s", tlsRootCertFile))
		}
clientConfig.SecOpts.ServerRootCAs = [][]byte{caPEM}
}
return newPeerClientForClientConfig(address, override, clientConfig)
}
func | (address, override string, clientConfig comm.ClientConfig) (*PeerClient, error) {
// set timeout
clientConfig.Timeout = time.Second * 3
gClient, err := comm.NewGRPCClient(clientConfig)
if err != nil {
return nil, errors.WithMessage(err, "failed to create PeerClient from config")
}
pClient := &PeerClient{
commonClient: commonClient{
GRPCClient: gClient,
address: address,
sn: override}}
return pClient, nil
}
// Endorser returns a client for the Endorser service
func (pc *PeerClient) Endorser() (pb.EndorserClient, error) {
conn, err := pc.commonClient.NewConnection(pc.address, pc.sn)
if err != nil {
return nil, errors.WithMessage(err, fmt.Sprintf("endorser client failed to connect to %s", pc.address))
}
return pb.NewEndorserClient(conn), nil
}
// Admin returns a client for the Admin service
func (pc *PeerClient) Admin() (pb.AdminClient, error) {
conn, err := pc.commonClient.NewConnection(pc.address, pc.sn)
if err != nil {
return nil, errors.WithMessage(err, fmt.Sprintf("admin client failed to connect to %s", pc.address))
}
return pb.NewAdminClient(conn), nil
}
// GetEndorserClient returns a new endorser client. If the both the address and
// tlsRootCertFile are not provided, the target values for the client are taken
// from the configuration settings for "peer.address" and
// "peer.tls.rootcert.file"
func GetEndorserClient(address string, tlsRootCertFile string) (pb.EndorserClient, error) {
var peerClient *PeerClient
var err error
if address != "" {
peerClient, err = NewPeerClientForAddress(address, tlsRootCertFile)
} else {
peerClient, err = NewPeerClientFromEnv()
}
if err != nil {
return nil, err
}
return peerClient.Endorser()
}
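// Usage sketch (endpoint and cert path hypothetical):
//   client, err := GetEndorserClient("peer0.org1.example.com:7051", "/etc/hyperledger/tls/ca.crt")
//   if err != nil { ... }
// Passing an empty address falls back to the viper configuration, per the
// comment above.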
// GetAdminClient returns a new admin client. The target address for
// the client is taken from the configuration setting "peer.address"
func GetAdminClient() (pb.AdminClient, error) {
peerClient, err := NewPeerClientFromEnv()
if err != nil {
return nil, err
}
return peerClient.Admin()
}
| newPeerClientForClientConfig |
author.js | import React from "react"
import {graphql} from "gatsby"
import Layout from "../components/layout"
import SectionHeader from "../components/Services/SectionHeader/SectionHeader"
import SectionBookAnAppointment from "../components/HomePageComponents/SectionBookAnAppointment/SectionBookAnAppointment"
import SectionForm from "../components/HomePageComponents/SectionForm/SectionForm"
import SectionListPosts from "../components/BlogPage/SectionListPosts/SectionListPosts"
import SEO from "../components/seo"
const Author = ({ data }) => {
    return (
<Layout>
<SEO seo={data.seo.SEO_Blog_Page} />
<SectionHeader
title={data.blogPage.Title}
background={data.blogPage.Background_Section_Header[0].url}
/>
<SectionListPosts
posts={data.authorPage.articles}
recentPosts={data.recentPosts.nodes}
author={data.authorPage.username}
shortText={true}
/>
<SectionBookAnAppointment />
<SectionForm />
</Layout>
)
}
export default Author
export const queryAuthor = graphql`
query UserTemplate($username: String) {
seo: strapiArticles {
SEO_Articles {
Description
Image_Url
Title
}
}
authorPage: strapiUsers(username: { eq: $username}) {
id
username
articles {
Title
slug
createdAt(formatString: "MMM D Y")
Text
author
category_article
Image {
alternativeText
url
}
}
}
blogPage: strapiBlogPage {
Title
Background_Section_Header {
alternativeText
url
}
}
recentPosts: allStrapiArticles(limit: 4) {
nodes {
Title | Text
author {
username
}
Image {
url
alternativeText
}
createdAt
}
}
}
` | |
utilsMock.go | // +build !release
package whitesource
import (
"net/http"
"os"
"github.com/SAP/jenkins-library/pkg/mock"
"github.com/SAP/jenkins-library/pkg/piperutils"
"github.com/pkg/errors"
)
func newTestScan(config *ScanOptions) *Scan {
return &Scan{
AggregateProjectName: config.ProjectName,
ProductVersion: config.ProductVersion,
}
}
// NpmInstall records in which directory "npm install" has been invoked and for which package.json files.
type NpmInstall struct {
CurrentDir string
PackageJSON []string
}
// DownloadedFile records what URL has been downloaded to which file.
type DownloadedFile struct {
SourceURL string
FilePath string
}
// ScanUtilsMock is an implementation of the Utils interface that can be used during tests.
type ScanUtilsMock struct {
*mock.FilesMock
*mock.ExecMockRunner
NpmInstalledModules []NpmInstall
DownloadedFiles []DownloadedFile
DownloadError map[string]error
RemoveAllDirs []string
RemoveAllError map[string]error
}
// RemoveAll mimics os.RemoveAll().
func (m *ScanUtilsMock) RemoveAll(dir string) error {
// Can be removed once implemented in mock.FilesMock.
m.RemoveAllDirs = append(m.RemoveAllDirs, dir)
if m.RemoveAllError[dir] != nil {
return m.RemoveAllError[dir]
}
return nil
}
// FindPackageJSONFiles mimics npm.FindPackageJSONFiles() based on the FilesMock setup.
func (m *ScanUtilsMock) FindPackageJSONFiles(options *ScanOptions) ([]string, error) {
unfilteredMatches, _ := m.Glob("**/package.json")
return piperutils.ExcludeFiles(unfilteredMatches, options.BuildDescriptorExcludeList)
}
// InstallAllNPMDependencies mimics npm.InstallAllNPMDependencies() and records the "npm install".
func (m *ScanUtilsMock) InstallAllNPMDependencies(_ *ScanOptions, packageJSONs []string) error {
m.NpmInstalledModules = append(m.NpmInstalledModules, NpmInstall{
CurrentDir: m.CurrentDir,
PackageJSON: packageJSONs,
})
return nil
}
// DownloadFile mimics http.Downloader and records the downloaded file.
func (m *ScanUtilsMock) DownloadFile(url, filename string, _ http.Header, _ []*http.Cookie) error {
if url == "errorCopyFile" {
return errors.New("unable to copy content from url to file")
}
if url == "error404NotFound" {
return errors.New("returned with response 404 Not Found")
}
if m.DownloadError[url] != nil {
return m.DownloadError[url]
}
m.DownloadedFiles = append(m.DownloadedFiles, DownloadedFile{SourceURL: url, FilePath: filename})
return nil
}
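// Example (sketch, URL hypothetical): a test can pre-seed a failure for one URL:
//   utils := NewScanUtilsMock()
//   utils.DownloadError = map[string]error{
//   	"https://example.test/wss-agent.jar": errors.New("download refused"),
//   }
// and later assert on utils.DownloadedFiles for the successful calls.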
// FileOpen mimics os.FileOpen() based on FilesMock Open().
func (m *ScanUtilsMock) FileOpen(name string, flag int, perm os.FileMode) (File, error) {
return m.Open(name, flag, perm)
}
// NewScanUtilsMock returns an initialized ScanUtilsMock instance.
func | () *ScanUtilsMock {
return &ScanUtilsMock{
FilesMock: &mock.FilesMock{},
ExecMockRunner: &mock.ExecMockRunner{},
}
}
| NewScanUtilsMock |
sql_parser.rs | // Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use common_exception::Result;
use common_meta_types::Compression;
use common_meta_types::Credentials;
use common_meta_types::FileFormat;
use common_meta_types::Format;
use common_meta_types::PasswordType;
use common_meta_types::StageParams;
use common_meta_types::UserIdentity;
use common_meta_types::UserPrivilegeSet;
use common_meta_types::UserPrivilegeType;
use common_planners::Optimization;
use databend_query::sql::statements::DfAlterUser;
use databend_query::sql::statements::DfCopy;
use databend_query::sql::statements::DfCreateDatabase;
use databend_query::sql::statements::DfCreateStage;
use databend_query::sql::statements::DfCreateTable;
use databend_query::sql::statements::DfCreateUser;
use databend_query::sql::statements::DfDescribeTable;
use databend_query::sql::statements::DfDropDatabase;
use databend_query::sql::statements::DfDropStage;
use databend_query::sql::statements::DfDropTable;
use databend_query::sql::statements::DfDropUser;
use databend_query::sql::statements::DfGrantObject;
use databend_query::sql::statements::DfGrantStatement;
use databend_query::sql::statements::DfOptimizeTable;
use databend_query::sql::statements::DfQueryStatement;
use databend_query::sql::statements::DfRevokeStatement;
use databend_query::sql::statements::DfShowCreateDatabase;
use databend_query::sql::statements::DfShowCreateTable;
use databend_query::sql::statements::DfShowDatabases;
use databend_query::sql::statements::DfShowGrants;
use databend_query::sql::statements::DfShowTables;
use databend_query::sql::statements::DfTruncateTable;
use databend_query::sql::statements::DfUseDatabase;
use databend_query::sql::*;
use sqlparser::ast::*;
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;
use sqlparser::parser::ParserError;
use sqlparser::tokenizer::Tokenizer;
fn expect_parse_ok(sql: &str, expected: DfStatement) -> Result<()> {
let (statements, _) = DfParser::parse_sql(sql)?;
assert_eq!(
statements.len(),
1,
"Expected to parse exactly one statement"
);
assert_eq!(statements[0], expected);
Ok(())
}
fn expect_parse_err(sql: &str, expected: String) -> Result<()> {
let err = DfParser::parse_sql(sql).unwrap_err();
assert_eq!(err.message(), expected);
Ok(())
}
fn expect_parse_err_contains(sql: &str, expected: String) -> Result<()> {
let err = DfParser::parse_sql(sql).unwrap_err();
assert!(err.message().contains(&expected));
Ok(())
}
fn verified_query(sql: &str) -> Result<Box<DfQueryStatement>> {
let mut parser = DfParser::new_with_dialect(sql, &GenericDialect {})?;
let stmt = parser.parse_statement()?;
if let DfStatement::Query(query) = stmt {
return Ok(query);
}
Err(ParserError::ParserError("Expect query statement".to_string()).into())
}
fn make_column_def(name: impl Into<String>, data_type: DataType) -> ColumnDef {
ColumnDef {
name: Ident {
value: name.into(),
quote_style: None,
},
data_type,
collation: None,
options: vec![],
}
}
fn parse_sql_to_expr(query_expr: &str) -> Expr {
let dialect = GenericDialect {};
let mut tokenizer = Tokenizer::new(&dialect, query_expr);
let tokens = tokenizer.tokenize().unwrap();
let mut parser = Parser::new(tokens, &dialect);
parser.parse_expr().unwrap()
}
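// Example (sketch): parse_sql_to_expr("t LIKE 'aaa'") produces an
// Expr::BinaryOp { op: BinaryOperator::Like, .. }, which is exactly what the
// SHOW ... WHERE tests below compare against.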
#[test]
fn create_database() -> Result<()> {
{
let sql = "CREATE DATABASE db1";
let expected = DfStatement::CreateDatabase(DfCreateDatabase {
if_not_exists: false,
name: ObjectName(vec![Ident::new("db1")]),
engine: "".to_string(),
engine_options: HashMap::new(),
options: HashMap::new(),
});
expect_parse_ok(sql, expected)?;
}
{
let sql = "CREATE DATABASE db1 engine = github";
let expected = DfStatement::CreateDatabase(DfCreateDatabase {
if_not_exists: false,
name: ObjectName(vec![Ident::new("db1")]),
engine: "github".to_string(),
engine_options: HashMap::new(),
options: HashMap::new(),
});
expect_parse_ok(sql, expected)?;
}
{
let sql = "CREATE DATABASE IF NOT EXISTS db1";
let expected = DfStatement::CreateDatabase(DfCreateDatabase {
if_not_exists: true,
name: ObjectName(vec![Ident::new("db1")]),
engine: "".to_string(),
engine_options: HashMap::new(),
options: HashMap::new(),
});
expect_parse_ok(sql, expected)?;
}
Ok(())
}
#[test]
fn drop_database() -> Result<()> {
{
let sql = "DROP DATABASE db1";
let expected = DfStatement::DropDatabase(DfDropDatabase {
if_exists: false,
name: ObjectName(vec![Ident::new("db1")]),
});
expect_parse_ok(sql, expected)?;
} | let sql = "DROP DATABASE IF EXISTS db1";
let expected = DfStatement::DropDatabase(DfDropDatabase {
if_exists: true,
name: ObjectName(vec![Ident::new("db1")]),
});
expect_parse_ok(sql, expected)?;
}
Ok(())
}
#[test]
fn create_table() -> Result<()> {
// positive case
let sql = "CREATE TABLE t(c1 int) ENGINE = CSV location = '/data/33.csv' ";
let expected = DfStatement::CreateTable(DfCreateTable {
if_not_exists: false,
name: ObjectName(vec![Ident::new("t")]),
columns: vec![make_column_def("c1", DataType::Int(None))],
engine: "CSV".to_string(),
options: maplit::hashmap! {"location".into() => "/data/33.csv".into()},
like: None,
query: None,
});
expect_parse_ok(sql, expected)?;
    // positive case: a Parquet table with multiple column types plus location and comment options
let sql = "CREATE TABLE t(c1 int, c2 bigint, c3 varchar(255) ) ENGINE = Parquet location = 'foo.parquet' comment = 'foo'";
let expected = DfStatement::CreateTable(DfCreateTable {
if_not_exists: false,
name: ObjectName(vec![Ident::new("t")]),
columns: vec![
make_column_def("c1", DataType::Int(None)),
make_column_def("c2", DataType::BigInt(None)),
make_column_def("c3", DataType::Varchar(Some(255))),
],
engine: "Parquet".to_string(),
options: maplit::hashmap! {
"location".into() => "foo.parquet".into(),
"comment".into() => "foo".into(),
},
like: None,
query: None,
});
expect_parse_ok(sql, expected)?;
// create table like statement
let sql = "CREATE TABLE db1.test1 LIKE db2.test2 ENGINE = Parquet location = 'batcave'";
let expected = DfStatement::CreateTable(DfCreateTable {
if_not_exists: false,
name: ObjectName(vec![Ident::new("db1"), Ident::new("test1")]),
columns: vec![],
engine: "Parquet".to_string(),
options: maplit::hashmap! {"location".into() => "batcave".into()},
like: Some(ObjectName(vec![Ident::new("db2"), Ident::new("test2")])),
query: None,
});
expect_parse_ok(sql, expected)?;
// create table as select statement
let sql = "CREATE TABLE db1.test1(c1 int, c2 varchar(255)) ENGINE = Parquet location = 'batcave' AS SELECT * FROM t2";
let expected = DfStatement::CreateTable(DfCreateTable {
if_not_exists: false,
name: ObjectName(vec![Ident::new("db1"), Ident::new("test1")]),
columns: vec![
make_column_def("c1", DataType::Int(None)),
make_column_def("c2", DataType::Varchar(Some(255))),
],
engine: "Parquet".to_string(),
options: maplit::hashmap! {"location".into() => "batcave".into()},
like: None,
query: Some(Box::new(DfQueryStatement {
from: vec![TableWithJoins {
relation: TableFactor::Table {
name: ObjectName(vec![Ident::new("t2")]),
alias: None,
args: vec![],
with_hints: vec![],
},
joins: vec![],
}],
projection: vec![SelectItem::Wildcard],
selection: None,
group_by: vec![],
having: None,
order_by: vec![],
limit: None,
offset: None,
})),
});
expect_parse_ok(sql, expected)?;
Ok(())
}
#[test]
fn drop_table() -> Result<()> {
{
let sql = "DROP TABLE t1";
let expected = DfStatement::DropTable(DfDropTable {
if_exists: false,
name: ObjectName(vec![Ident::new("t1")]),
});
expect_parse_ok(sql, expected)?;
}
{
let sql = "DROP TABLE IF EXISTS t1";
let expected = DfStatement::DropTable(DfDropTable {
if_exists: true,
name: ObjectName(vec![Ident::new("t1")]),
});
expect_parse_ok(sql, expected)?;
}
Ok(())
}
#[test]
fn describe_table() -> Result<()> {
{
let sql = "DESCRIBE t1";
let expected = DfStatement::DescribeTable(DfDescribeTable {
name: ObjectName(vec![Ident::new("t1")]),
});
expect_parse_ok(sql, expected)?;
}
{
let sql = "DESC t1";
let expected = DfStatement::DescribeTable(DfDescribeTable {
name: ObjectName(vec![Ident::new("t1")]),
});
expect_parse_ok(sql, expected)?;
}
Ok(())
}
#[test]
fn show_queries() -> Result<()> {
use databend_query::sql::statements::DfShowSettings;
use databend_query::sql::statements::DfShowTables;
// positive case
expect_parse_ok("SHOW TABLES", DfStatement::ShowTables(DfShowTables::All))?;
expect_parse_ok("SHOW TABLES;", DfStatement::ShowTables(DfShowTables::All))?;
expect_parse_ok("SHOW SETTINGS", DfStatement::ShowSettings(DfShowSettings))?;
expect_parse_ok(
"SHOW TABLES LIKE 'aaa'",
DfStatement::ShowTables(DfShowTables::Like(Ident::with_quote('\'', "aaa"))),
)?;
expect_parse_ok(
"SHOW TABLES --comments should not in sql case1",
DfStatement::ShowTables(DfShowTables::All),
)?;
expect_parse_ok(
"SHOW TABLES LIKE 'aaa' --comments should not in sql case2",
DfStatement::ShowTables(DfShowTables::Like(Ident::with_quote('\'', "aaa"))),
)?;
expect_parse_ok(
"SHOW TABLES WHERE t LIKE 'aaa'",
DfStatement::ShowTables(DfShowTables::Where(parse_sql_to_expr("t LIKE 'aaa'"))),
)?;
expect_parse_ok(
"SHOW TABLES LIKE 'aaa' --comments should not in sql case2",
DfStatement::ShowTables(DfShowTables::Like(Ident::with_quote('\'', "aaa"))),
)?;
expect_parse_ok(
"SHOW TABLES WHERE t LIKE 'aaa' AND t LIKE 'a%'",
DfStatement::ShowTables(DfShowTables::Where(parse_sql_to_expr(
"t LIKE 'aaa' AND t LIKE 'a%'",
))),
)?;
Ok(())
}
#[test]
fn show_tables_test() -> Result<()> {
let mut ident = Ident::new("ss");
ident.quote_style = Some('`');
let v = vec![ident];
let name = ObjectName(v);
let name_two = name.clone();
expect_parse_ok(
"SHOW TABLES FROM `ss`",
DfStatement::ShowTables(DfShowTables::FromOrIn(name)),
)?;
expect_parse_ok(
"SHOW TABLES IN `ss`",
DfStatement::ShowTables(DfShowTables::FromOrIn(name_two)),
)?;
Ok(())
}
#[test]
fn show_grants_test() -> Result<()> {
expect_parse_ok(
"SHOW GRANTS",
DfStatement::ShowGrants(DfShowGrants {
user_identity: None,
}),
)?;
expect_parse_ok(
"SHOW GRANTS FOR 'u1'@'%'",
DfStatement::ShowGrants(DfShowGrants {
user_identity: Some(UserIdentity {
username: "u1".into(),
hostname: "%".into(),
}),
}),
)?;
Ok(())
}
#[test]
fn show_functions_tests() -> Result<()> {
use databend_query::sql::statements::DfShowFunctions;
// positive case
expect_parse_ok(
"SHOW FUNCTIONS",
DfStatement::ShowFunctions(DfShowFunctions::All),
)?;
expect_parse_ok(
"SHOW FUNCTIONS;",
DfStatement::ShowFunctions(DfShowFunctions::All),
)?;
expect_parse_ok(
"SHOW FUNCTIONS --comments should not in sql case1",
DfStatement::ShowFunctions(DfShowFunctions::All),
)?;
expect_parse_ok(
"SHOW FUNCTIONS LIKE 'aaa'",
DfStatement::ShowFunctions(DfShowFunctions::Like(Ident::with_quote('\'', "aaa"))),
)?;
expect_parse_ok(
"SHOW FUNCTIONS LIKE 'aaa';",
DfStatement::ShowFunctions(DfShowFunctions::Like(Ident::with_quote('\'', "aaa"))),
)?;
expect_parse_ok(
"SHOW FUNCTIONS LIKE 'aaa' --comments should not in sql case2",
DfStatement::ShowFunctions(DfShowFunctions::Like(Ident::with_quote('\'', "aaa"))),
)?;
expect_parse_ok(
"SHOW FUNCTIONS WHERE t LIKE 'aaa'",
DfStatement::ShowFunctions(DfShowFunctions::Where(parse_sql_to_expr("t LIKE 'aaa'"))),
)?;
expect_parse_ok(
"SHOW FUNCTIONS LIKE 'aaa' --comments should not in sql case2",
DfStatement::ShowFunctions(DfShowFunctions::Like(Ident::with_quote('\'', "aaa"))),
)?;
expect_parse_ok(
"SHOW FUNCTIONS WHERE t LIKE 'aaa' AND t LIKE 'a%'",
DfStatement::ShowFunctions(DfShowFunctions::Where(parse_sql_to_expr(
"t LIKE 'aaa' AND t LIKE 'a%'",
))),
)?;
Ok(())
}
#[test]
fn use_database_test() -> Result<()> {
expect_parse_ok(
"USe db1",
DfStatement::UseDatabase(DfUseDatabase {
name: ObjectName(vec![Ident::new("db1")]),
}),
)?;
expect_parse_ok(
"use db1",
DfStatement::UseDatabase(DfUseDatabase {
name: ObjectName(vec![Ident::new("db1")]),
}),
)?;
Ok(())
}
#[test]
fn truncate_table() -> Result<()> {
{
let sql = "TRUNCATE TABLE t1";
let expected = DfStatement::TruncateTable(DfTruncateTable {
name: ObjectName(vec![Ident::new("t1")]),
purge: false,
});
expect_parse_ok(sql, expected)?;
}
{
let sql = "TRUNCATE TABLE t1 purge";
let expected = DfStatement::TruncateTable(DfTruncateTable {
name: ObjectName(vec![Ident::new("t1")]),
purge: true,
});
expect_parse_ok(sql, expected)?;
}
Ok(())
}
#[test]
fn hint_test() -> Result<()> {
{
let comment = " { ErrorCode 1002 }";
let expected = DfHint::create_from_comment(comment, "--");
assert_eq!(expected.error_code, Some(1002));
}
{
let comment = " { ErrorCode1002 }";
let expected = DfHint::create_from_comment(comment, "--");
assert_eq!(expected.error_code, None);
}
{
let comment = " { ErrorCode 22}";
let expected = DfHint::create_from_comment(comment, "--");
assert_eq!(expected.error_code, Some(22));
}
{
let comment = " { ErrorCode: 22}";
let expected = DfHint::create_from_comment(comment, "--");
assert_eq!(expected.error_code, None);
}
{
let comment = " { Errorcode 22}";
let expected = DfHint::create_from_comment(comment, "--");
assert_eq!(expected.error_code, None);
}
Ok(())
}
#[test]
fn copy_test() -> Result<()> {
let ident = Ident::new("test_csv");
let v = vec![ident];
let name = ObjectName(v);
expect_parse_ok(
"copy into test_csv from '@my_ext_stage/tutorials/sample.csv' format csv csv_header = 1 field_delimitor = ',';",
DfStatement::Copy(DfCopy {
name,
columns: vec![],
location: "@my_ext_stage/tutorials/sample.csv".to_string(),
format: "csv".to_string(),
options: maplit::hashmap! {
"csv_header".into() => "1".into(),
"field_delimitor".into() => ",".into(),
}
}
),
)?;
Ok(())
}
#[test]
fn show_databases_test() -> Result<()> {
expect_parse_ok(
"SHOW DATABASES",
DfStatement::ShowDatabases(DfShowDatabases { where_opt: None }),
)?;
expect_parse_ok(
"SHOW DATABASES;",
DfStatement::ShowDatabases(DfShowDatabases { where_opt: None }),
)?;
expect_parse_ok(
"SHOW DATABASES WHERE Database = 'ss'",
DfStatement::ShowDatabases(DfShowDatabases {
where_opt: Some(Expr::BinaryOp {
left: Box::new(Expr::Identifier(Ident::new("name"))),
op: BinaryOperator::Eq,
right: Box::new(Expr::Value(Value::SingleQuotedString("ss".to_string()))),
}),
}),
)?;
expect_parse_ok(
"SHOW DATABASES WHERE Database Like 'ss%'",
DfStatement::ShowDatabases(DfShowDatabases {
where_opt: Some(Expr::BinaryOp {
left: Box::new(Expr::Identifier(Ident::new("name"))),
op: BinaryOperator::Like,
right: Box::new(Expr::Value(Value::SingleQuotedString("ss%".to_string()))),
}),
}),
)?;
expect_parse_ok(
"SHOW DATABASES LIKE 'ss%'",
DfStatement::ShowDatabases(DfShowDatabases {
where_opt: Some(Expr::BinaryOp {
left: Box::new(Expr::Identifier(Ident::new("name"))),
op: BinaryOperator::Like,
right: Box::new(Expr::Value(Value::SingleQuotedString("ss%".to_string()))),
}),
}),
)?;
expect_parse_ok(
"SHOW DATABASES LIKE SUBSTRING('ss%' FROM 1 FOR 3)",
DfStatement::ShowDatabases(DfShowDatabases {
where_opt: Some(Expr::BinaryOp {
left: Box::new(Expr::Identifier(Ident::new("name"))),
op: BinaryOperator::Like,
right: Box::new(Expr::Substring {
expr: Box::new(Expr::Value(Value::SingleQuotedString("ss%".to_string()))),
substring_from: Some(Box::new(Expr::Value(Value::Number(
"1".to_string(),
false,
)))),
substring_for: Some(Box::new(Expr::Value(Value::Number(
"3".to_string(),
false,
)))),
}),
}),
}),
)?;
expect_parse_ok(
"SHOW DATABASES LIKE POSITION('012345' IN 'abcdef')",
DfStatement::ShowDatabases(DfShowDatabases {
where_opt: Some(Expr::BinaryOp {
left: Box::new(Expr::Identifier(Ident::new("name"))),
op: BinaryOperator::Like,
right: Box::new(Expr::Position {
substr_expr: Box::new(Expr::Value(Value::SingleQuotedString(
"012345".to_string(),
))),
str_expr: Box::new(Expr::Value(Value::SingleQuotedString(
"abcdef".to_string(),
))),
}),
}),
}),
)?;
Ok(())
}
#[test]
fn show_create_test() -> Result<()> {
expect_parse_ok(
"SHOW CREATE TABLE test",
DfStatement::ShowCreateTable(DfShowCreateTable {
name: ObjectName(vec![Ident::new("test")]),
}),
)?;
expect_parse_ok(
"SHOW CREATE DATABASE test",
DfStatement::ShowCreateDatabase(DfShowCreateDatabase {
name: ObjectName(vec![Ident::new("test")]),
}),
)?;
Ok(())
}
#[test]
fn create_user_test() -> Result<()> {
expect_parse_ok(
"CREATE USER 'test'@'localhost' IDENTIFIED BY 'password'",
DfStatement::CreateUser(DfCreateUser {
if_not_exists: false,
name: String::from("test"),
hostname: String::from("localhost"),
password_type: PasswordType::Sha256,
password: String::from("password"),
}),
)?;
expect_parse_ok(
"CREATE USER 'test'@'localhost' IDENTIFIED WITH plaintext_password BY 'password'",
DfStatement::CreateUser(DfCreateUser {
if_not_exists: false,
name: String::from("test"),
hostname: String::from("localhost"),
password_type: PasswordType::PlainText,
password: String::from("password"),
}),
)?;
expect_parse_ok(
"CREATE USER 'test'@'localhost' IDENTIFIED WITH sha256_password BY 'password'",
DfStatement::CreateUser(DfCreateUser {
if_not_exists: false,
name: String::from("test"),
hostname: String::from("localhost"),
password_type: PasswordType::Sha256,
password: String::from("password"),
}),
)?;
expect_parse_ok(
"CREATE USER 'test'@'localhost' IDENTIFIED WITH double_sha1_password BY 'password'",
DfStatement::CreateUser(DfCreateUser {
if_not_exists: false,
name: String::from("test"),
hostname: String::from("localhost"),
password_type: PasswordType::DoubleSha1,
password: String::from("password"),
}),
)?;
expect_parse_ok(
"CREATE USER 'test'@'localhost' IDENTIFIED WITH no_password",
DfStatement::CreateUser(DfCreateUser {
if_not_exists: false,
name: String::from("test"),
hostname: String::from("localhost"),
password_type: PasswordType::None,
password: String::from(""),
}),
)?;
expect_parse_ok(
"CREATE USER IF NOT EXISTS 'test'@'localhost' IDENTIFIED WITH sha256_password BY 'password'",
DfStatement::CreateUser(DfCreateUser {
if_not_exists: true,
name: String::from("test"),
hostname: String::from("localhost"),
password_type: PasswordType::Sha256,
password: String::from("password"),
}),
)?;
expect_parse_ok(
"CREATE USER 'test@localhost' IDENTIFIED WITH sha256_password BY 'password'",
DfStatement::CreateUser(DfCreateUser {
if_not_exists: false,
name: String::from("test@localhost"),
hostname: String::from("%"),
password_type: PasswordType::Sha256,
password: String::from("password"),
}),
)?;
expect_parse_ok(
"CREATE USER 'test'@'localhost' NOT IDENTIFIED",
DfStatement::CreateUser(DfCreateUser {
if_not_exists: false,
name: String::from("test"),
hostname: String::from("localhost"),
password_type: PasswordType::None,
password: String::from(""),
}),
)?;
expect_parse_ok(
"CREATE USER 'test'@'localhost'",
DfStatement::CreateUser(DfCreateUser {
if_not_exists: false,
name: String::from("test"),
hostname: String::from("localhost"),
password_type: PasswordType::None,
password: String::from(""),
}),
)?;
expect_parse_err(
"CREATE USER 'test'@'localhost' IDENTIFIED WITH no_password BY 'password'",
String::from("sql parser error: Expected end of statement, found: BY"),
)?;
expect_parse_err(
"CREATE USER 'test'@'localhost' IDENTIFIED WITH sha256_password",
String::from("sql parser error: Expected keyword BY"),
)?;
expect_parse_err(
"CREATE USER 'test'@'localhost' IDENTIFIED WITH sha256_password BY",
String::from("sql parser error: Expected literal string, found: EOF"),
)?;
expect_parse_err(
"CREATE USER 'test'@'localhost' IDENTIFIED WITH sha256_password BY ''",
String::from("sql parser error: Missing password"),
)?;
Ok(())
}
#[test]
fn alter_user_test() -> Result<()> {
expect_parse_ok(
"ALTER USER 'test'@'localhost' IDENTIFIED BY 'password'",
DfStatement::AlterUser(DfAlterUser {
if_current_user: false,
name: String::from("test"),
hostname: String::from("localhost"),
new_password_type: PasswordType::Sha256,
new_password: String::from("password"),
}),
)?;
expect_parse_ok(
"ALTER USER USER() IDENTIFIED BY 'password'",
DfStatement::AlterUser(DfAlterUser {
if_current_user: true,
name: String::from(""),
hostname: String::from(""),
new_password_type: PasswordType::Sha256,
new_password: String::from("password"),
}),
)?;
expect_parse_ok(
"ALTER USER 'test'@'localhost' IDENTIFIED WITH plaintext_password BY 'password'",
DfStatement::AlterUser(DfAlterUser {
if_current_user: false,
name: String::from("test"),
hostname: String::from("localhost"),
new_password_type: PasswordType::PlainText,
new_password: String::from("password"),
}),
)?;
expect_parse_ok(
"ALTER USER 'test'@'localhost' IDENTIFIED WITH sha256_password BY 'password'",
DfStatement::AlterUser(DfAlterUser {
if_current_user: false,
name: String::from("test"),
hostname: String::from("localhost"),
new_password_type: PasswordType::Sha256,
new_password: String::from("password"),
}),
)?;
expect_parse_ok(
"ALTER USER 'test'@'localhost' IDENTIFIED WITH double_sha1_password BY 'password'",
DfStatement::AlterUser(DfAlterUser {
if_current_user: false,
name: String::from("test"),
hostname: String::from("localhost"),
new_password_type: PasswordType::DoubleSha1,
new_password: String::from("password"),
}),
)?;
expect_parse_ok(
"ALTER USER 'test'@'localhost' IDENTIFIED WITH no_password",
DfStatement::AlterUser(DfAlterUser {
if_current_user: false,
name: String::from("test"),
hostname: String::from("localhost"),
new_password_type: PasswordType::None,
new_password: String::from(""),
}),
)?;
expect_parse_ok(
"ALTER USER 'test@localhost' IDENTIFIED WITH sha256_password BY 'password'",
DfStatement::AlterUser(DfAlterUser {
if_current_user: false,
name: String::from("test@localhost"),
hostname: String::from("%"),
new_password_type: PasswordType::Sha256,
new_password: String::from("password"),
}),
)?;
expect_parse_ok(
"ALTER USER 'test'@'localhost' NOT IDENTIFIED",
DfStatement::AlterUser(DfAlterUser {
if_current_user: false,
name: String::from("test"),
hostname: String::from("localhost"),
new_password_type: PasswordType::None,
new_password: String::from(""),
}),
)?;
expect_parse_ok(
"ALTER USER 'test'@'localhost'",
DfStatement::AlterUser(DfAlterUser {
if_current_user: false,
name: String::from("test"),
hostname: String::from("localhost"),
new_password_type: PasswordType::None,
new_password: String::from(""),
}),
)?;
expect_parse_err(
"ALTER USER 'test'@'localhost' IDENTIFIED WITH no_password BY 'password'",
String::from("sql parser error: Expected end of statement, found: BY"),
)?;
expect_parse_err(
"ALTER USER 'test'@'localhost' IDENTIFIED WITH sha256_password",
String::from("sql parser error: Expected keyword BY"),
)?;
expect_parse_err(
"ALTER USER 'test'@'localhost' IDENTIFIED WITH sha256_password BY",
String::from("sql parser error: Expected literal string, found: EOF"),
)?;
expect_parse_err(
"ALTER USER 'test'@'localhost' IDENTIFIED WITH sha256_password BY ''",
String::from("sql parser error: Missing password"),
)?;
Ok(())
}
#[test]
fn drop_user_test() -> Result<()> {
expect_parse_ok(
"DROP USER 'test'@'localhost'",
DfStatement::DropUser(DfDropUser {
if_exists: false,
name: String::from("test"),
hostname: String::from("localhost"),
}),
)?;
expect_parse_ok(
"DROP USER 'test'@'127.0.0.1'",
DfStatement::DropUser(DfDropUser {
if_exists: false,
name: String::from("test"),
hostname: String::from("127.0.0.1"),
}),
)?;
expect_parse_ok(
"DROP USER 'test'",
DfStatement::DropUser(DfDropUser {
if_exists: false,
name: String::from("test"),
hostname: String::from("%"),
}),
)?;
expect_parse_ok(
"DROP USER IF EXISTS 'test'@'localhost'",
DfStatement::DropUser(DfDropUser {
if_exists: true,
name: String::from("test"),
hostname: String::from("localhost"),
}),
)?;
expect_parse_ok(
"DROP USER IF EXISTS 'test'@'127.0.0.1'",
DfStatement::DropUser(DfDropUser {
if_exists: true,
name: String::from("test"),
hostname: String::from("127.0.0.1"),
}),
)?;
expect_parse_ok(
"DROP USER IF EXISTS 'test'",
DfStatement::DropUser(DfDropUser {
if_exists: true,
name: String::from("test"),
hostname: String::from("%"),
}),
)?;
Ok(())
}
#[test]
fn grant_privilege_test() -> Result<()> {
expect_parse_ok(
"GRANT ALL ON * TO 'test'@'localhost'",
DfStatement::GrantPrivilege(DfGrantStatement {
name: String::from("test"),
hostname: String::from("localhost"),
on: DfGrantObject::Database(None),
priv_types: UserPrivilegeSet::all_privileges(),
}),
)?;
expect_parse_ok(
"GRANT ALL PRIVILEGES ON * TO 'test'@'localhost'",
DfStatement::GrantPrivilege(DfGrantStatement {
name: String::from("test"),
hostname: String::from("localhost"),
on: DfGrantObject::Database(None),
priv_types: UserPrivilegeSet::all_privileges(),
}),
)?;
expect_parse_ok(
"GRANT INSERT ON `db1`.`tb1` TO 'test'@'localhost'",
DfStatement::GrantPrivilege(DfGrantStatement {
name: String::from("test"),
hostname: String::from("localhost"),
on: DfGrantObject::Table(Some("db1".into()), "tb1".into()),
priv_types: {
let mut privileges = UserPrivilegeSet::empty();
privileges.set_privilege(UserPrivilegeType::Insert);
privileges
},
}),
)?;
expect_parse_ok(
"GRANT INSERT ON `tb1` TO 'test'@'localhost'",
DfStatement::GrantPrivilege(DfGrantStatement {
name: String::from("test"),
hostname: String::from("localhost"),
on: DfGrantObject::Table(None, "tb1".into()),
priv_types: {
let mut privileges = UserPrivilegeSet::empty();
privileges.set_privilege(UserPrivilegeType::Insert);
privileges
},
}),
)?;
expect_parse_ok(
"GRANT INSERT ON `db1`.'*' TO 'test'@'localhost'",
DfStatement::GrantPrivilege(DfGrantStatement {
name: String::from("test"),
hostname: String::from("localhost"),
on: DfGrantObject::Database(Some("db1".into())),
priv_types: {
let mut privileges = UserPrivilegeSet::empty();
privileges.set_privilege(UserPrivilegeType::Insert);
privileges
},
}),
)?;
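    // Note: granting on `db1`.'*' is parsed as a database-level grant,
    // i.e. DfGrantObject::Database(Some("db1")), not as a table named '*'.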
expect_parse_ok(
"GRANT CREATE, SELECT ON * TO 'test'@'localhost'",
DfStatement::GrantPrivilege(DfGrantStatement {
name: String::from("test"),
hostname: String::from("localhost"),
on: DfGrantObject::Database(None),
priv_types: {
let mut privileges = UserPrivilegeSet::empty();
privileges.set_privilege(UserPrivilegeType::Select);
privileges.set_privilege(UserPrivilegeType::Create);
privileges
},
}),
)?;
expect_parse_ok(
"GRANT CREATE USER, CREATE ROLE, CREATE, SELECT ON * TO 'test'@'localhost'",
DfStatement::GrantPrivilege(DfGrantStatement {
name: String::from("test"),
hostname: String::from("localhost"),
on: DfGrantObject::Database(None),
priv_types: {
let mut privileges = UserPrivilegeSet::empty();
privileges.set_privilege(UserPrivilegeType::Create);
privileges.set_privilege(UserPrivilegeType::CreateUser);
privileges.set_privilege(UserPrivilegeType::CreateRole);
privileges.set_privilege(UserPrivilegeType::Select);
privileges
},
}),
)?;
expect_parse_err(
"GRANT TEST, ON * TO 'test'@'localhost'",
String::from("sql parser error: Expected privilege type, found: TEST"),
)?;
expect_parse_err(
"GRANT SELECT, ON * TO 'test'@'localhost'",
String::from("sql parser error: Expected privilege type, found: ON"),
)?;
expect_parse_err(
"GRANT SELECT IN * TO 'test'@'localhost'",
String::from("sql parser error: Expected keyword ON, found: IN"),
)?;
expect_parse_err(
"GRANT SELECT ON * 'test'@'localhost'",
String::from("sql parser error: Expected keyword TO, found: 'test'"),
)?;
expect_parse_err(
"GRANT INSERT ON *.`tb1` TO 'test'@'localhost'",
String::from("sql parser error: Expected whitespace, found: ."),
)?;
Ok(())
}
#[test]
fn revoke_privilege_test() -> Result<()> {
expect_parse_ok(
"REVOKE ALL ON * FROM 'test'@'localhost'",
DfStatement::RevokePrivilege(DfRevokeStatement {
username: String::from("test"),
hostname: String::from("localhost"),
on: DfGrantObject::Database(None),
priv_types: UserPrivilegeSet::all_privileges(),
}),
)?;
expect_parse_err(
"REVOKE SELECT ON * 'test'@'localhost'",
String::from("sql parser error: Expected keyword FROM, found: 'test'"),
)?;
Ok(())
}
#[test]
fn create_stage_test() -> Result<()> {
expect_parse_ok(
"CREATE STAGE test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z')",
DfStatement::CreateStage(DfCreateStage {
if_not_exists: false,
stage_name: "test_stage".to_string(),
stage_params: StageParams::new("s3://load/files/", Credentials { access_key_id: "1a2b3c".to_string(), secret_access_key: "4x5y6z".to_string() }),
file_format: FileFormat::default(),
comments: "".to_string(),
}),
)?;
expect_parse_ok(
"CREATE STAGE IF NOT EXISTS test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z')",
DfStatement::CreateStage(DfCreateStage {
if_not_exists: true,
stage_name: "test_stage".to_string(),
stage_params: StageParams::new("s3://load/files/", Credentials { access_key_id: "1a2b3c".to_string(), secret_access_key: "4x5y6z".to_string() }),
file_format: FileFormat::default(),
comments: "".to_string(),
}),
)?;
expect_parse_ok(
"CREATE STAGE IF NOT EXISTS test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(FORMAT=CSV compression=GZIP record_delimiter=',')",
DfStatement::CreateStage(DfCreateStage {
if_not_exists: true,
stage_name: "test_stage".to_string(),
stage_params: StageParams::new("s3://load/files/", Credentials { access_key_id: "1a2b3c".to_string(), secret_access_key: "4x5y6z".to_string() }),
file_format: FileFormat { compression: Compression::Gzip, record_delimiter: ",".to_string(),..Default::default()},
comments: "".to_string(),
}),
)?;
expect_parse_ok(
"CREATE STAGE IF NOT EXISTS test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(FORMAT=CSV compression=GZIP record_delimiter=',') comments='test'",
DfStatement::CreateStage(DfCreateStage {
if_not_exists: true,
stage_name: "test_stage".to_string(),
stage_params: StageParams::new("s3://load/files/", Credentials { access_key_id: "1a2b3c".to_string(), secret_access_key: "4x5y6z".to_string() }),
file_format: FileFormat { compression: Compression::Gzip, record_delimiter: ",".to_string(),..Default::default()},
comments: "test".to_string(),
}),
)?;
expect_parse_ok(
"CREATE STAGE test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(FORMAT=Parquet compression=AUTO) comments='test'",
DfStatement::CreateStage(DfCreateStage {
if_not_exists: false,
stage_name: "test_stage".to_string(),
stage_params: StageParams::new("s3://load/files/", Credentials { access_key_id: "1a2b3c".to_string(), secret_access_key: "4x5y6z".to_string() }),
file_format: FileFormat { format: Format::Parquet, compression: Compression::Auto ,..Default::default()},
comments: "test".to_string(),
}),
)?;
expect_parse_ok(
"CREATE STAGE test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(FORMAT=csv compression=AUTO) comments='test'",
DfStatement::CreateStage(DfCreateStage {
if_not_exists: false,
stage_name: "test_stage".to_string(),
stage_params: StageParams::new("s3://load/files/", Credentials { access_key_id: "1a2b3c".to_string(), secret_access_key: "4x5y6z".to_string() }),
file_format: FileFormat { format: Format::Csv, compression: Compression::Auto,..Default::default()},
comments: "test".to_string(),
}),
)?;
expect_parse_ok(
"CREATE STAGE test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(FORMAT=json) comments='test'",
DfStatement::CreateStage(DfCreateStage {
if_not_exists: false,
stage_name: "test_stage".to_string(),
stage_params: StageParams::new("s3://load/files/", Credentials { access_key_id: "1a2b3c".to_string(), secret_access_key: "4x5y6z".to_string() }),
file_format: FileFormat { format: Format::Json,..Default::default()},
comments: "test".to_string(),
}),
)?;
expect_parse_err(
"CREATE STAGE test_stage credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(FORMAT=csv compression=AUTO record_delimiter=NONE) comments='test'",
String::from("sql parser error: Missing URL"),
)?;
expect_parse_err(
"CREATE STAGE test_stage url='s3://load/files/' password=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(FORMAT=csv compression=AUTO record_delimiter=NONE) comments='test'",
String::from("sql parser error: Expected end of statement, found: password"),
)?;
expect_parse_err(
"CREATE STAGE test_stage url='s4://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(FORMAT=csv compression=AUTO record_delimiter=NONE) comments='test'",
String::from("sql parser error: Not supported storage"),
)?;
expect_parse_err(
"CREATE STAGE test_stage url='s3://load/files/' credentials=(access_key='1a2b3c' secret_access_key='4x5y6z') file_format=(FORMAT=csv compression=AUTO record_delimiter=NONE) comments='test'",
String::from("sql parser error: Invalid credentials options: unknown field `access_key`, expected `access_key_id` or `secret_access_key`"),
)?;
expect_parse_err(
"CREATE STAGE test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' aecret_access_key='4x5y6z') file_format=(FORMAT=csv compression=AUTO record_delimiter=NONE) comments='test'",
String::from("sql parser error: Invalid credentials options: unknown field `aecret_access_key`, expected `access_key_id` or `secret_access_key`"),
)?;
expect_parse_err_contains(
"CREATE STAGE test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(type=csv compression=AUTO record_delimiter=NONE) comments='test'",
String::from("unknown field `type`"),
)?;
expect_parse_err_contains(
"CREATE STAGE test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(format=csv compression=AUTO1 record_delimiter=NONE) comments='test'",
String::from("unknown variant `auto1`"),
)?;
expect_parse_err_contains(
"CREATE STAGE test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(format=csv1 compression=AUTO record_delimiter=NONE) comments='test'",
String::from("unknown variant `csv1`"),
)?;
Ok(())
}
#[test]
fn create_table_select() -> Result<()> {
expect_parse_ok(
"CREATE TABLE foo AS SELECT a, b FROM bar",
DfStatement::CreateTable(DfCreateTable {
if_not_exists: false,
name: ObjectName(vec![Ident::new("foo")]),
columns: vec![],
engine: "FUSE".to_string(),
options: maplit::hashmap! {},
like: None,
query: Some(verified_query("SELECT a, b FROM bar")?),
}),
)?;
expect_parse_ok(
"CREATE TABLE foo (a INT) SELECT a, b FROM bar",
DfStatement::CreateTable(DfCreateTable {
if_not_exists: false,
name: ObjectName(vec![Ident::new("foo")]),
columns: vec![make_column_def("a", DataType::Int(None))],
engine: "FUSE".to_string(),
options: maplit::hashmap! {},
like: None,
query: Some(verified_query("SELECT a, b FROM bar")?),
}),
)?;
Ok(())
}
#[test]
fn optimize_table() -> Result<()> {
{
let sql = "optimize TABLE t1";
let expected = DfStatement::OptimizeTable(DfOptimizeTable {
name: ObjectName(vec![Ident::new("t1")]),
operation: Optimization::PURGE,
});
expect_parse_ok(sql, expected)?;
}
{
let sql = "OPTIMIZE tABLE t1";
let expected = DfStatement::OptimizeTable(DfOptimizeTable {
name: ObjectName(vec![Ident::new("t1")]),
operation: Optimization::PURGE,
});
expect_parse_ok(sql, expected)?;
}
{
let sql = "optimize TABLE t1 purge";
let expected = DfStatement::OptimizeTable(DfOptimizeTable {
name: ObjectName(vec![Ident::new("t1")]),
operation: Optimization::PURGE,
});
expect_parse_ok(sql, expected)?;
}
{
let sql = "optimize TABLE t1 compact";
let expected = DfStatement::OptimizeTable(DfOptimizeTable {
name: ObjectName(vec![Ident::new("t1")]),
operation: Optimization::COMPACT,
});
expect_parse_ok(sql, expected)?;
}
{
let sql = "optimize TABLE t1 all";
let expected = DfStatement::OptimizeTable(DfOptimizeTable {
name: ObjectName(vec![Ident::new("t1")]),
operation: Optimization::ALL,
});
expect_parse_ok(sql, expected)?;
}
{
let sql = "optimize TABLE t1 unacceptable";
expect_parse_err(
sql,
"sql parser error: Expected one of PURGE, COMPACT, ALL, found: unacceptable"
.to_string(),
)?;
}
{
let sql = "optimize TABLE t1 (";
expect_parse_err(
sql,
"sql parser error: Expected Nothing, or one of PURGE, COMPACT, ALL, found: ("
.to_string(),
)?;
}
Ok(())
}
#[test]
fn drop_stage_test() -> Result<()> {
expect_parse_ok(
"DROP STAGE test_stage",
DfStatement::DropStage(DfDropStage {
if_exists: false,
stage_name: "test_stage".to_string(),
}),
)?;
expect_parse_ok(
"DROP STAGE IF EXISTS test_stage",
DfStatement::DropStage(DfDropStage {
if_exists: true,
stage_name: "test_stage".to_string(),
}),
)?;
Ok(())
} | { |
SideBar.test.tsx | import React from 'react'
import { shallow, mount } from 'enzyme'
import { mocked } from 'ts-jest/utils'
import { Menu } from 'react-feather'
import { Drawer, DrawerContent, ChakraProvider } from '@chakra-ui/react'
import { SideBarElement } from '../components/SidebarElement'
import { SideBar } from '../components/SideBar'
import { goToResourceEvent } from '../events'
import { testAdmin } from '../../setupTests'
jest.mock('react-router-dom', () => ({
useHistory: () => ({
push: jest.fn(),
}),
useLocation: () => ({
pathname: 'localhost:0000/appeals/',
}),
}))
jest.mock('../events')
const goToResourceEventMock = mocked(goToResourceEvent)
const getSideBarElement = (): JSX.Element => (
<ChakraProvider>
<SideBarElement title={testAdmin.verboseName || testAdmin.name} path={`/${testAdmin.name}/`} />
</ChakraProvider>
)
test('Side bar component properly rendered', () => {
const component = shallow(
<SideBar header="Test">
<h1>Test</h1>
<h1>Test2</h1>
</SideBar>
)
expect(component.find(Menu).length).toEqual(1)
expect(component.find(Drawer).length).toEqual(1)
expect(component.find(DrawerContent).length).toEqual(1)
expect(component.find('h1').length).toEqual(2)
})
test('Side bar element component properly rendered', () => {
const component = mount(getSideBarElement())
expect(component.find('Button').length).toBe(1)
})
test('Side bar element component on click handle', () => {
const component = mount(getSideBarElement())
;(component.find('Button').props() as { onClick: Function }).onClick()
| expect(goToResourceEventMock.mock.calls.length).toBe(1)
}) |
|
views.py | from django.core.mail import send_mail
from django.shortcuts import render, get_object_or_404, redirect
from django.http import HttpResponse, HttpResponseRedirect
from django.urls import reverse
from django.contrib import messages
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth import logout, authenticate, login
from django.contrib.auth.decorators import login_required
from decouple import config
from geeksurvey.settings import *
import json
from .models import Study, Profile
from .forms import *
def index(request):
if request.user.is_authenticated:
profile = Profile.objects.get(user=request.user)
context = {
'profile': profile,
}
else:
context = {}
return render(request, 'home.html', context)
def | (request):
return render(request, 'working.html')
def help(request):
return render(request, 'help.html')
@login_required
def participate(request):
all_studies = Study.objects.all()
enrolled_studies = []
completed_studies = []
for study in all_studies:
if request.user in study.completed.all():
completed_studies.append(study)
elif request.user in study.enrolled.all():
enrolled_studies.append(study)
profile = Profile.objects.get(user=request.user)
context = {
'enrolled_studies':enrolled_studies,
'completed_studies':completed_studies,
'profile': profile,
}
return render(request, 'participate/index.html', context)
@login_required
def part_discover(request):
user_profile = Profile.objects.get(user=request.user)
all_studies = Study.objects.all()
eligible_studies = []
for study in all_studies:
if user_profile.can_enroll(study):
eligible_studies.append(study)
context = {
'studies': eligible_studies,
'profile': user_profile,
}
return render(request, 'participate/discover.html', context)
@login_required
def profile(request):
profile = Profile.objects.get(user=request.user)
context={'profile':profile}
return render(request, 'profile/index.html', context)
# public profile view, accessible by URL
def profile_view(request, username):
user = get_object_or_404(User, username=username)
profile = Profile.objects.get(user=user)
context = {
'user':user,
'profile':profile,
}
return render(request, 'profile/view.html', context)
@login_required
def profile_update(request):
profile = Profile.objects.get(user=request.user)
if request.method == 'POST':
u_form = UserUpdateForm(request.POST, instance=request.user)
p_form = ProfileUpdateForm(request.POST,
request.FILES,
instance=request.user.profile)
if u_form.is_valid() and p_form.is_valid():
u_form.save()
new_profile = p_form.save(commit=False)
new_profile.updated_once = True
new_profile.save()
messages.success(request, f'Your account has been updated!')
return redirect('profile') # Redirect back to profile page
else:
u_form = UserUpdateForm(instance=request.user)
p_form = ProfileUpdateForm(instance=request.user.profile)
p_form['open_source_experience'].label = "Experienced With Open Source Development?"
p_form['email_opt_in'].label = "Opt In For Email Notifications?"
context = {
'profile': profile,
'u_form': u_form,
'p_form': p_form
}
return render(request, 'profile/update.html', context)
@login_required
def research(request):
profile = Profile.objects.get(user=request.user)
# show existing studies created by the user
studies = Study.objects.filter(owner=request.user)
context = {
'profile':profile,
'studies':studies
}
return render(request, 'research/index.html', context)
| working |
config.go | package pool
import (
"time"
)
//default config
const (
defaultInitialSize = 5
defaultMaxSize = 10
defaultExpiryTime = 5*time.Second
defaultAddr string = "127.0.0.1:8080"
defaultMaxIdleNum = 3
) | //pool config
type config struct {
initialSize int
maxSize int
expiryTime time.Duration
addr string
maxIdleNum int
}
//func option
type Options func(*config)
func ConfigInitSize(size int) Options{
return func(c *config) {
c.initialSize = size
}
}
func ConfigMaxSize(size int) Options {
return func(c *config) {
c.maxSize = size
}
}
func ConfigExpiryTime(t time.Duration) Options {
return func(c *config) {
c.expiryTime = t
}
}
func ConfigAddr(addr string) Options {
return func(c *config) {
c.addr = addr
}
}
func ConfigMaxIdleNum(num int) Options {
return func(c *config) {
c.maxIdleNum = num
}
} | |
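// Illustrative sketch (not part of the original file): the functional options
// above are typically applied by a constructor; the name newConfig is an
// assumption for illustration only.
//
//	func newConfig(opts ...Options) *config {
//		c := &config{
//			initialSize: defaultInitialSize,
//			maxSize:     defaultMaxSize,
//			expiryTime:  defaultExpiryTime,
//			addr:        defaultAddr,
//			maxIdleNum:  defaultMaxIdleNum,
//		}
//		for _, opt := range opts {
//			opt(c)
//		}
//		return c
//	}
//
// Usage: newConfig(ConfigMaxSize(20), ConfigAddr("127.0.0.1:9090"))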
block_test.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
block::{
block_test_utils::{certificate_for_genesis, *},
Block,
},
quorum_cert::QuorumCert,
};
use aptos_crypto::hash::HashValue;
use aptos_types::{validator_signer::ValidatorSigner, validator_verifier::ValidatorVerifier};
use std::{collections::BTreeMap, sync::Arc};
#[test]
fn test_genesis() {
// Test genesis and the next block
let genesis_block = Block::make_genesis_block();
assert_eq!(genesis_block.parent_id(), HashValue::zero());
assert_ne!(genesis_block.id(), HashValue::zero());
assert!(genesis_block.is_genesis_block());
}
#[test]
fn test_nil_block() {
let genesis_block = Block::make_genesis_block();
let quorum_cert = certificate_for_genesis();
let nil_block = Block::new_nil(1, quorum_cert);
assert_eq!(
nil_block.quorum_cert().certified_block().id(),
genesis_block.id()
);
assert_eq!(nil_block.round(), 1);
assert_eq!(nil_block.timestamp_usecs(), genesis_block.timestamp_usecs());
assert_eq!(nil_block.is_nil_block(), true);
assert!(nil_block.author().is_none());
let dummy_verifier = Arc::new(ValidatorVerifier::new(BTreeMap::new()));
assert!(nil_block
.validate_signature(dummy_verifier.as_ref())
.is_ok());
assert!(nil_block.verify_well_formed().is_ok());
let signer = ValidatorSigner::random(None);
let payload = vec![];
let parent_block_info = nil_block.quorum_cert().certified_block();
let nil_block_qc = gen_test_certificate(
vec![&signer],
nil_block.gen_block_info(
parent_block_info.executed_state_id(),
parent_block_info.version(),
parent_block_info.next_epoch_state().cloned(),
),
nil_block.quorum_cert().certified_block().clone(),
None,
);
println!(
"{:?} {:?}",
nil_block.id(),
nil_block_qc.certified_block().id()
);
let nil_block_child = Block::new_proposal(
payload,
2,
aptos_infallible::duration_since_epoch().as_micros() as u64,
nil_block_qc,
&signer,
);
assert_eq!(nil_block_child.is_nil_block(), false);
assert_eq!(nil_block_child.round(), 2);
assert_eq!(nil_block_child.parent_id(), nil_block.id());
}
#[test]
fn test_block_relation() {
let signer = ValidatorSigner::random(None);
// Test genesis and the next block
let genesis_block = Block::make_genesis_block();
let quorum_cert = certificate_for_genesis();
let payload = vec![];
let next_block = Block::new_proposal(
payload.clone(),
1,
aptos_infallible::duration_since_epoch().as_micros() as u64,
quorum_cert,
&signer,
);
assert_eq!(next_block.round(), 1);
assert_eq!(genesis_block.is_parent_of(&next_block), true);
assert_eq!(
next_block.quorum_cert().certified_block().id(),
genesis_block.id()
);
assert_eq!(next_block.payload(), Some(&payload));
let cloned_block = next_block.clone();
assert_eq!(cloned_block.round(), next_block.round());
}
// Ensure that blocks that extend from the same QuorumCertificate but with different signatures
// have different block ids.
#[test]
fn test_same_qc_different_authors() | {
let signer = ValidatorSigner::random(None);
let genesis_qc = certificate_for_genesis();
let round = 1;
let payload = vec![];
let current_timestamp = aptos_infallible::duration_since_epoch().as_micros() as u64;
let block_round_1 = Block::new_proposal(
payload.clone(),
round,
current_timestamp,
genesis_qc.clone(),
&signer,
);
let signature = signer.sign(genesis_qc.ledger_info().ledger_info());
let mut ledger_info_altered = genesis_qc.ledger_info().clone();
ledger_info_altered.add_signature(signer.author(), signature);
let genesis_qc_altered = QuorumCert::new(genesis_qc.vote_data().clone(), ledger_info_altered);
let block_round_1_altered = Block::new_proposal(
payload.clone(),
round,
current_timestamp,
genesis_qc_altered,
&signer,
);
let block_round_1_same =
Block::new_proposal(payload, round, current_timestamp, genesis_qc, &signer);
assert!(block_round_1.id() != block_round_1_altered.id());
assert_eq!(block_round_1.id(), block_round_1_same.id());
} |
|
rust_proto_library.bzl | load("//rust:rust_proto_lib.bzl", "rust_proto_lib")
load("@rules_rust//rust:rust.bzl", "rust_library")
def rust_proto_library(name, **kwargs): # buildifier: disable=function-docstring
# Compile protos
name_pb = name + "_pb"
name_lib = name + "_lib"
rust_proto_compile(
name = name_pb,
**{
k: v
for (k, v) in kwargs.items()
if k in ["protos" if "protos" in kwargs else "deps"] + proto_compile_attrs.keys()
} # Forward args
)
# Create lib file
rust_proto_lib(
name = name_lib,
compilation = name_pb,
grpc = False,
)
# Create rust library
rust_library(
name = name,
srcs = [name_pb, name_lib],
deps = PROTO_DEPS + (kwargs.get("deps", []) if "protos" in kwargs else []),
visibility = kwargs.get("visibility"),
tags = kwargs.get("tags"),
)
PROTO_DEPS = [
Label("//rust/raze:protobuf"),
] | """Generated definition of rust_proto_library."""
load("//rust:rust_proto_compile.bzl", "rust_proto_compile")
load("//internal:compile.bzl", "proto_compile_attrs") |
|
analyzeRDF.py | # -*- coding: utf-8 -*-
"""
Created on Fri Apr 27 14:45:24 2012
@author: proto
"""
'''
this method classifies reactants according to the RDF information, and tells
us which reactants are the same and how they differ
(compartment etc.)
'''
from sbml2bngl import SBML2BNGL
import libsbml
import collections
def getAnnotations(parser,stringKey=None):
|
def getSpeciesAnnotationStructure(parser):
model = parser.model
for species in model.getListOfSpecies():
name = species.getName()
speciesId = species.getId()
annotation = species.getAnnotation()
lista = libsbml.CVTermList()
libsbml.RDFAnnotationParser.parseRDFAnnotation(annotation,lista)
for idx in range(0,lista.getSize()):
for idx2 in range(0, lista.get(idx).getResources().getLength()):
resource = lista.get(idx).getResources().getValue(idx2)
qualifierType = lista.get(idx).getQualifierType()
qualifierDescription= bioqual[lista.get(idx).getBiologicalQualifierType()] if qualifierType \
else modqual[lista.get(idx).getModelQualifierType()]
#resource = resolveAnnotation(resource)
def getEquivalence(species,rdf_database):
'''
    *species* is the species whose equivalents we want to find.
    This method searches the RDF database to check whether param 'species'
    is equivalent to any other element in the species database.
'''
for element in rdf_database:
if species in rdf_database[element]:
if rdf_database[element].index(species) == 0:
return []
#return [x for x in rdf_database[element] if x != species]
#well only return the first one by default
return [rdf_database[element][0]]
return []
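# Illustrative example (not in the original file; the identifiers are made up):
# with rdf_database = {'uniprot:P01588': ['Epo', 'Epo_cyt']},
# getEquivalence('Epo_cyt', rdf_database) returns ['Epo'] (the canonical first
# entry), while getEquivalence('Epo', rdf_database) returns [] because 'Epo' is
# itself the first element of its equivalence list.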
if __name__ == "__main__":
reader = libsbml.SBMLReader()
#BIOMD0000000272
document = reader.readSBMLFromFile('XMLExamples/curated/BIOMD0000000219.xml')
#document = reader.readSBMLFromFile('XMLExamples/simple4.xml')
model = document.getModel()
parser = SBML2BNGL(model)
annotationDictionary = getAnnotations(parser)
print annotationDictionary
#print getEquivalence('SAv_EpoR',annotationDictionary)
#print annotation
#print rules
| annotation = parser.getSpeciesAnnotation()
annotationDictionary = collections.defaultdict(set)
for key,value in annotation.items():
annotationList = []
if annotation[key] != None:
for element in annotation[key]:
for index in range(0,element.getNumAttributes()):
if not stringKey or stringKey in element.getValue(index):
annotationList.append(element.getValue(index))
if annotationList == []:
continue
annotationDictionary[key].add(tuple(annotationList))
#annotationDictionary[frozenset(annotationList)].sort(lambda x,y: cmp(len(x), len(y)))
return annotationDictionary |
package_available.go | // Copyright 2021 VMware, Inc. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
package tkgpackagedatamodel
// PackageAvailableOptions includes fields for package available
type PackageAvailableOptions struct {
KubeConfig string
Namespace string
PackageName string
AllNamespaces bool
ValuesSchema bool
}
// NewPackageAvailableOptions instantiates PackageAvailableOptions
func NewPackageAvailableOptions() *PackageAvailableOptions | {
return &PackageAvailableOptions{}
} |
|
api.rs | // Types defined in this file conform to the schema https://github.com/Shopify/shopify/blob/main/db/graphql/shopify_vm/order_discounts.graphql
// All input fields are optional as they may not be included in the input query
#![allow(dead_code)]
| pub type Boolean = bool;
pub type Float = f64;
pub type Int = i64;
pub type ID = String;
pub mod input {
use super::*;
use serde::Deserialize;
pub type UnsignedInt64 = u64;
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all(deserialize = "camelCase"))]
pub struct Input {
pub customer: Option<Customer>,
pub delivery_lines: Option<Vec<DeliveryLine>>,
pub locale: Option<String>,
pub merchandise_lines: Option<Vec<MerchandiseLine>>,
}
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all(deserialize = "camelCase"))]
pub struct Customer {
pub accepts_marketing: Option<Boolean>,
pub email: Option<String>,
pub id: Option<ID>,
pub order_count: Option<Int>,
pub phone: Option<String>,
pub tags: Option<Vec<String>>,
pub total_spent: Option<Money>,
}
#[derive(Clone, Debug, Deserialize)]
pub struct Money {
pub currency: String,
pub subunits: UnsignedInt64,
}
#[derive(Clone, Debug, Deserialize)]
pub struct DeliveryLine {
pub destination: Option<Address>,
pub id: Option<ID>,
pub subscription: Option<Boolean>,
}
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all(deserialize = "camelCase"))]
pub struct Address {
pub city: Option<String>,
pub country_code: Option<String>,
pub phone: Option<String>,
pub po_box: Option<Boolean>,
pub province_code: Option<String>,
pub zip: Option<String>,
}
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all(deserialize = "camelCase"))]
pub struct MerchandiseLine {
pub id: Option<ID>,
pub price: Option<Money>,
pub properties: Option<Vec<Properties>>,
pub quantity: Option<Int>,
pub selling_plan: Option<SellingPlan>,
pub variant: Option<Variant>,
pub weight: Option<Int>,
}
#[derive(Clone, Debug, Deserialize)]
pub struct Properties {
pub key: Option<String>,
pub value: Option<String>,
}
#[derive(Clone, Debug, Deserialize)]
pub struct SellingPlan {
pub id: Option<ID>,
}
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all(deserialize = "camelCase"))]
pub struct Variant {
pub compare_at_price: Option<Money>,
pub id: Option<ID>,
pub product: Option<Product>,
pub sku: Option<String>,
pub title: Option<String>,
}
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all(deserialize = "camelCase"))]
pub struct Product {
pub gift_card: Option<Boolean>,
pub id: Option<ID>,
pub tags: Option<Vec<String>>,
pub title: Option<String>,
pub type_: Option<String>,
pub vendor: Option<String>,
}
}
use serde::Serialize;
use serde_with::skip_serializing_none;
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all(serialize = "camelCase"))]
pub struct FunctionResult {
pub discount_application_strategy: DiscountApplicationStrategy,
pub discounts: Vec<Discount>,
}
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all(serialize = "SCREAMING_SNAKE_CASE"))]
pub enum DiscountApplicationStrategy {
First,
Maximum,
}
#[skip_serializing_none]
#[derive(Clone, Debug, Serialize)]
pub struct Discount {
pub value: Value,
pub targets: Vec<Target>,
pub message: Option<String>,
pub conditions: Option<Vec<Condition>>,
}
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all(serialize = "camelCase"))]
pub enum Value {
FixedAmount(FixedAmount),
Percentage(Percentage),
}
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all(serialize = "camelCase"))]
pub struct FixedAmount {
pub applies_to_each_item: Option<Boolean>,
pub value: Float,
}
#[derive(Clone, Debug, Serialize)]
pub struct Percentage {
pub value: Float,
}
#[skip_serializing_none]
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all(serialize = "camelCase"))]
pub enum Target {
ProductVariant { id: ID, quantity: Option<Int> },
}
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all(serialize = "camelCase"))]
pub enum Condition {
#[serde(rename_all(serialize = "camelCase"))]
ProductMinimumQuantity {
ids: Vec<ID>,
minimum_quantity: Int,
target_type: ConditionTargetType,
},
#[serde(rename_all(serialize = "camelCase"))]
ProductMinimumSubtotal {
ids: Vec<ID>,
minimum_amount: Float,
target_type: ConditionTargetType,
},
}
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all(serialize = "SCREAMING_SNAKE_CASE"))]
pub enum ConditionTargetType {
ProductVariant,
} | |
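// Illustrative sketch (not part of the original file; the values are made up):
// with the serde attributes above, a FunctionResult such as
//
//	FunctionResult {
//	    discount_application_strategy: DiscountApplicationStrategy::First,
//	    discounts: vec![Discount {
//	        value: Value::Percentage(Percentage { value: 10.0 }),
//	        targets: vec![Target::ProductVariant { id: "gid://...".to_string(), quantity: None }],
//	        message: None,
//	        conditions: None,
//	    }],
//	}
//
// serializes with camelCase keys and SCREAMING_SNAKE_CASE variants, e.g.
// {"discountApplicationStrategy":"FIRST","discounts":[{"value":{"percentage":{"value":10.0}},"targets":[{"productVariant":{"id":"gid://..."}}]}]}
// (skip_serializing_none omits the None-valued message, conditions, and quantity).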
tinkerbelltemplateconfig_test.go | package v1alpha1
import (
"testing"
"github.com/onsi/gomega"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"github.com/aws/eks-anywhere/pkg/api/v1alpha1/thirdparty/tinkerbell"
)
func TestGetTinkerbellTemplateConfig(t *testing.T) {
tests := []struct {
testName string
fileName string
wantConfigs map[string]*TinkerbellTemplateConfig
wantErr bool
}{
{
testName: "file doesn't exist",
fileName: "testdata/fake_file.yaml",
wantConfigs: nil,
wantErr: true,
},
{
testName: "not parseable file",
fileName: "testdata/not_parseable_cluster_tinkerbell.yaml",
wantConfigs: nil,
wantErr: true,
},
{
testName: "valid tinkerbell template config",
fileName: "testdata/cluster_1_21_valid_tinkerbell.yaml",
wantConfigs: map[string]*TinkerbellTemplateConfig{
"tink-test": {
TypeMeta: metav1.TypeMeta{
Kind: TinkerbellTemplateConfigKind,
APIVersion: SchemeBuilder.GroupVersion.String(),
},
ObjectMeta: metav1.ObjectMeta{
Name: "tink-test",
},
Spec: TinkerbellTemplateConfigSpec{
Template: tinkerbell.Workflow{
Version: "0.1",
Name: "tink-test",
GlobalTimeout: 6000,
ID: "",
Tasks: []tinkerbell.Task{ | "/dev:/dev",
"/dev/console:/dev/console",
"/lib/firmware:/lib/firmware:ro",
},
Actions: []tinkerbell.Action{
{
Name: "stream-image",
Image: "image2disk:v1.0.0",
Timeout: 360,
Environment: map[string]string{
"IMG_URL": "",
"DEST_DISK": "/dev/sda",
"COMPRESSED": "true",
},
},
},
},
},
},
},
},
},
wantErr: false,
},
{
testName: "multiple tinkerbell template configs",
fileName: "testdata/cluster_1_21_valid_multiple_tinkerbell_templates.yaml",
wantConfigs: map[string]*TinkerbellTemplateConfig{
"tink-test-1": {
TypeMeta: metav1.TypeMeta{
Kind: TinkerbellTemplateConfigKind,
APIVersion: SchemeBuilder.GroupVersion.String(),
},
ObjectMeta: metav1.ObjectMeta{
Name: "tink-test-1",
},
Spec: TinkerbellTemplateConfigSpec{
Template: tinkerbell.Workflow{
Version: "0.1",
Name: "tink-test-1",
GlobalTimeout: 6000,
ID: "",
Tasks: []tinkerbell.Task{
{
Name: "tink-test-1",
WorkerAddr: "{{.device_1}}",
Volumes: []string{
"/dev:/dev",
"/dev/console:/dev/console",
"/lib/firmware:/lib/firmware:ro",
},
Actions: []tinkerbell.Action{
{
Name: "stream-image",
Image: "image2disk:v1.0.0",
Timeout: 360,
Environment: map[string]string{
"IMG_URL": "",
"DEST_DISK": "/dev/sda",
"COMPRESSED": "true",
},
},
},
},
},
},
},
},
"tink-test-2": {
TypeMeta: metav1.TypeMeta{
Kind: TinkerbellTemplateConfigKind,
APIVersion: SchemeBuilder.GroupVersion.String(),
},
ObjectMeta: metav1.ObjectMeta{
Name: "tink-test-2",
},
Spec: TinkerbellTemplateConfigSpec{
Template: tinkerbell.Workflow{
Version: "0.1",
Name: "tink-test-2",
GlobalTimeout: 6000,
ID: "",
Tasks: []tinkerbell.Task{
{
Name: "tink-test-2",
WorkerAddr: "{{.device_1}}",
Volumes: []string{
"/dev:/dev",
"/dev/console:/dev/console",
"/lib/firmware:/lib/firmware:ro",
},
Actions: []tinkerbell.Action{
{
Name: "stream-image",
Image: "image2disk:v1.0.0",
Timeout: 360,
Environment: map[string]string{
"IMG_URL": "",
"DEST_DISK": "/dev/sda",
"COMPRESSED": "true",
},
},
},
},
},
},
},
},
},
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.testName, func(t *testing.T) {
g := gomega.NewWithT(t)
got, err := GetTinkerbellTemplateConfig(tt.fileName)
g.Expect((err != nil)).To(gomega.BeEquivalentTo(tt.wantErr))
g.Expect(got).To(gomega.BeEquivalentTo(tt.wantConfigs))
})
}
} | {
Name: "tink-test",
WorkerAddr: "{{.device_1}}",
Volumes: []string{ |
home.component.ts | import { Component, OnInit } from '@angular/core';
import { Router } from '@angular/router';
@Component({
selector: 'app-home',
templateUrl: './home.component.html',
styleUrls: ['./home.component.css']
})
export class | implements OnInit {
constructor(private router: Router) { }
ngOnInit(): void {
}
loginPage(): void {
this.router.navigateByUrl("/login")
}
aboutPage(): void {
this.router.navigateByUrl("/about")
}
}
| HomeComponent |
test.py | import time
import pytest
import random
import string
from helpers.test_tools import TSV
from helpers.test_tools import assert_eq_with_retry
from helpers.cluster import ClickHouseCluster
cluster = ClickHouseCluster(__file__)
def get_random_array():
return [random.randint(0, 1000) % 1000 for _ in range(random.randint(0, 1000))]
def get_random_string():
|
def insert_random_data(table, node, size):
data = [
'(' + ','.join((
"'2019-10-11'",
str(i),
"'" + get_random_string() + "'",
str(get_random_array()))) +
')' for i in range(size)
]
node.query("INSERT INTO {} VALUES {}".format(table, ','.join(data)))
def create_tables(name, nodes, node_settings, shard):
for i, (node, settings) in enumerate(zip(nodes, node_settings)):
node.query(
'''
CREATE TABLE {name}(date Date, id UInt32, s String, arr Array(Int32))
ENGINE = ReplicatedMergeTree('/clickhouse/tables/test/{shard}/{name}', '{repl}')
PARTITION BY toYYYYMM(date)
ORDER BY id
SETTINGS index_granularity = {index_granularity}, index_granularity_bytes = {index_granularity_bytes},
min_rows_for_wide_part = {min_rows_for_wide_part}, min_bytes_for_wide_part = {min_bytes_for_wide_part}
'''.format(name=name, shard=shard, repl=i, **settings))
def create_tables_old_format(name, nodes, shard):
for i, node in enumerate(nodes):
node.query(
'''
CREATE TABLE {name}(date Date, id UInt32, s String, arr Array(Int32))
ENGINE = ReplicatedMergeTree('/clickhouse/tables/test/{shard}/{name}', '{repl}', date, id, 64)
'''.format(name=name, shard=shard, repl=i))
node1 = cluster.add_instance('node1', config_dir="configs", with_zookeeper=True)
node2 = cluster.add_instance('node2', config_dir="configs", with_zookeeper=True)
settings_default = {'index_granularity' : 64, 'index_granularity_bytes' : 10485760, 'min_rows_for_wide_part' : 512, 'min_bytes_for_wide_part' : 0}
settings_not_adaptive = {'index_granularity' : 64, 'index_granularity_bytes' : 0, 'min_rows_for_wide_part' : 512, 'min_bytes_for_wide_part' : 0}
node3 = cluster.add_instance('node3', config_dir="configs", with_zookeeper=True)
node4 = cluster.add_instance('node4', config_dir="configs", main_configs=['configs/no_leader.xml'], with_zookeeper=True)
settings_compact = {'index_granularity' : 64, 'index_granularity_bytes' : 10485760, 'min_rows_for_wide_part' : 512, 'min_bytes_for_wide_part' : 0}
settings_wide = {'index_granularity' : 64, 'index_granularity_bytes' : 10485760, 'min_rows_for_wide_part' : 0, 'min_bytes_for_wide_part' : 0}
node5 = cluster.add_instance('node5', config_dir='configs', main_configs=['configs/compact_parts.xml'], with_zookeeper=True)
node6 = cluster.add_instance('node6', config_dir='configs', main_configs=['configs/compact_parts.xml'], with_zookeeper=True)
@pytest.fixture(scope="module")
def start_cluster():
try:
cluster.start()
create_tables('polymorphic_table', [node1, node2], [settings_default, settings_default], "shard1")
create_tables('non_adaptive_table', [node1, node2], [settings_not_adaptive, settings_default], "shard1")
create_tables('polymorphic_table_compact', [node3, node4], [settings_compact, settings_wide], "shard2")
create_tables('polymorphic_table_wide', [node3, node4], [settings_wide, settings_compact], "shard2")
create_tables_old_format('polymorphic_table', [node5, node6], "shard3")
yield cluster
finally:
cluster.shutdown()
@pytest.mark.parametrize(
('first_node', 'second_node'),
[
(node1, node2),
(node5, node6)
]
)
def test_polymorphic_parts_basics(start_cluster, first_node, second_node):
first_node.query("SYSTEM STOP MERGES")
second_node.query("SYSTEM STOP MERGES")
for size in [300, 300, 600]:
insert_random_data('polymorphic_table', first_node, size)
second_node.query("SYSTEM SYNC REPLICA polymorphic_table", timeout=20)
assert first_node.query("SELECT count() FROM polymorphic_table") == "1200\n"
assert second_node.query("SELECT count() FROM polymorphic_table") == "1200\n"
expected = "Compact\t2\nWide\t1\n"
assert TSV(first_node.query("SELECT part_type, count() FROM system.parts " \
"WHERE table = 'polymorphic_table' AND active GROUP BY part_type ORDER BY part_type")) == TSV(expected)
assert TSV(second_node.query("SELECT part_type, count() FROM system.parts " \
"WHERE table = 'polymorphic_table' AND active GROUP BY part_type ORDER BY part_type")) == TSV(expected)
first_node.query("SYSTEM START MERGES")
second_node.query("SYSTEM START MERGES")
for _ in range(40):
insert_random_data('polymorphic_table', first_node, 10)
insert_random_data('polymorphic_table', second_node, 10)
first_node.query("SYSTEM SYNC REPLICA polymorphic_table", timeout=20)
second_node.query("SYSTEM SYNC REPLICA polymorphic_table", timeout=20)
assert first_node.query("SELECT count() FROM polymorphic_table") == "2000\n"
assert second_node.query("SELECT count() FROM polymorphic_table") == "2000\n"
first_node.query("OPTIMIZE TABLE polymorphic_table FINAL")
second_node.query("SYSTEM SYNC REPLICA polymorphic_table", timeout=20)
assert first_node.query("SELECT count() FROM polymorphic_table") == "2000\n"
assert second_node.query("SELECT count() FROM polymorphic_table") == "2000\n"
assert first_node.query("SELECT DISTINCT part_type FROM system.parts WHERE table = 'polymorphic_table' AND active") == "Wide\n"
assert second_node.query("SELECT DISTINCT part_type FROM system.parts WHERE table = 'polymorphic_table' AND active") == "Wide\n"
# Check alters and mutations also work
first_node.query("ALTER TABLE polymorphic_table ADD COLUMN ss String")
first_node.query("ALTER TABLE polymorphic_table UPDATE ss = toString(id) WHERE 1")
second_node.query("SYSTEM SYNC REPLICA polymorphic_table", timeout=20)
first_node.query("SELECT count(ss) FROM polymorphic_table") == "2000\n"
first_node.query("SELECT uniqExact(ss) FROM polymorphic_table") == "600\n"
second_node.query("SELECT count(ss) FROM polymorphic_table") == "2000\n"
second_node.query("SELECT uniqExact(ss) FROM polymorphic_table") == "600\n"
# Check that follower replicas create parts of the same type that the leader has chosen at merge.
@pytest.mark.parametrize(
('table', 'part_type'),
[
('polymorphic_table_compact', 'Compact'),
('polymorphic_table_wide', 'Wide')
]
)
def test_different_part_types_on_replicas(start_cluster, table, part_type):
leader = node3
follower = node4
assert leader.query("SELECT is_leader FROM system.replicas WHERE table = '{}'".format(table)) == "1\n"
assert node4.query("SELECT is_leader FROM system.replicas WHERE table = '{}'".format(table)) == "0\n"
for _ in range(3):
insert_random_data(table, leader, 100)
leader.query("OPTIMIZE TABLE {} FINAL".format(table))
follower.query("SYSTEM SYNC REPLICA {}".format(table), timeout=20)
expected = "{}\t1\n".format(part_type)
assert TSV(leader.query("SELECT part_type, count() FROM system.parts " \
"WHERE table = '{}' AND active GROUP BY part_type ORDER BY part_type".format(table))) == TSV(expected)
assert TSV(follower.query("SELECT part_type, count() FROM system.parts " \
"WHERE table = '{}' AND active GROUP BY part_type ORDER BY part_type".format(table))) == TSV(expected)
node7 = cluster.add_instance('node7', config_dir="configs", with_zookeeper=True, image='yandex/clickhouse-server:19.17.8.54', stay_alive=True, with_installed_binary=True)
node8 = cluster.add_instance('node8', config_dir="configs", with_zookeeper=True)
settings7 = {'index_granularity' : 64, 'index_granularity_bytes' : 10485760}
settings8 = {'index_granularity' : 64, 'index_granularity_bytes' : 10485760, 'min_rows_for_wide_part' : 512, 'min_bytes_for_wide_part' : 0}
@pytest.fixture(scope="module")
def start_cluster_diff_versions():
try:
for name in ['polymorphic_table', 'polymorphic_table_2']:
cluster.start()
node7.query(
'''
CREATE TABLE {name}(date Date, id UInt32, s String, arr Array(Int32))
ENGINE = ReplicatedMergeTree('/clickhouse/tables/test/shard5/{name}', '1')
PARTITION BY toYYYYMM(date)
ORDER BY id
SETTINGS index_granularity = {index_granularity}, index_granularity_bytes = {index_granularity_bytes}
'''.format(name=name, **settings7)
)
node8.query(
'''
CREATE TABLE {name}(date Date, id UInt32, s String, arr Array(Int32))
ENGINE = ReplicatedMergeTree('/clickhouse/tables/test/shard5/{name}', '2')
PARTITION BY toYYYYMM(date)
ORDER BY id
SETTINGS index_granularity = {index_granularity}, index_granularity_bytes = {index_granularity_bytes},
min_rows_for_wide_part = {min_rows_for_wide_part}, min_bytes_for_wide_part = {min_bytes_for_wide_part}
'''.format(name=name, **settings8)
)
yield cluster
finally:
cluster.shutdown()
@pytest.mark.skip(reason="compatability is temporary broken")
def test_polymorphic_parts_diff_versions(start_cluster_diff_versions):
# Check that replication with Wide parts works between different versions.
node_old = node7
node_new = node8
insert_random_data('polymorphic_table', node7, 100)
node8.query("SYSTEM SYNC REPLICA polymorphic_table", timeout=20)
assert node8.query("SELECT count() FROM polymorphic_table") == "100\n"
assert node8.query("SELECT DISTINCT part_type FROM system.parts WHERE table = 'polymorphic_table' and active") == "Wide\n"
@pytest.mark.skip(reason="compatability is temporary broken")
def test_polymorphic_parts_diff_versions_2(start_cluster_diff_versions):
    # Replication doesn't work on the old version if a part is created in compact format,
    # because that version doesn't know anything about this format. This is considered acceptable.
node_old = node7
node_new = node8
insert_random_data('polymorphic_table_2', node_new, 100)
assert node_new.query("SELECT count() FROM polymorphic_table_2") == "100\n"
assert node_old.query("SELECT count() FROM polymorphic_table_2") == "0\n"
with pytest.raises(Exception):
node_old.query("SYSTEM SYNC REPLICA polymorphic_table_2", timeout=3)
node_old.restart_with_latest_version()
node_old.query("SYSTEM SYNC REPLICA polymorphic_table_2", timeout=20)
# Works after update
assert node_old.query("SELECT count() FROM polymorphic_table_2") == "100\n"
assert node_old.query("SELECT DISTINCT part_type FROM system.parts WHERE table = 'polymorphic_table_2' and active") == "Compact\n"
def test_polymorphic_parts_non_adaptive(start_cluster):
node1.query("SYSTEM STOP MERGES")
node2.query("SYSTEM STOP MERGES")
insert_random_data('non_adaptive_table', node1, 100)
node2.query("SYSTEM SYNC REPLICA non_adaptive_table", timeout=20)
insert_random_data('non_adaptive_table', node2, 100)
node1.query("SYSTEM SYNC REPLICA non_adaptive_table", timeout=20)
assert TSV(node1.query("SELECT part_type, count() FROM system.parts " \
"WHERE table = 'non_adaptive_table' AND active GROUP BY part_type ORDER BY part_type")) == TSV("Wide\t2\n")
assert TSV(node2.query("SELECT part_type, count() FROM system.parts " \
"WHERE table = 'non_adaptive_table' AND active GROUP BY part_type ORDER BY part_type")) == TSV("Wide\t2\n")
assert node1.contains_in_log("<Warning> default.non_adaptive_table: Table can't create parts with adaptive granularity")
| length = random.randint(0, 1000)
return ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(length)) |
dataset.py | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""SSD dataset"""
from __future__ import division
import os
import json
import xml.etree.ElementTree as et
import numpy as np
import cv2
import mindspore.dataset as de
import mindspore.dataset.vision.c_transforms as C
from mindspore.mindrecord import FileWriter
from .config import config
from .box_utils import jaccard_numpy, ssd_bboxes_encode
def _rand(a=0., b=1.):
"""Generate random."""
return np.random.rand() * (b - a) + a
def get_imageId_from_fileName(filename, id_iter):
"""Get imageID from fileName if fileName is int, else return id_iter."""
filename = os.path.splitext(filename)[0]
if filename.isdigit():
return int(filename)
return id_iter
def random_sample_crop(image, boxes):
"""Random Crop the image and boxes"""
height, width, _ = image.shape
min_iou = np.random.choice([None, 0.1, 0.3, 0.5, 0.7, 0.9])
if min_iou is None:
return image, boxes
    # max trials (50)
for _ in range(50):
image_t = image
w = _rand(0.3, 1.0) * width
h = _rand(0.3, 1.0) * height
# aspect ratio constraint b/t .5 & 2
if h / w < 0.5 or h / w > 2:
continue
left = _rand() * (width - w)
top = _rand() * (height - h)
rect = np.array([int(top), int(left), int(top + h), int(left + w)])
overlap = jaccard_numpy(boxes, rect)
# dropout some boxes
drop_mask = overlap > 0
if not drop_mask.any():
continue
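        # Reject the crop only when the overlaps spread both below min_iou and
        # above min_iou + 0.2; otherwise the sampled IoU constraint is satisfied.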
if overlap[drop_mask].min() < min_iou and overlap[drop_mask].max() > (min_iou + 0.2):
continue
image_t = image_t[rect[0]:rect[2], rect[1]:rect[3], :]
centers = (boxes[:, :2] + boxes[:, 2:4]) / 2.0
m1 = (rect[0] < centers[:, 0]) * (rect[1] < centers[:, 1])
m2 = (rect[2] > centers[:, 0]) * (rect[3] > centers[:, 1])
        # mask where m1, m2, and drop_mask are all true
mask = m1 * m2 * drop_mask
# have any valid boxes? try again if not
if not mask.any():
continue
# take only matching gt boxes
boxes_t = boxes[mask, :].copy()
boxes_t[:, :2] = np.maximum(boxes_t[:, :2], rect[:2])
boxes_t[:, :2] -= rect[:2]
boxes_t[:, 2:4] = np.minimum(boxes_t[:, 2:4], rect[2:4])
boxes_t[:, 2:4] -= rect[:2]
return image_t, boxes_t
return image, boxes
def preprocess_fn(img_id, image, box, is_training):
"""Preprocess function for dataset."""
cv2.setNumThreads(2)
def _infer_data(image, input_shape):
img_h, img_w, _ = image.shape
input_h, input_w = input_shape
image = cv2.resize(image, (input_w, input_h))
# When the channels of image is 1
if len(image.shape) == 2:
image = np.expand_dims(image, axis=-1)
image = np.concatenate([image, image, image], axis=-1)
return img_id, image, np.array((img_h, img_w), np.float32)
def _data_aug(image, box, is_training, image_size=(300, 300)):
"""Data augmentation function."""
ih, iw, _ = image.shape
w, h = image_size
if not is_training:
return _infer_data(image, image_size)
# Random crop
box = box.astype(np.float32)
image, box = random_sample_crop(image, box)
ih, iw, _ = image.shape
# Resize image
image = cv2.resize(image, (w, h))
# Flip image or not
flip = _rand() < .5
if flip:
image = cv2.flip(image, 1, dst=None)
# When the channels of image is 1
if len(image.shape) == 2:
image = np.expand_dims(image, axis=-1)
image = np.concatenate([image, image, image], axis=-1)
box[:, [0, 2]] = box[:, [0, 2]] / ih
box[:, [1, 3]] = box[:, [1, 3]] / iw
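        # A horizontal flip maps the normalized x-range [x_min, x_max] to
        # [1 - x_max, 1 - x_min], hence the swapped column order below.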
if flip:
box[:, [1, 3]] = 1 - box[:, [3, 1]]
box, label, num_match = ssd_bboxes_encode(box)
return image, box, label, num_match
return _data_aug(image, box, is_training, image_size=config.img_shape)
def create_voc_label(is_training):
"""Get image path and annotation from VOC."""
voc_root = config.voc_root
cls_map = {name: i for i, name in enumerate(config.classes)}
sub_dir = 'train' if is_training else 'eval'
voc_dir = os.path.join(voc_root, sub_dir)
if not os.path.isdir(voc_dir):
raise ValueError(f'Cannot find {sub_dir} dataset path.')
image_dir = anno_dir = voc_dir
if os.path.isdir(os.path.join(voc_dir, 'Images')):
image_dir = os.path.join(voc_dir, 'Images')
if os.path.isdir(os.path.join(voc_dir, 'Annotations')):
anno_dir = os.path.join(voc_dir, 'Annotations')
if not is_training:
json_file = os.path.join(config.voc_root, config.voc_json)
file_dir = os.path.split(json_file)[0]
if not os.path.isdir(file_dir):
os.makedirs(file_dir)
json_dict = {"images": [], "type": "instances", "annotations": [],
"categories": []}
bnd_id = 1
image_files_dict = {}
image_anno_dict = {}
images = []
id_iter = 0
for anno_file in os.listdir(anno_dir):
print(anno_file)
if not anno_file.endswith('xml'):
continue
tree = et.parse(os.path.join(anno_dir, anno_file))
root_node = tree.getroot()
file_name = root_node.find('filename').text
img_id = get_imageId_from_fileName(file_name, id_iter)
id_iter += 1
image_path = os.path.join(image_dir, file_name)
print(image_path)
if not os.path.isfile(image_path):
print(f'Cannot find image {file_name} according to annotations.')
continue
labels = []
for obj in root_node.iter('object'):
cls_name = obj.find('name').text
if cls_name not in cls_map:
print(f'Label "{cls_name}" not in "{config.classes}"')
continue
bnd_box = obj.find('bndbox')
x_min = int(bnd_box.find('xmin').text) - 1
y_min = int(bnd_box.find('ymin').text) - 1
x_max = int(bnd_box.find('xmax').text) - 1
y_max = int(bnd_box.find('ymax').text) - 1
labels.append([y_min, x_min, y_max, x_max, cls_map[cls_name]])
if not is_training:
o_width = abs(x_max - x_min)
o_height = abs(y_max - y_min)
ann = {'area': o_width * o_height, 'iscrowd': 0, 'image_id': \
img_id, 'bbox': [x_min, y_min, o_width, o_height], \
'category_id': cls_map[cls_name], 'id': bnd_id, \
'ignore': 0, \
'segmentation': []}
json_dict['annotations'].append(ann)
bnd_id = bnd_id + 1
if labels:
images.append(img_id)
image_files_dict[img_id] = image_path
image_anno_dict[img_id] = np.array(labels)
if not is_training:
size = root_node.find("size")
width = int(size.find('width').text)
height = int(size.find('height').text)
image = {'file_name': file_name, 'height': height, 'width': width,
'id': img_id}
json_dict['images'].append(image)
if not is_training:
for cls_name, cid in cls_map.items():
cat = {'supercategory': 'none', 'id': cid, 'name': cls_name}
json_dict['categories'].append(cat)
json_fp = open(json_file, 'w')
json_str = json.dumps(json_dict)
json_fp.write(json_str)
json_fp.close()
return images, image_files_dict, image_anno_dict
def create_coco_label(is_training):
"""Get image path and annotation from COCO."""
from pycocotools.coco import COCO
coco_root = config.coco_root
data_type = config.val_data_type
if is_training:
data_type = config.train_data_type
# Classes need to train or test.
train_cls = config.classes
train_cls_dict = {}
for i, cls in enumerate(train_cls):
train_cls_dict[cls] = i
anno_json = os.path.join(coco_root, config.instances_set.format(data_type))
coco = COCO(anno_json)
    classes_dict = {}
    cat_ids = coco.loadCats(coco.getCatIds())
    for cat in cat_ids:
        classes_dict[cat["id"]] = cat["name"]
image_ids = coco.getImgIds()
images = []
image_path_dict = {}
image_anno_dict = {}
for img_id in image_ids:
image_info = coco.loadImgs(img_id)
file_name = image_info[0]["file_name"]
anno_ids = coco.getAnnIds(imgIds=img_id, iscrowd=None)
anno = coco.loadAnns(anno_ids)
image_path = os.path.join(coco_root, data_type, file_name)
annos = []
iscrowd = False
for label in anno:
bbox = label["bbox"]
            class_name = classes_dict[label["category_id"]]
iscrowd = iscrowd or label["iscrowd"]
if class_name in train_cls:
x_min, x_max = bbox[0], bbox[0] + bbox[2]
y_min, y_max = bbox[1], bbox[1] + bbox[3]
annos.append(list(map(round, [y_min, x_min, y_max, x_max])) + [train_cls_dict[class_name]])
if not is_training and iscrowd:
continue
if len(annos) >= 1:
images.append(img_id)
image_path_dict[img_id] = image_path
image_anno_dict[img_id] = np.array(annos)
return images, image_path_dict, image_anno_dict
def anno_parser(annos_str):
"""Parse annotation from string to list."""
annos = []
for anno_str in annos_str:
anno = list(map(int, anno_str.strip().split(',')))
annos.append(anno)
return annos
def filter_valid_data(image_dir, anno_path):
"""Filter valid image file, which both in image_dir and anno_path."""
images = []
image_path_dict = {}
image_anno_dict = {}
if not os.path.isdir(image_dir):
raise RuntimeError("Path given is not valid.")
if not os.path.isfile(anno_path):
raise RuntimeError("Annotation file is not valid.")
with open(anno_path, "rb") as f:
lines = f.readlines()
for img_id, line in enumerate(lines):
line_str = line.decode("utf-8").strip()
line_split = str(line_str).split(' ')
file_name = line_split[0]
image_path = os.path.join(image_dir, file_name)
if os.path.isfile(image_path):
images.append(img_id)
image_path_dict[img_id] = image_path
image_anno_dict[img_id] = anno_parser(line_split[1:])
return images, image_path_dict, image_anno_dict
def voc_data_to_mindrecord(mindrecord_dir, is_training, prefix="ssd.mindrecord", file_num=8):
"""Create MindRecord file by image_dir and anno_path."""
mindrecord_path = os.path.join(mindrecord_dir, prefix)
writer = FileWriter(mindrecord_path, file_num)
images, image_path_dict, image_anno_dict = create_voc_label(is_training)
ssd_json = {
"img_id": {"type": "int32", "shape": [1]},
"image": {"type": "bytes"},
"annotation": {"type": "int32", "shape": [-1, 5]},
}
writer.add_schema(ssd_json, "ssd_json")
for img_id in images:
image_path = image_path_dict[img_id]
with open(image_path, 'rb') as f:
img = f.read()
annos = np.array(image_anno_dict[img_id], dtype=np.int32)
img_id = np.array([img_id], dtype=np.int32)
row = {"img_id": img_id, "image": img, "annotation": annos}
writer.write_raw_data([row])
writer.commit()
def data_to_mindrecord_byte_image(dataset="coco", is_training=True, prefix="ssd.mindrecord", file_num=8):
"""Create MindRecord file."""
mindrecord_dir = config.mindrecord_dir
mindrecord_path = os.path.join(mindrecord_dir, prefix)
writer = FileWriter(mindrecord_path, file_num)
if dataset == "coco":
images, image_path_dict, image_anno_dict = create_coco_label(is_training)
else:
images, image_path_dict, image_anno_dict = filter_valid_data(config.image_dir, config.anno_path)
ssd_json = {
"img_id": {"type": "int32", "shape": [1]},
"image": {"type": "bytes"},
"annotation": {"type": "int32", "shape": [-1, 5]},
}
writer.add_schema(ssd_json, "ssd_json")
for img_id in images:
image_path = image_path_dict[img_id]
with open(image_path, 'rb') as f:
img = f.read()
annos = np.array(image_anno_dict[img_id], dtype=np.int32)
img_id = np.array([img_id], dtype=np.int32)
row = {"img_id": img_id, "image": img, "annotation": annos}
writer.write_raw_data([row])
writer.commit()
def | (mindrecord_file, batch_size=32, repeat_num=10, device_num=1, rank=0,
is_training=True, num_parallel_workers=4, use_multiprocessing=True):
"""Create SSD dataset with MindDataset."""
ds = de.MindDataset(mindrecord_file, columns_list=["img_id", "image", "annotation"], num_shards=device_num,
shard_id=rank, num_parallel_workers=num_parallel_workers, shuffle=is_training)
decode = C.Decode()
ds = ds.map(operations=decode, input_columns=["image"])
change_swap_op = C.HWC2CHW()
normalize_op = C.Normalize(mean=[0.485 * 255, 0.456 * 255, 0.406 * 255],
std=[0.229 * 255, 0.224 * 255, 0.225 * 255])
color_adjust_op = C.RandomColorAdjust(brightness=0.4, contrast=0.4, saturation=0.4)
compose_map_func = (lambda img_id, image, annotation: preprocess_fn(img_id, image, annotation, is_training))
if is_training:
output_columns = ["image", "box", "label", "num_match"]
trans = [color_adjust_op, normalize_op, change_swap_op]
else:
output_columns = ["img_id", "image", "image_shape"]
trans = [normalize_op, change_swap_op]
ds = ds.map(operations=compose_map_func, input_columns=["img_id", "image", "annotation"],
output_columns=output_columns, column_order=output_columns,
python_multiprocessing=use_multiprocessing,
num_parallel_workers=num_parallel_workers)
ds = ds.map(operations=trans, input_columns=["image"], python_multiprocessing=use_multiprocessing,
num_parallel_workers=num_parallel_workers)
ds = ds.batch(batch_size, drop_remainder=True)
ds = ds.repeat(repeat_num)
return ds
def create_mindrecord(dataset="coco", prefix="ssd.mindrecord", is_training=True):
print("Start create dataset!")
# It will generate mindrecord file in config.mindrecord_dir,
# and the file name is ssd.mindrecord0, 1, ... file_num.
mindrecord_dir = config.mindrecord_dir
mindrecord_file = os.path.join(mindrecord_dir, prefix + "0")
if not os.path.exists(mindrecord_file):
if not os.path.isdir(mindrecord_dir):
os.makedirs(mindrecord_dir)
if dataset == "coco":
if os.path.isdir(config.coco_root):
print("Create Mindrecord.")
data_to_mindrecord_byte_image("coco", is_training, prefix)
print("Create Mindrecord Done, at {}".format(mindrecord_dir))
else:
print("coco_root not exits.")
elif dataset == "voc":
if os.path.isdir(config.voc_root):
print("Create Mindrecord.")
voc_data_to_mindrecord(mindrecord_dir, is_training, prefix)
print("Create Mindrecord Done, at {}".format(mindrecord_dir))
else:
print("voc_root not exits.")
else:
if os.path.isdir(config.image_dir) and os.path.exists(config.anno_path):
print("Create Mindrecord.")
data_to_mindrecord_byte_image("other", is_training, prefix)
print("Create Mindrecord Done, at {}".format(mindrecord_dir))
else:
print("image_dir or anno_path not exits.")
return mindrecord_file
| create_ssd_dataset |
conftest.py | import pytest
from blogcookiecutter.users.models import User
from blogcookiecutter.users.tests.factories import UserFactory
@pytest.fixture(autouse=True)
def media_storage(settings, tmpdir): | def user() -> User:
return UserFactory() | settings.MEDIA_ROOT = tmpdir.strpath
@pytest.fixture |
copulative_suffix.rs | //! Copula
/// Attributive copula form
pub enum AttributiveCopulativeSuffix {
    /// Non-perfective aspect
    NonPerfective,
    /// Perfective aspect
    Perfective,
    /// Prospective aspect
    Prospective,
}
/// Converbal copula form
pub struct ConverbalCopulativeSuffix;
| ||
exporter.go | package burrow_exporter
import (
"context"
"sync"
"time"
"net/http"
"strconv"
log "github.com/Sirupsen/logrus"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promhttp"
)
type BurrowExporter struct {
client *BurrowClient
metricsListenAddr string
interval int
wg sync.WaitGroup
}
var partitionStatuses = [...]string{"OK", "WARNING", "STALL", "STOP", "ERROR"}
func (be *BurrowExporter) processGroup(cluster, group string) {
status, err := be.client.ConsumerGroupLag(cluster, group)
if err != nil {
log.WithFields(log.Fields{
"err": err,
}).Error("error getting status for consumer group. returning.")
return
}
for _, partition := range status.Status.Partitions {
KafkaConsumerPartitionLag.With(prometheus.Labels{
"cluster": status.Status.Cluster,
"group": status.Status.Group,
"topic": partition.Topic,
"partition": strconv.Itoa(int(partition.Partition)),
}).Set(float64(partition.Lag))
KafkaConsumerPartitionLastOffsetLag.With(prometheus.Labels{
"cluster": status.Status.Cluster,
"group": status.Status.Group,
"topic": partition.Topic,
"partition": strconv.Itoa(int(partition.Partition)),
}).Set(float64(partition.End.Lag))
KafkaConsumerPartitionCurrentOffset.With(prometheus.Labels{
"cluster": status.Status.Cluster,
"group": status.Status.Group,
"topic": partition.Topic,
"partition": strconv.Itoa(int(partition.Partition)),
}).Set(float64(partition.End.Offset))
KafkaConsumerPartitionMaxOffset.With(prometheus.Labels{
"cluster": status.Status.Cluster,
"group": status.Status.Group,
"topic": partition.Topic,
"partition": strconv.Itoa(int(partition.Partition)),
}).Set(float64(partition.End.MaxOffset))
for _, partitionStatus := range partitionStatuses {
active := 0
if partitionStatus == partition.Status {
active = 1
}
KafkaConsumerPartitionStatus.With(prometheus.Labels{
"cluster": status.Status.Cluster,
"group": status.Status.Group,
"topic": partition.Topic,
"partition": strconv.Itoa(int(partition.Partition)),
"status": partitionStatus,
}).Set(float64(active))
}
}
KafkaConsumerTotalLag.With(prometheus.Labels{
"cluster": status.Status.Cluster,
"group": status.Status.Group,
}).Set(float64(status.Status.TotalLag))
}
func (be *BurrowExporter) processTopic(cluster, topic string) {
details, err := be.client.ClusterTopicDetails(cluster, topic)
if err != nil {
log.WithFields(log.Fields{
"err": err,
"topic": topic,
}).Error("error getting status for cluster topic. returning.")
return
}
for i, offset := range details.Offsets {
KafkaTopicPartitionOffset.With(prometheus.Labels{
"cluster": cluster,
"topic": topic,
"partition": strconv.Itoa(i),
}).Set(float64(offset))
}
}
func (be *BurrowExporter) processCluster(cluster string) {
groups, err := be.client.ListConsumers(cluster)
if err != nil {
log.WithFields(log.Fields{
"err": err,
"cluster": cluster,
}).Error("error listing consumer groups. returning.")
return
}
topics, err := be.client.ListClusterTopics(cluster)
if err != nil {
log.WithFields(log.Fields{
"err": err,
"cluster": cluster,
}).Error("error listing cluster topics. returning.")
return
}
wg := sync.WaitGroup{}
for _, group := range groups.ConsumerGroups {
wg.Add(1)
go func(g string) {
defer wg.Done()
be.processGroup(cluster, g)
}(group)
}
for _, topic := range topics.Topics {
wg.Add(1)
go func(t string) {
defer wg.Done()
be.processTopic(cluster, t)
}(topic)
}
wg.Wait()
}
func (be *BurrowExporter) startPrometheus() {
	http.Handle("/metrics", promhttp.Handler())
	go func() {
		if err := http.ListenAndServe(be.metricsListenAddr, nil); err != nil {
			log.WithError(err).Error("metrics endpoint stopped")
		}
	}()
}
func (be *BurrowExporter) Close() {
be.wg.Wait()
}
func (be *BurrowExporter) Start(ctx context.Context) {
be.startPrometheus()
be.wg.Add(1)
defer be.wg.Done()
be.mainLoop(ctx)
}
func (be *BurrowExporter) scrape() {
start := time.Now()
log.WithField("timestamp", start.UnixNano()).Info("Scraping burrow...")
clusters, err := be.client.ListClusters()
if err != nil {
log.WithFields(log.Fields{
"err": err,
}).Error("error listing clusters. Continuing.")
return
}
wg := sync.WaitGroup{}
for _, cluster := range clusters.Clusters {
wg.Add(1)
go func(c string) {
defer wg.Done()
be.processCluster(c)
}(cluster)
}
wg.Wait()
end := time.Now()
log.WithFields(log.Fields{
"timestamp": end.UnixNano(),
"took": end.Sub(start),
}).Info("Finished scraping burrow.")
}
func (be *BurrowExporter) mainLoop(ctx context.Context) {
timer := time.NewTicker(time.Duration(be.interval) * time.Second)
// scrape at app start without waiting for the first interval to elapse
be.scrape()
for {
select {
case <-ctx.Done():
log.Info("Shutting down exporter.")
timer.Stop()
return
case <-timer.C:
be.scrape()
}
}
}
func | (burrowUrl string, apiVersion int, metricsAddr string, interval int) *BurrowExporter {
return &BurrowExporter{
client: MakeBurrowClient(burrowUrl, apiVersion),
metricsListenAddr: metricsAddr,
interval: interval,
}
}
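Taken together, the exporter's lifecycle is: construct it with MakeBurrowExporter, call Start with a cancellable context (Start blocks in mainLoop), cancel the context to stop, then Close to wait for in-flight work. A minimal wiring sketch; the package import path, Burrow URL, API version, listen address, and interval are placeholder assumptions:

```go
package main

import (
	"context"
	"os"
	"os/signal"
	"syscall"

	"github.com/jirwin/burrow_exporter/burrow_exporter" // assumed import path
)

func main() {
	// Placeholder values; a real deployment reads these from flags or env.
	exporter := burrow_exporter.MakeBurrowExporter("http://localhost:8000", 2, ":8080", 30)

	ctx, cancel := context.WithCancel(context.Background())

	// Cancel the context on SIGINT/SIGTERM so mainLoop returns cleanly.
	sigs := make(chan os.Signal, 1)
	signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
	go func() {
		<-sigs
		cancel()
	}()

	exporter.Start(ctx) // blocks until ctx is cancelled
	exporter.Close()    // waits for the WaitGroup to drain
}
```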
| MakeBurrowExporter |
mediastack.js | var util = require('util');
/*
* Kurento media Stack
*/
var kurento = require('kurento-client');
var transform = require('sdp-transform');
var config = require('./config');
var MediaStack = function () {
MediaStack.id ="bob";
MediaStack.sessions = {};
MediaStack.sip = null;
MediaStack.candidatesQueue = {};
MediaStack.kurentoClient = null;
};
MediaStack.prototype.init = function (sip){
MediaStack.sip = sip;
}
MediaStack.prototype.start = function (sessionId, ws, from,to, sdpOffer, options,callback) {
if (!sessionId) {
return callback('Cannot use undefined sessionId');
}
MediaStack.candidatesQueue[sessionId] = [];
MediaStack.sessions[sessionId]={
'ws': ws
};
    console.log(sessionId + " Concurrent calls: " + Object.keys(MediaStack.sessions).length + "/" + config.maxConcurentCalls + " " + util.inspect(MediaStack.sessions));
if(Object.keys(MediaStack.sessions).length > config.maxConcurentCalls){
return callback('Unable to start call due to server concurrent capacity limit');
}
getKurentoClient(function(error, kurentoClient) {
if (error) {
return callback(error);
}
kurentoClient.create('MediaPipeline', function(error, pipeline) {
if (error) {
return callback(error);
}
createMediaElements(sessionId,pipeline,ws,from,to,options, function(error, webRtcEndpoint,rtpEndpoint) {
if (error) {
pipeline.release();
return callback(error);
}
console.log("Collect Candidates");
if (MediaStack.candidatesQueue[sessionId]) {
while(MediaStack.candidatesQueue[sessionId].length) {
var candidate = MediaStack.candidatesQueue[sessionId].shift();
webRtcEndpoint.addIceCandidate(candidate);
}
}
console.log("connect media element");
connectMediaElements(webRtcEndpoint,rtpEndpoint, function(error) {
if (error) {
pipeline.release();
return callback(error);
}
webRtcEndpoint.on('OnIceCandidate', function(event) {
var candidate = kurento.getComplexType('IceCandidate')(event.candidate);
ws.send(JSON.stringify({
id : 'iceCandidate',
candidate : candidate
}));
});
webRtcEndpoint.processOffer(sdpOffer, function(error, sdpAnswer) {
console.log("Sdp Answer WebRTC Endpoint " + sdpAnswer);
if (error) {
pipeline.release();
return callback(error);
}
MediaStack.sessions[sessionId].pipeline = pipeline;
MediaStack.sessions[sessionId].webRtcEndpoint = webRtcEndpoint;
MediaStack.sessions[sessionId].rtpEndpoint = rtpEndpoint;
return callback(null, sdpAnswer);
});
webRtcEndpoint.gatherCandidates(function(error) {
if (error) {
return callback(error);
}
});
});
});
});
});
}
// Recover kurentoClient for the first time.
function getKurentoClient(callback) {
console.log("Get Kurento Client ");
if (MediaStack.kurentoClient) {
console.log(" Kurento Client not null ");
return callback(null, MediaStack.kurentoClient);
}
kurento(config.kurento.ws_uri, function(error, _kurentoClient) {
if (error) {
console.log("Could not find media server at address " + config.kurento.ws_uri);
return callback("Could not find media server at address" + config.kurento.ws_uri
+ ". Exiting with error " + error);
}
MediaStack.kurentoClient = _kurentoClient;
console.log(" Call Abck Kurento CLient ");
callback(null, MediaStack.kurentoClient);
});
}
function getIPAddress() {
return config.serverPublicIP;
}
function replace_ip(sdp, ip) {
if (!ip)
ip = getIPAddress();
console.log("IP " + ip);
console.log("sdp init : "+sdp);
var sdpObject = transform.parse(sdp);
sdpObject.origin.address = ip;
sdpObject.connection.ip = ip;
var sdpResult = transform.write(sdpObject);
console.log("sdp result : "+sdpResult);
return sdpResult;
}
function mungleSDP(sdp){
    // Force the plain RTP/AVP profile and pin the H.264 profile-level-id
    // expected by the SIP side.
    var mungledSdp = sdp.replace(new RegExp("RTP/AVPF", "g"), "RTP/AVP");
    var h264Payload = MediaStack.sip.getH264Payload(sdp);
    mungledSdp += "a=fmtp:" + h264Payload + " profile-level-id=42801F\n";
    return mungledSdp;
}
function mungleSDP2(sdp){
    // Only force the plain RTP/AVP profile; unlike mungleSDP, no fmtp line is appended.
    return sdp.replace(new RegExp("RTP/AVPF", "g"), "RTP/AVP");
}
function prettyJSON(obj) {
console.log(JSON.stringify(obj, null, 2));
}
function createMediaElements(sessionId,pipeline,ws,from,to,options, callback) {
pipeline.create('WebRtcEndpoint', function(error, webRtcEndpoint) {
if (error) {
return callback(error);
}
pipeline.create('RtpEndpoint', function(error, rtpEndpoint){
if (error) {
return callback(error);
}
createSipCall(sessionId,from+"@"+getIPAddress(),to,rtpEndpoint,options,function(error){
if (error) {
return callback(error);
}
return callback(null, webRtcEndpoint, rtpEndpoint);
});
});
});
}
function connectMediaElements(webRtcEndpoint, rtpEndpoint,callback) {
rtpEndpoint.connect(webRtcEndpoint, function(error) {
if (error) {
return callback(error);
}
webRtcEndpoint.connect(rtpEndpoint,function (error){
if (error) {
return callback(error);
}
return callback(null);
});
});
}
function reConnectMediaElements(sessionId) {
var webRtcEndpoint = MediaStack.sessions[sessionId].webRtcEndpoint;
var rtpEndpoint = MediaStack.sessions[sessionId].rtpEndpoint;
rtpEndpoint.connect(webRtcEndpoint, function(error) {
if (!error) {
webRtcEndpoint.connect(rtpEndpoint,function (error){
console.log("Reconnect Media "+sessionId);
});
}
/*
if (MediaStack.sessions[sessionId].OldRtpEndpoint){
MediaStack.sessions[sessionId].OldRtpEndpoint.release();
MediaStack.sessions[sessionId].OldRtpEndpoint=null;
}*/
});
}
function createSipCall(sessionId,from,to,rtpEndpoint,options,callback){
rtpEndpoint.generateOffer(function(error, sdpOffer) {
var modSdp = replace_ip(sdpOffer);
modSdp = mungleSDP(modSdp);
MediaStack.sip.invite (sessionId,from,to,modSdp,options,function (error,remoteSdp){
if (error){
return callback(error);
}
rtpEndpoint.processAnswer(remoteSdp,function(error){
if (error){
return callback(error);
}
// Insert EnCall timeout
setTimeout(function(){
console.log("EndCall Timeout "+sessionId);
MediaStack.sip.bye(sessionId);
MediaStack.stopFromBye(sessionId);
}
,config.maxCallSeconds*1000);
return callback(null);
});
});
});
}
MediaStack.prototype.stop = function (sessionId) {
MediaStack.sip.bye(sessionId);
if (MediaStack.sessions[sessionId]) {
var pipeline = MediaStack.sessions[sessionId].pipeline;
if (pipeline != undefined){
console.info('Releasing pipeline');
pipeline.release();
}
delete MediaStack.sessions[sessionId];
delete MediaStack.candidatesQueue[sessionId];
}
}
MediaStack.prototype.stopFromBye = function (sessionId) {
if (MediaStack.sessions[sessionId]) {
var ws = MediaStack.sessions[sessionId].ws;
if (ws != undefined){
ws.send(JSON.stringify({
id : 'stopFromBye'
}));
}
var pipeline = MediaStack.sessions[sessionId].pipeline;
if (pipeline != undefined){
console.info('Releasing pipeline');
pipeline.release();
}
delete MediaStack.sessions[sessionId];
delete MediaStack.candidatesQueue[sessionId];
}
}
MediaStack.prototype.onIceCandidate = function (sessionId, _candidate) {
var candidate = kurento.getComplexType('IceCandidate')(_candidate);
if (MediaStack.sessions[sessionId]!=undefined && MediaStack.sessions[sessionId].webRtcEndpoint!=undefined) {
console.info('Sending candidate');
var webRtcEndpoint = MediaStack.sessions[sessionId].webRtcEndpoint;
webRtcEndpoint.addIceCandidate(candidate);
}
else {
console.info('Queueing candidate');
if (!MediaStack.candidatesQueue[sessionId]) {
MediaStack.candidatesQueue[sessionId] = [];
}
MediaStack.candidatesQueue[sessionId].push(candidate);
}
}
MediaStack.prototype.sendDtmf = function (sessionId, dtmf){
MediaStack.sip.infoDtmf(sessionId,dtmf);
// reConnectMediaElements(sessionId);
}
MediaStack.prototype.reconnect = function (sessionId){
reConnectMediaElements(sessionId);
}
MediaStack.prototype.renegotiateWebRTC = function (sessionId,callback){
if (MediaStack.sessions[sessionId] && MediaStack.sessions[sessionId].pipeline){
var pipeline = MediaStack.sessions[sessionId].pipeline;
MediaStack.sessions[sessionId].webRtcEndpoint.release();
MediaStack.sessions[sessionId].renegotiated=true;
MediaStack.candidatesQueue[sessionId]=[];
pipeline.create('WebRtcEndpoint', function(error, webRtcEndpoint){
if (error) {
return callback(error);
}
MediaStack.sessions[sessionId].webRtcEndpoint = webRtcEndpoint;
webRtcEndpoint.generateOffer(function(error,sdpOffer) {
if (error){
console.log("SdpOffer not accepted by kurento");
console.log(error);
return callback(error);
}
var ws = MediaStack.sessions[sessionId].ws;
if (ws != undefined){
ws.send(JSON.stringify({
id : 'renegotiateWebRTC',
sdp : sdpOffer
}));
return callback();
}
});
});
};
}
MediaStack.prototype.renegotiateResponse = function (sessionId,sdp){
if (MediaStack.sessions[sessionId] && MediaStack.sessions[sessionId].pipeline && MediaStack.sessions[sessionId].webRtcEndpoint){
var webRtcEndpoint = MediaStack.sessions[sessionId].webRtcEndpoint;
var pipeline = MediaStack.sessions[sessionId].pipeline;
console.log("Collect Candidates");
if (MediaStack.candidatesQueue[sessionId]) {
while(MediaStack.candidatesQueue[sessionId].length) {
var candidate = MediaStack.candidatesQueue[sessionId].shift();
webRtcEndpoint.addIceCandidate(candidate);
}
}
var ws = MediaStack.sessions[sessionId].ws;
webRtcEndpoint.on('OnIceCandidate', function(event) {
var candidate = kurento.getComplexType('IceCandidate')(event.candidate);
ws.send(JSON.stringify({
id : 'iceCandidate',
candidate : candidate
}));
});
webRtcEndpoint.processAnswer(sdp, function(error) {
MediaStack.sessions[sessionId].renegotiated=false;
if (error) {
pipeline.release();
console.log("ProcessAnswer Error"+error);
}
MediaStack.sip.reponseToReInvite(sessionId);
// reConnectMediaElements(sessionId);
return;
});
webRtcEndpoint.gatherCandidates(function(error) {
if (error) {
console.log("gatherCandidates Error"+error);
}
});
}
}
function | (webRtcEndpoint,rtpEndpoint,callback){
    rtpEndpoint.disconnect(webRtcEndpoint, function(error) {
        if (error)
            return callback(error);
        webRtcEndpoint.disconnect(rtpEndpoint, function (error){
            if (error)
                return callback(error);
            rtpEndpoint.release((error) => {
                if (error)
                    return callback(error);
                callback(null);
            });
        });
    });
}
MediaStack.prototype.renegotiateRTP = function (sessionId, remoteSdp,callback){
if (MediaStack.sessions[sessionId] && MediaStack.sessions[sessionId].pipeline){
var pipeline = MediaStack.sessions[sessionId].pipeline;
disconnectElement(MediaStack.sessions[sessionId].webRtcEndpoint,MediaStack.sessions[sessionId].rtpEndpoint,function(){
//MediaStack.sessions[sessionId].rtpEndpoint.release();
pipeline.create('RtpEndpoint', function(error, rtpEndpoint){
if (error) {
return callback(error);
}
rtpEndpoint.processOffer(remoteSdp,function(error,sdpOffer) {
if (error){
console.log("SdpOffer not accepted by kurento");
console.log(error);
return callback(error);
}
var modSdp = replace_ip(sdpOffer);
modSdp = mungleSDP2(modSdp);
MediaStack.sessions[sessionId].rtpEndpoint = rtpEndpoint;
return callback(null,modSdp);
});
});
});
}
}
module.exports = new MediaStack();
| disconnectElement |
main.go | package main
import (
"crypto/tls"
"encoding/json"
"flag"
"fmt"
"io/ioutil"
"net/http"
"github.com/mattbaird/jsonpatch"
"k8s.io/api/admission/v1beta1"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/util/sets"
"k8s.io/apimachinery/pkg/runtime/serializer"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/klog"
)
var scheme = runtime.NewScheme()
var codecs = serializer.NewCodecFactory(scheme)
// Config contains the server (the webhook) cert and key.
type Config struct {
CertFile string
KeyFile string
}
type admitFunc func(v1beta1.AdmissionReview) *v1beta1.AdmissionResponse
func configTLS(config Config) *tls.Config |
func apply(ar v1beta1.AdmissionReview) *v1beta1.AdmissionResponse {
klog.Info("Entering apply in ExtendedResourceToleration webhook")
podResource := metav1.GroupVersionResource{Group: "", Version: "v1", Resource: "pods"}
if ar.Request.Resource != podResource {
klog.Errorf("expect resource to be %s", podResource)
return nil
}
raw := ar.Request.Object.Raw
pod := corev1.Pod{}
deserializer := codecs.UniversalDeserializer()
if _, _, err := deserializer.Decode(raw, nil, &pod); err != nil {
klog.Error(err)
return toAdmissionResponse(err)
}
reviewResponse := v1beta1.AdmissionResponse{}
reviewResponse.Allowed = true
podCopy := pod.DeepCopy()
klog.V(1).Infof("Examining pod: %v\n", pod.GetName())
	// Skip mirror pods, which are identified by the mirror-pod annotation
if podAnnotations := pod.GetAnnotations(); podAnnotations != nil {
klog.Info(fmt.Sprintf("Looking at pod annotations, found: %v", podAnnotations))
if _, isMirrorPod := podAnnotations[corev1.MirrorPodAnnotationKey]; isMirrorPod {
return &reviewResponse
}
}
// find resource requests and add toleration
// Copied from : https://github.com/kubernetes/kubernetes/blob/master/plugin/pkg/admission/extendedresourcetoleration/admission.go
resources := sets.String{}
for _, container := range pod.Spec.Containers {
for resourceName := range container.Resources.Requests {
if isExtendedResourceName(resourceName) {
resources.Insert(string(resourceName))
}
}
}
for _, container := range pod.Spec.InitContainers {
for resourceName := range container.Resources.Requests {
if isExtendedResourceName(resourceName) {
resources.Insert(string(resourceName))
}
}
}
if resources.Len() == 0 {
return &reviewResponse
}
// Doing .List() so that we get a stable sorted list.
// This allows us to test adding tolerations for multiple extended resources.
for _, resource := range resources.List() {
if !addOrUpdateTolerationInPod(&pod, &corev1.Toleration{
Key: resource,
Operator: corev1.TolerationOpExists,
Effect: corev1.TaintEffectNoSchedule,
}) {
return &reviewResponse
}
klog.Infof("applied extendedresourcetoleration: %s successfully on Pod: %+v ", resource, pod.GetName())
}
podCopyJSON, err := json.Marshal(podCopy)
if err != nil {
return toAdmissionResponse(err)
}
podJSON, err := json.Marshal(pod)
if err != nil {
return toAdmissionResponse(err)
}
klog.Infof("PodCopy json: %s ", podCopyJSON)
klog.Infof("pod json: %s ", podJSON)
jsonPatch, err := jsonpatch.CreatePatch(podCopyJSON, podJSON)
if err != nil {
klog.Infof("patch error: %+v", err)
return toAdmissionResponse(err)
}
jsonPatchBytes, _ := json.Marshal(jsonPatch)
klog.Infof("jsonPatch json: %s", jsonPatchBytes)
reviewResponse.Patch = jsonPatchBytes
pt := v1beta1.PatchTypeJSONPatch
reviewResponse.PatchType = &pt
return &reviewResponse
}
func serve(w http.ResponseWriter, r *http.Request, admit admitFunc) {
var body []byte
if r.Body != nil {
if data, err := ioutil.ReadAll(r.Body); err == nil {
body = data
}
}
// verify the content type is accurate
contentType := r.Header.Get("Content-Type")
if contentType != "application/json" {
klog.Errorf("contentType=%s, expect application/json", contentType)
return
}
var reviewResponse *v1beta1.AdmissionResponse
ar := v1beta1.AdmissionReview{}
deserializer := codecs.UniversalDeserializer()
if _, _, err := deserializer.Decode(body, nil, &ar); err != nil {
klog.Error(err)
reviewResponse = toAdmissionResponse(err)
} else {
reviewResponse = admit(ar)
}
response := v1beta1.AdmissionReview{}
if reviewResponse != nil {
response.Response = reviewResponse
response.Response.UID = ar.Request.UID
}
// reset the Object and OldObject, they are not needed in a response.
ar.Request.Object = runtime.RawExtension{}
ar.Request.OldObject = runtime.RawExtension{}
resp, err := json.Marshal(response)
if err != nil {
klog.Error(err)
}
if _, err := w.Write(resp); err != nil {
klog.Error(err)
}
}
func serveERT(w http.ResponseWriter, r *http.Request) {
serve(w, r, apply)
}
func main() {
var config Config
flag.StringVar(&config.CertFile, "tlsCertFile", "/etc/certs/cert.pem", "File containing the x509 Certificate for HTTPS.")
flag.StringVar(&config.KeyFile, "tlsKeyFile", "/etc/certs/key.pem", "File containing the x509 private key to --tlsCertFile.")
flag.Parse()
klog.InitFlags(nil)
http.HandleFunc("/apply-ert", serveERT)
server := &http.Server{
Addr: ":443",
TLSConfig: configTLS(config),
}
klog.Info(fmt.Sprintf("About to start serving webhooks: %#v", server))
	if err := server.ListenAndServeTLS("", ""); err != nil {
		klog.Fatal(err)
	}
}
| {
sCert, err := tls.LoadX509KeyPair(config.CertFile, config.KeyFile)
if err != nil {
klog.Fatalf("config=%#v Error: %v", config, err)
}
return &tls.Config{
Certificates: []tls.Certificate{sCert},
// TODO: uses mutual tls after we agree on what cert the apiserver should use.
//ClientAuth: tls.RequireAndVerifyClientCert,
}
} |
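The webhook relies on two helpers that are not part of this excerpt: isExtendedResourceName and addOrUpdateTolerationInPod. Below is a hedged sketch of what they plausibly do, modeled loosely on the upstream extendedresourcetoleration admission plugin; the helper names are suffixed with "Sketch" to mark them as hypothetical, and the real implementations may differ:

```go
package main

import (
	"strings"

	corev1 "k8s.io/api/core/v1"
)

// Sketch: an extended resource is domain-prefixed and outside the reserved
// kubernetes.io namespace; quota-style "requests." names are excluded.
func isExtendedResourceNameSketch(name corev1.ResourceName) bool {
	n := string(name)
	if strings.HasPrefix(n, "requests.") {
		return false
	}
	return strings.Contains(n, "/") && !strings.HasPrefix(n, "kubernetes.io/")
}

// Sketch: add the toleration unless an equivalent one already exists;
// report whether the pod was actually modified.
func addOrUpdateTolerationInPodSketch(pod *corev1.Pod, t *corev1.Toleration) bool {
	for _, existing := range pod.Spec.Tolerations {
		if existing.Key == t.Key && existing.Operator == t.Operator && existing.Effect == t.Effect {
			return false
		}
	}
	pod.Spec.Tolerations = append(pod.Spec.Tolerations, *t)
	return true
}
```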
reach_plan_ad_length.pb.go | // Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.25.0
// protoc v3.13.0
// source: google/ads/googleads/v5/enums/reach_plan_ad_length.proto
package enums
import (
reflect "reflect"
sync "sync"
proto "github.com/golang/protobuf/proto"
_ "google.golang.org/genproto/googleapis/api/annotations"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// This is a compile-time assertion that a sufficiently up-to-date version
// of the legacy proto package is being used.
const _ = proto.ProtoPackageIsVersion4
// Possible ad length values.
type ReachPlanAdLengthEnum_ReachPlanAdLength int32
const (
// Not specified.
ReachPlanAdLengthEnum_UNSPECIFIED ReachPlanAdLengthEnum_ReachPlanAdLength = 0
// The value is unknown in this version.
ReachPlanAdLengthEnum_UNKNOWN ReachPlanAdLengthEnum_ReachPlanAdLength = 1
// 6 seconds long ad.
ReachPlanAdLengthEnum_SIX_SECONDS ReachPlanAdLengthEnum_ReachPlanAdLength = 2
// 15 or 20 seconds long ad.
ReachPlanAdLengthEnum_FIFTEEN_OR_TWENTY_SECONDS ReachPlanAdLengthEnum_ReachPlanAdLength = 3
// More than 20 seconds long ad.
ReachPlanAdLengthEnum_TWENTY_SECONDS_OR_MORE ReachPlanAdLengthEnum_ReachPlanAdLength = 4
)
// Enum value maps for ReachPlanAdLengthEnum_ReachPlanAdLength.
var (
ReachPlanAdLengthEnum_ReachPlanAdLength_name = map[int32]string{
0: "UNSPECIFIED",
1: "UNKNOWN",
2: "SIX_SECONDS",
3: "FIFTEEN_OR_TWENTY_SECONDS",
4: "TWENTY_SECONDS_OR_MORE",
}
ReachPlanAdLengthEnum_ReachPlanAdLength_value = map[string]int32{
"UNSPECIFIED": 0,
"UNKNOWN": 1,
"SIX_SECONDS": 2,
"FIFTEEN_OR_TWENTY_SECONDS": 3,
"TWENTY_SECONDS_OR_MORE": 4,
}
)
func (x ReachPlanAdLengthEnum_ReachPlanAdLength) Enum() *ReachPlanAdLengthEnum_ReachPlanAdLength {
p := new(ReachPlanAdLengthEnum_ReachPlanAdLength)
*p = x
return p
}
func (x ReachPlanAdLengthEnum_ReachPlanAdLength) String() string {
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (ReachPlanAdLengthEnum_ReachPlanAdLength) Descriptor() protoreflect.EnumDescriptor {
return file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_enumTypes[0].Descriptor()
}
func (ReachPlanAdLengthEnum_ReachPlanAdLength) Type() protoreflect.EnumType {
return &file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_enumTypes[0]
}
func (x ReachPlanAdLengthEnum_ReachPlanAdLength) Number() protoreflect.EnumNumber {
return protoreflect.EnumNumber(x)
}
// Deprecated: Use ReachPlanAdLengthEnum_ReachPlanAdLength.Descriptor instead.
func (ReachPlanAdLengthEnum_ReachPlanAdLength) EnumDescriptor() ([]byte, []int) {
return file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_rawDescGZIP(), []int{0, 0}
}
// Message describing length of a plannable video ad.
type ReachPlanAdLengthEnum struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
}
func (x *ReachPlanAdLengthEnum) Reset() {
*x = ReachPlanAdLengthEnum{}
if protoimpl.UnsafeEnabled {
mi := &file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ReachPlanAdLengthEnum) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ReachPlanAdLengthEnum) ProtoMessage() {}
func (x *ReachPlanAdLengthEnum) ProtoReflect() protoreflect.Message {
mi := &file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ReachPlanAdLengthEnum.ProtoReflect.Descriptor instead.
func (*ReachPlanAdLengthEnum) Descriptor() ([]byte, []int) {
return file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_rawDescGZIP(), []int{0}
}
var File_google_ads_googleads_v5_enums_reach_plan_ad_length_proto protoreflect.FileDescriptor
var file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_rawDesc = []byte{
0x0a, 0x38, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x35, 0x2f, 0x65, 0x6e, 0x75, 0x6d, 0x73, 0x2f,
0x72, 0x65, 0x61, 0x63, 0x68, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x61, 0x64, 0x5f, 0x6c, 0x65,
0x6e, 0x67, 0x74, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x1d, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73,
0x2e, 0x76, 0x35, 0x2e, 0x65, 0x6e, 0x75, 0x6d, 0x73, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e,
0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x96, 0x01, 0x0a, 0x15, 0x52, 0x65, 0x61, 0x63,
0x68, 0x50, 0x6c, 0x61, 0x6e, 0x41, 0x64, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x45, 0x6e, 0x75,
0x6d, 0x22, 0x7d, 0x0a, 0x11, 0x52, 0x65, 0x61, 0x63, 0x68, 0x50, 0x6c, 0x61, 0x6e, 0x41, 0x64,
0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43,
0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f,
0x57, 0x4e, 0x10, 0x01, 0x12, 0x0f, 0x0a, 0x0b, 0x53, 0x49, 0x58, 0x5f, 0x53, 0x45, 0x43, 0x4f,
0x4e, 0x44, 0x53, 0x10, 0x02, 0x12, 0x1d, 0x0a, 0x19, 0x46, 0x49, 0x46, 0x54, 0x45, 0x45, 0x4e,
0x5f, 0x4f, 0x52, 0x5f, 0x54, 0x57, 0x45, 0x4e, 0x54, 0x59, 0x5f, 0x53, 0x45, 0x43, 0x4f, 0x4e,
0x44, 0x53, 0x10, 0x03, 0x12, 0x1a, 0x0a, 0x16, 0x54, 0x57, 0x45, 0x4e, 0x54, 0x59, 0x5f, 0x53,
0x45, 0x43, 0x4f, 0x4e, 0x44, 0x53, 0x5f, 0x4f, 0x52, 0x5f, 0x4d, 0x4f, 0x52, 0x45, 0x10, 0x04,
0x42, 0xeb, 0x01, 0x0a, 0x21, 0x63, 0x6f, 0x6d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,
0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x35,
0x2e, 0x65, 0x6e, 0x75, 0x6d, 0x73, 0x42, 0x16, 0x52, 0x65, 0x61, 0x63, 0x68, 0x50, 0x6c, 0x61,
0x6e, 0x41, 0x64, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01,
0x5a, 0x42, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x67, 0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2e,
0x6f, 0x72, 0x67, 0x2f, 0x67, 0x65, 0x6e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x35, 0x2f, 0x65, 0x6e, 0x75, 0x6d, 0x73, 0x3b, 0x65,
0x6e, 0x75, 0x6d, 0x73, 0xa2, 0x02, 0x03, 0x47, 0x41, 0x41, 0xaa, 0x02, 0x1d, 0x47, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2e, 0x41, 0x64, 0x73, 0x2e, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x41, 0x64,
0x73, 0x2e, 0x56, 0x35, 0x2e, 0x45, 0x6e, 0x75, 0x6d, 0x73, 0xca, 0x02, 0x1d, 0x47, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x5c, 0x41, 0x64, 0x73, 0x5c, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x41, 0x64,
0x73, 0x5c, 0x56, 0x35, 0x5c, 0x45, 0x6e, 0x75, 0x6d, 0x73, 0xea, 0x02, 0x21, 0x47, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x3a, 0x3a, 0x41, 0x64, 0x73, 0x3a, 0x3a, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x41, 0x64, 0x73, 0x3a, 0x3a, 0x56, 0x35, 0x3a, 0x3a, 0x45, 0x6e, 0x75, 0x6d, 0x73, 0x62, 0x06,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_rawDescOnce sync.Once
file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_rawDescData = file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_rawDesc
)
func file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_rawDescGZIP() []byte {
file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_rawDescOnce.Do(func() {
file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_rawDescData = protoimpl.X.CompressGZIP(file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_rawDescData)
})
return file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_rawDescData
}
var file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
var file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_msgTypes = make([]protoimpl.MessageInfo, 1)
var file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_goTypes = []interface{}{
(ReachPlanAdLengthEnum_ReachPlanAdLength)(0), // 0: google.ads.googleads.v5.enums.ReachPlanAdLengthEnum.ReachPlanAdLength
(*ReachPlanAdLengthEnum)(nil), // 1: google.ads.googleads.v5.enums.ReachPlanAdLengthEnum
}
var file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_depIdxs = []int32{
0, // [0:0] is the sub-list for method output_type
0, // [0:0] is the sub-list for method input_type
0, // [0:0] is the sub-list for extension type_name
0, // [0:0] is the sub-list for extension extendee
0, // [0:0] is the sub-list for field type_name
}
func init() { file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_init() }
func file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_init() | {
if File_google_ads_googleads_v5_enums_reach_plan_ad_length_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ReachPlanAdLengthEnum); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_rawDesc,
NumEnums: 1,
NumMessages: 1,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_goTypes,
DependencyIndexes: file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_depIdxs,
EnumInfos: file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_enumTypes,
MessageInfos: file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_msgTypes,
}.Build()
File_google_ads_googleads_v5_enums_reach_plan_ad_length_proto = out.File
file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_rawDesc = nil
file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_goTypes = nil
file_google_ads_googleads_v5_enums_reach_plan_ad_length_proto_depIdxs = nil
} |
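For consumers, the generated enum behaves like any other protobuf enum: values stringify via String() and round-trip through the generated name/value maps. A small usage sketch; the import path is taken from the go_package option embedded in the descriptor above:

```go
package main

import (
	"fmt"

	enums "google.golang.org/genproto/googleapis/ads/googleads/v5/enums"
)

func main() {
	length := enums.ReachPlanAdLengthEnum_FIFTEEN_OR_TWENTY_SECONDS
	fmt.Println(length.String()) // FIFTEEN_OR_TWENTY_SECONDS
	fmt.Println(int32(length))   // 3

	// Name <-> value lookups via the generated maps.
	fmt.Println(enums.ReachPlanAdLengthEnum_ReachPlanAdLength_value["SIX_SECONDS"]) // 2
}
```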
|
signin.js | import React, { Component } from 'react';
import SignInForm from './signinForm';
import PageTitle from '../pageTitle';
import {connect} from "react-redux";
import * as actions from "../../actions";
class SignIn extends Component {
componentDidMount() {
this.props.setHeaderLinks([]);
this.props.setNavbarLinks([]);
}
onSubmit = (fields) => {
console.log(fields);
}
render() { | </div>
)
}
}
SignIn = connect(null, actions)(SignIn);
export default SignIn; | return (
<div className='sign-in'>
<PageTitle className="sign-in__page-title" title="Login"/>
<SignInForm onSubmit={this.onSubmit} className='sign-in__form' /> |
configuration.test.ts | // Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
import assert from 'assert';
import path from 'path';
import { Configuration } from '../src/configuration';
describe('Configuration', function () {
const makeConfiguration = (files = ['base.json']) => {
const configuration = new Configuration();
files.forEach((file) => configuration.file(path.join(__dirname, 'settings', file)));
configuration.argv(['--strings.argv', 'argv']);
process.env['strings__env'] = 'env';
configuration.env();
return configuration;
};
describe('flags', function () {
it('works', async function () {
const flags = makeConfiguration().bind(['flags']);
assert.strictEqual(flags.bool(['on']), true);
assert.strictEqual(flags.bool(['off']), false);
assert.throws(() => flags.bool(['bad']));
});
});
describe('strings', function () {
it('works', async function () {
const strings = makeConfiguration().bind(['strings']);
assert.strictEqual(strings.string(['ok']), 'howdy');
assert.throws(() => strings.string(['bad']));
assert.strictEqual(strings.string(['unset']), undefined);
strings.set(['unset'], 'set'); | assert.strictEqual(strings.string(['env']), 'env');
assert.strictEqual(strings.string(['argv']), 'argv');
});
});
describe('nesting and layering', function () {
it('works', async function () {
const base = makeConfiguration();
assert.strictEqual(base.get(['root', 'key']), 'base');
const layered = makeConfiguration(['layer.json', 'base.json']);
assert.strictEqual(layered.get(['root', 'key']), 'layer');
});
});
}); | assert.strictEqual(strings.string(['unset']), 'set');
|
removeDuplicateItems.ts | import type { ISanivaliDef } from '_src/types';
export type RemoveDuplicateItemsParam =
| boolean
| string
| ((x: any) => string)
| undefined;
export type RemoveDuplicateItemsRuleItem =
| 'removeDuplicateItems'
| ['removeDuplicateItems', RemoveDuplicateItemsParam?];
export const removeDuplicateItemsDef: ISanivaliDef = {
  sanitizer: (getKey?: RemoveDuplicateItemsParam) => {
    // Array#filter's second argument becomes `this` inside the callback,
    // so each sanitizer run gets a fresh object tracking keys already seen.
    if (getKey === false) return null;
if (typeof getKey === 'string') {
return (v: any[]) =>
v.filter(function (this: Record<string, 1>, x): boolean {
const key = x[getKey];
if (this[key] === 1) {
return false;
}
this[key] = 1;
return true;
}, {});
}
if (typeof getKey === 'function') {
return (v: any[]) =>
v.filter(function (this: Record<string, 1>, x): boolean {
const key = getKey(x);
if (this[key] === 1) {
return false;
}
this[key] = 1;
return true;
}, {});
} | return (v: any[]) =>
v.filter(function (this: Record<string, 1>, x): boolean {
const key = x;
if (this[key] === 1) {
return false;
}
this[key] = 1;
return true;
}, {});
},
}; | |
engine_test.go | package engine
import (
"errors"
"io/ioutil"
"os"
"testing"
"github.com/thrasher-corp/gocryptotrader/config"
)
func TestLoadConfigWithSettings(t *testing.T) {
empty := ""
somePath := "somePath"
// Clean up after the tests
defer os.RemoveAll(somePath)
tests := []struct {
name string
flags []string
settings *Settings
want *string
wantErr bool
}{
{
name: "invalid file",
settings: &Settings{
ConfigFile: "nonExistent.json",
},
wantErr: true,
},
{
name: "test file",
settings: &Settings{
ConfigFile: config.TestFile,
EnableDryRun: true,
},
want: &empty,
wantErr: false,
},
{
name: "data dir in settings overrides config data dir",
flags: []string{"datadir"},
settings: &Settings{
ConfigFile: config.TestFile,
DataDir: somePath,
EnableDryRun: true,
},
want: &somePath,
wantErr: false,
},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
// prepare the 'flags'
flagSet := make(map[string]bool)
for _, v := range tt.flags {
flagSet[v] = true
}
// Run the test
got, err := loadConfigWithSettings(tt.settings, flagSet)
if (err != nil) != tt.wantErr {
t.Errorf("loadConfigWithSettings() error = %v, wantErr %v", err, tt.wantErr)
return
}
if got != nil || tt.want != nil {
if (got == nil && tt.want != nil) || (got != nil && tt.want == nil) {
t.Errorf("loadConfigWithSettings() = is nil %v, want nil %v", got == nil, tt.want == nil)
} else if got.DataDirectory != *tt.want {
t.Errorf("loadConfigWithSettings() = %v, want %v", got.DataDirectory, *tt.want)
}
}
})
}
}
func TestStartStopDoesNotCausePanic(t *testing.T) |
var enableExperimentalTest = false
func TestStartStopTwoDoesNotCausePanic(t *testing.T) {
t.Parallel()
if !enableExperimentalTest {
t.Skip("test is functional, however does not need to be included in go test runs")
}
tempDir, err := ioutil.TempDir("", "")
if err != nil {
t.Fatalf("Problem creating temp dir at %s: %s\n", tempDir, err)
}
tempDir2, err := ioutil.TempDir("", "")
if err != nil {
t.Fatalf("Problem creating temp dir at %s: %s\n", tempDir, err)
}
defer func() {
err = os.RemoveAll(tempDir)
if err != nil {
t.Error(err)
}
err = os.RemoveAll(tempDir2)
if err != nil {
t.Error(err)
}
}()
botOne, err := NewFromSettings(&Settings{
ConfigFile: config.TestFile,
EnableDryRun: true,
DataDir: tempDir,
}, nil)
if err != nil {
t.Error(err)
}
botOne.Settings.EnableGRPCProxy = false
botTwo, err := NewFromSettings(&Settings{
ConfigFile: config.TestFile,
EnableDryRun: true,
DataDir: tempDir2,
}, nil)
if err != nil {
t.Error(err)
}
botTwo.Settings.EnableGRPCProxy = false
if err = botOne.Start(); err != nil {
t.Error(err)
}
if err = botTwo.Start(); err != nil {
t.Error(err)
}
botOne.Stop()
botTwo.Stop()
}
func TestCheckExchangeExists(t *testing.T) {
e := CreateTestBot(t)
if e.GetExchangeByName(testExchange) == nil {
t.Errorf("TestGetExchangeExists: Unable to find exchange")
}
if e.GetExchangeByName("Asdsad") != nil {
t.Errorf("TestGetExchangeExists: Non-existent exchange found")
}
}
func TestGetExchangeByName(t *testing.T) {
e := CreateTestBot(t)
exch := e.GetExchangeByName(testExchange)
if exch == nil {
t.Errorf("TestGetExchangeByName: Failed to get exchange")
}
if !exch.IsEnabled() {
t.Errorf("TestGetExchangeByName: Unexpected result")
}
exch.SetEnabled(false)
bfx := e.GetExchangeByName(testExchange)
if bfx.IsEnabled() {
t.Errorf("TestGetExchangeByName: Unexpected result")
}
if exch.GetName() != testExchange {
t.Errorf("TestGetExchangeByName: Unexpected result")
}
exch = e.GetExchangeByName("Asdasd")
if exch != nil {
t.Errorf("TestGetExchangeByName: Non-existent exchange found")
}
}
func TestUnloadExchange(t *testing.T) {
e := CreateTestBot(t)
err := e.UnloadExchange("asdf")
if !errors.Is(err, config.ErrExchangeNotFound) {
t.Errorf("error '%v', expected '%v'", err, config.ErrExchangeNotFound)
}
err = e.UnloadExchange(testExchange)
if err != nil {
t.Errorf("TestUnloadExchange: Failed to get exchange. %s",
err)
}
err = e.UnloadExchange(testExchange)
if !errors.Is(err, ErrNoExchangesLoaded) {
t.Errorf("error '%v', expected '%v'", err, ErrNoExchangesLoaded)
}
}
func TestDryRunParamInteraction(t *testing.T) {
bot := CreateTestBot(t)
	// Simulate overriding default settings and verify that exchange
	// verbose mode stays disabled on Bitfinex when not enabled
var err error
if err = bot.UnloadExchange(testExchange); err != nil {
t.Error(err)
}
bot.Settings.CheckParamInteraction = false
bot.Settings.EnableExchangeVerbose = false
if err = bot.LoadExchange(testExchange, false, nil); err != nil {
t.Error(err)
}
exchCfg, err := bot.Config.GetExchangeConfig(testExchange)
if err != nil {
t.Error(err)
}
if exchCfg.Verbose {
t.Error("verbose should have been disabled")
}
if err = bot.UnloadExchange(testExchange); err != nil {
t.Error(err)
}
// Now set dryrun mode to true,
// enable exchange verbose mode and verify that verbose mode
// will be set on Bitfinex
bot.Settings.EnableDryRun = true
bot.Settings.CheckParamInteraction = true
bot.Settings.EnableExchangeVerbose = true
if err = bot.LoadExchange(testExchange, false, nil); err != nil {
t.Error(err)
}
exchCfg, err = bot.Config.GetExchangeConfig(testExchange)
if err != nil {
t.Error(err)
}
if !bot.Settings.EnableDryRun ||
!exchCfg.Verbose {
t.Error("dryrun should be true and verbose should be true")
}
}
| {
t.Parallel()
botOne, err := NewFromSettings(&Settings{
ConfigFile: config.TestFile,
EnableDryRun: true,
}, nil)
if err != nil {
t.Error(err)
}
botOne.Settings.EnableGRPCProxy = false
if err = botOne.Start(); err != nil {
t.Error(err)
}
botOne.Stop()
} |
jsonrpc.go | package daemons
import "encoding/json"
type JsonRpc interface {
GetJsonRpcId() int64
Json() []byte
}
type JsonRpcResponse struct {
Id interface{} `json:"id"` // be int64 or null
Result json.RawMessage `json:"result,omitempty"`
Error *JsonRpcError `json:"error,omitempty"`
}
func (j *JsonRpcResponse) GetJsonRpcId() int64 {
	// encoding/json decodes numbers into interface{} as float64, so handle both.
	switch id := j.Id.(type) {
	case int64:
		return id
	case float64:
		return int64(id)
	default:
		return 0
	}
}
func (j *JsonRpcResponse) Json() []byte {
raw, _ := json.Marshal(j)
return raw
}
| Id interface{} `json:"id"`
Method string `json:"method"`
Params []json.RawMessage `json:"params"`
}
func (j *JsonRpcRequest) GetJsonRpcId() int64 {
	// encoding/json decodes numbers into interface{} as float64, so handle both.
	switch id := j.Id.(type) {
	case int64:
		return id
	case float64:
		return int64(id)
	default:
		return 0
	}
}
func (j *JsonRpcRequest) Json() []byte {
raw, _ := json.Marshal(j)
return raw
}
type JsonRpcError struct {
Code int `json:"code"`
Message string `json:"message"`
}
//type Method string
//
//const (
// MethodSubmitBlock Method = "getsubmitblock"
// MethodGetBlockTemplate Method = "getblocktemplate"
// MethodGetBlock Method = "getblock"
// MethodGetBalance Method = "getbalance"
// MethodValidateAddress Method = "validateaddress"
// ) | type JsonRpcRequest struct { |
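A short round-trip sketch of these types, placed in the same package; the method name and params are placeholders. Note that after json.Unmarshal the Id field holds a float64, which is why GetJsonRpcId handles both numeric types:

```go
package daemons

import (
	"encoding/json"
	"fmt"
)

func ExampleJsonRpc() {
	// Build a request; params are raw JSON fragments.
	req := &JsonRpcRequest{
		Id:     int64(1),
		Method: "getblocktemplate", // placeholder method name
		Params: []json.RawMessage{json.RawMessage(`{}`)},
	}
	fmt.Println(string(req.Json()))

	// Decode a response and inspect its error, if any.
	var resp JsonRpcResponse
	_ = json.Unmarshal([]byte(`{"id":1,"error":{"code":-1,"message":"boom"}}`), &resp)
	if resp.Error != nil {
		fmt.Println(resp.GetJsonRpcId(), resp.Error.Message)
	}
}
```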
gateway_api_grpc.pb.go | // Code generated by protoc-gen-go-grpc. DO NOT EDIT.
package protov1
import (
context "context"
empty "github.com/golang/protobuf/ptypes/empty"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion6
// GatewayAPIClient is the client API for GatewayAPI service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type GatewayAPIClient interface {
// Reachability test.
Ping(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*PingResponse, error)
}
type gatewayAPIClient struct {
cc grpc.ClientConnInterface
}
func | (cc grpc.ClientConnInterface) GatewayAPIClient {
return &gatewayAPIClient{cc}
}
func (c *gatewayAPIClient) Ping(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*PingResponse, error) {
out := new(PingResponse)
err := c.cc.Invoke(ctx, "/com.affinity.gateway.proto.v1.GatewayAPI/Ping", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// GatewayAPIServer is the server API for GatewayAPI service.
// All implementations must embed UnimplementedGatewayAPIServer
// for forward compatibility
type GatewayAPIServer interface {
// Reachability test.
Ping(context.Context, *empty.Empty) (*PingResponse, error)
mustEmbedUnimplementedGatewayAPIServer()
}
// UnimplementedGatewayAPIServer must be embedded to have forward compatible implementations.
type UnimplementedGatewayAPIServer struct {
}
func (*UnimplementedGatewayAPIServer) Ping(context.Context, *empty.Empty) (*PingResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method Ping not implemented")
}
func (*UnimplementedGatewayAPIServer) mustEmbedUnimplementedGatewayAPIServer() {}
func RegisterGatewayAPIServer(s *grpc.Server, srv GatewayAPIServer) {
s.RegisterService(&_GatewayAPI_serviceDesc, srv)
}
func _GatewayAPI_Ping_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(empty.Empty)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(GatewayAPIServer).Ping(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/com.affinity.gateway.proto.v1.GatewayAPI/Ping",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(GatewayAPIServer).Ping(ctx, req.(*empty.Empty))
}
return interceptor(ctx, in, info, handler)
}
var _GatewayAPI_serviceDesc = grpc.ServiceDesc{
ServiceName: "com.affinity.gateway.proto.v1.GatewayAPI",
HandlerType: (*GatewayAPIServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "Ping",
Handler: _GatewayAPI_Ping_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "v1/gateway_api.proto",
}
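A minimal server sketch satisfying GatewayAPIServer. The import path for the generated package is an assumption, and since PingResponse's fields are not shown in this excerpt, the handler returns an empty message:

```go
package main

import (
	"context"
	"log"
	"net"

	"github.com/golang/protobuf/ptypes/empty"
	"google.golang.org/grpc"

	protov1 "example.com/gateway/proto/v1" // assumed import path
)

// gatewayServer embeds UnimplementedGatewayAPIServer for forward compatibility.
type gatewayServer struct {
	protov1.UnimplementedGatewayAPIServer
}

func (s *gatewayServer) Ping(ctx context.Context, _ *empty.Empty) (*protov1.PingResponse, error) {
	// Reachability probe: an empty response is sufficient here.
	return &protov1.PingResponse{}, nil
}

func main() {
	lis, err := net.Listen("tcp", ":50051") // placeholder port
	if err != nil {
		log.Fatal(err)
	}
	s := grpc.NewServer()
	protov1.RegisterGatewayAPIServer(s, &gatewayServer{})
	log.Fatal(s.Serve(lis))
}
```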
| NewGatewayAPIClient |
simulated.py | from qfengine.exchange.exchange import Exchange
import datetime
class SimulatedExchange(Exchange):
"""
The SimulatedExchange class is used to model a live
trading venue.
It exposes methods to inform a client class intance of
when the exchange is open to determine when orders can
be executed.
Parameters
----------
start_dt : `pd.Timestamp`
The starting time of the simulated exchange.
"""
def __init__(self, start_dt):
self.start_dt = start_dt
# TODO: Eliminate hardcoding of NYSE
# TODO: Make these timezone-aware
self.open_dt = datetime.time(9, 30)
self.close_dt = datetime.time(16, 00)
def is_open_at_datetime(self, dt):
| """
Check if the SimulatedExchange is open at a particular
provided pandas Timestamp.
This logic is simplistic in that it only checks whether
the provided time is between market hours on a weekday.
There is no historical calendar handling or concept of
exchange holidays.
Parameters
----------
dt : `pd.Timestamp`
The timestamp to check for open market hours.
Returns
-------
`Boolean`
Whether the exchange is open at this timestamp.
"""
if dt.weekday() > 4: # NOT OPEN ON WEEKEND
return False
return (
(self.open_dt <= dt.time())
and
(dt.time() < self.close_dt)
) |
|
meta.py | from abc import ABCMeta
from uuid import UUID
import jsonschema
from dateutil.parser import parse as dateparse
from uptimer.events import SCHEMATA_PATH
from uptimer.events.cache import schema_cache
from uptimer.helpers import to_bool, to_none
class EventDefinitionError(ValueError):
pass
class EventMeta(ABCMeta, metaclass=ABCMeta):
schema_path: str = f"file:///{SCHEMATA_PATH}"
"""Base-URL at which the schema resolver will look up schema references."""
def __new__(cls, name, bases, attrs, **kwargs):
super_new = super().__new__
schema = attrs.pop("schema", None)
# `table` can be a valid None, so use False as placeholder of missing property
table = attrs.pop("table", False)
if not schema:
raise EventDefinitionError(f"Class {name} did not declare a JSON schema.")
if table is False:
raise EventDefinitionError(
f"Class {name} did not declare a database table mapping."
)
# Now resolve and parse the JSON schema for additional properties; generating
# useful representations, the proper schema resolver for validation, etc.
# Inserting them in the `attrs` dictionary will cause them to become regular
# class variables, available in every instantiated class object.
schema_spec = schema_cache[schema]
if schema_spec["title"] != name:
raise EventDefinitionError(
f"Name of class {name} must be equal to "
f"JSON schema title '{schema_spec['title']}'"
)
properties_dict = cls._collect_properties(schema_spec)
properties = list(properties_dict.keys())
property_cast_mapping = {
prop: cls.property_to_python(spec) for prop, spec in properties_dict.items()
}
resolver = jsonschema.RefResolver(cls.schema_path, schema_spec)
attrs.update(
dict(
schema=schema,
table=table,
schema_spec=schema_spec,
properties_dict=properties_dict,
properties=properties,
property_cast_mapping=property_cast_mapping,
_resolver=resolver,
)
)
return super_new(cls, name, bases, attrs, **kwargs)
@staticmethod
def _collect_properties(schema):
"""Collects a list of all (including nested and conditional) properties."""
props = dict()
array_iter = []
if isinstance(schema, list):
array_iter = enumerate(schema)
elif isinstance(schema, dict):
array_iter = schema.items() | props.update(value)
elif key == "required":
continue
else:
props.update(EventMeta._collect_properties(value))
return props
@staticmethod
def property_to_python(property_spec):
"""
Returns a list of appropriate python-native datatypes for a schema property.
Based on the event class'es schema, a list of callables is returned that a
value might be tried against. The list is ordered from most to least strict
as to prevent falsely casting values as a less strict type.
Possible types taken from JSON schema validation specification
http://json-schema.org/latest/json-schema-validation.html#rfc.section.6.1.1
"""
propformat = property_spec.get("format")
if propformat == "date-time":
return [dateparse]
if propformat == "uuid":
return [UUID]
proptypes = property_spec.get("type")
if not proptypes:
return []
if not isinstance(proptypes, list):
proptypes = [proptypes]
callables = []
if "null" in proptypes:
callables.append(to_none)
if "boolean" in proptypes:
callables.append(to_bool)
if "integer" in proptypes:
callables.append(int)
if "number" in proptypes:
callables.append(float)
return callables |
for key, value in array_iter:
if key == "properties": |
__init__.py | from __future__ import unicode_literals
from .. import Provider as LoremProvider
class Provider(LoremProvider):
word_list = (
'войти', 'монета', 'вскинуть', 'желание', 'экзамен', 'налоговый',
'вытаскивать', 'приятель', 'вздрагивать', 'куча', 'порт', 'точно',
'заплакать', 'изба', 'правление', 'художественный', 'мучительно',
'изображать', 'фонарик', 'миф', 'грустный', 'опасность', 'мера',
'пастух', 'факультет', 'мелькнуть', 'полевой', 'другой', 'выраженный',
'забирать', 'рот', 'народ', 'соответствие', 'тута', 'коммунизм',
'решение', 'плод', 'собеседник', 'возмутиться', 'достоинство',
'господь', 'болото', 'инфекция', 'голубчик', 'сынок', 'пространство',
'прощение', 'прежде', 'хотеть', 'ленинград', 'даль', 'развитый',
'близко', 'более', 'спорт', 'эпоха', 'ответить', 'освободить', 'совет',
'проход', 'палец', 'вчера', 'приличный', 'ярко', 'белье', 'кузнец',
'неожиданно', 'вперед', 'зато', 'кольцо', 'передо', 'мгновение',
'плавно', 'табак', 'число', 'изучить', 'тяжелый', 'рассуждение',
'салон', 'идея', 'что', 'светило', 'порода', 'сомнительный', 'бок',
'очко', 'неудобно', 'советовать', 'отдел', 'помолчать', 'поздравлять',
'пробовать', 'дошлый', 'смеяться', 'упорно', 'вздрогнуть', 'затянуться',
'танцевать', 'песенка', 'выбирать', 'правильный', 'намерение', 'издали',
'запустить', 'наслаждение', 'крыса', 'лететь', 'космос', 'радость',
'поезд', 'находить', 'гулять', 'горький', 'бочок', 'ночь', 'счастье',
'уничтожение', 'дьявол', 'коробка', 'спасть', 'кожа', 'провинция',
'прелесть', 'тюрьма', 'низкий', 'сверкать', 'темнеть', 'солнце',
'дружно', 'настать', 'блин', 'степь', 'самостоятельно', 'крутой',
'картинка', 'зачем', 'рабочий', 'необычный', 'армейский', 'труп',
'ягода', 'около', 'монета', 'естественный', 'юный', 'район', 'скрытый',
'поймать', 'строительство', 'палата', 'миг', 'триста', 'штаб', 'ломать',
'возможно', 'полюбить', 'человечек', 'легко', 'чувство', 'ручей',
'карман', 'деньги', 'неправда', 'сравнение', 'грудь', 'отъезд',
'возникновение', 'степь', 'возбуждение', 'деловой', 'следовательно',
'жидкий', 'сынок', 'художественный', 'поколение', 'расстегнуть', 'пища',
'ученый', 'секунда', 'успокоиться', 'вряд', 'аж', 'вскакивать', 'мимо',
'падать', 'потянуться', 'угроза', 'растеряться', 'бегать', 'стакан',
'о', 'кпсс', 'ныне', 'пол', 'реклама', 'при', 'школьный', 'премьера',
'дальний', 'потрясти', 'освобождение', 'покидать', 'наступать', 'жить',
'какой', 'обида', 'командование', 'девка', 'выражаться', 'головной',
'второй', 'князь', 'социалистический', 'головка', 'привлекать', 'через',
'господь', 'результат', 'отметить', 'ведь', 'падаль', 'покидать',
'художественный', 'правый', 'висеть', 'лапа', 'каюта', 'слишком',
'нервно', 'серьезный', 'зима', 'заработать', 'эффект', 'пропасть',
'плод', 'что', 'висеть', 'холодно', 'единый', 'выкинуть', 'мрачно',
'выгнать', 'умирать', 'иной', 'космос', 'природа', 'функция',
'поставить', 'оборот', 'услать', 'очередной', 'медицина', 'функция',
'зарплата', 'выдержать', 'расстройство', 'адвокат', 'задержать',
'появление', 'инвалид', 'интеллектуальный', 'исследование', 'господь',
'смертельный', 'спичка', 'вариант', 'рай', 'одиннадцать', 'чем',
'манера', 'магазин', 'поговорить', 'полоска', 'помимо', 'построить',
'домашний', 'механический', 'сохранять', 'отражение', 'научить',
'тесно', 'аллея', 'прежний', 'посидеть', 'славный', 'очутиться',
'лететь', 'невозможно', 'порядок', 'выразить', 'спешить', 'сынок',
'ребятишки', 'угроза', 'оставить', 'цвет', 'налево', 'парень',
'миллиард', 'горький', 'трубка', 'подробность', 'пасть', 'непривычный',
'угодный', 'засунуть', 'цель', 'запретить', 'дремать', 'разуметься',
'приходить', 'совещание', 'постоянный', 'анализ', 'терапия', 'приятель',
'процесс', 'академик', 'металл', 'развернуться', 'жестокий', 'интернет',
'банда', 'изменение', 'коллектив', 'похороны', 'устройство',
'торопливый', 'разводить', 'промолчать', 'подземный', 'пламя',
'редактор', 'теория', 'карандаш', 'упор', 'означать', 'бабочка',
'четыре', 'столетие', 'разнообразный', 'витрина', 'нож', 'команда',
'шлем', 'недостаток', 'протягивать', 'за', 'металл', 'добиться',
'сутки', 'четко', 'предоставить', 'тысяча', 'запеть', 'бригада',
'мелочь', 'выраженный', 'пересечь', 'сходить', 'вообще', 'рис', 'банк',
'бак', 'передо', 'назначить', 'важный', 'правление', 'палка', 'трясти',
'уронить', 'витрина', 'основание', 'да', 'перебивать', 'дыхание',
'применяться', 'июнь', 'бетонный', 'избегать', 'умолять', 'мягкий',
'заявление', 'конференция', 'встать', 'свежий', 'сопровождаться',
'цепочка', 'выражение', 'угол', 'ботинок', 'ложиться', 'инструкция',
'присесть', 'решетка', 'еврейский', 'порог', 'зеленый', 'граница',
'ставить', 'смелый', 'сустав', 'роса', 'демократия', 'вывести',
'конструкция', 'задрать', 'багровый', 'военный', 'направо', 'житель',
'товар', 'неправда', 'материя', 'командующий', 'кидать', 'заложить',
'лиловый', 'слать', 'горький', 'пространство', 'провал', 'мусор',
'наткнуться', 'торговля', 'монета', 'место', 'спалить', 'бровь',
'левый', 'хлеб', 'коричневый', 'потом', 'страсть', 'виднеться',
'роскошный', 'способ', 'костер', 'заведение', 'пропадать', 'слишком',
'пятеро', 'мальчишка', 'тусклый', 'неожиданный', 'плясать', 'дурацкий',
'дрогнуть', 'сбросить', 'прошептать', 'беспомощный', 'рота', 'песня',
'тревога', 'некоторый', 'термин', 'нажать', 'видимо', 'валюта', 'набор',
'боец', 'райком', 'новый', 'скользить', 'руководитель', 'волк',
'изредка', 'понятный', 'пропаганда', 'остановить', 'исполнять', 'ход',
'госпожа', 'печатать', 'командир', 'снимать', 'казнь', 'невыносимый',
'спорт', 'тревога', 'уточнить', 'актриса', 'полностью', 'покинуть',
'сверкающий', 'мотоцикл', 'дорогой', 'указанный', 'ремень', 'посвятить',
'один', 'а', 'доставать', 'хозяйка', 'носок', 'написать', 'еврейский',
'призыв', 'увеличиваться', 'равнодушный',
) | # -*- coding: utf-8 -*- |
|
app-name.js | /**
* External dependencies
*/
const path = require( 'path' );
/**
* Internal dependencies
*/
const { getTestConfig } = require( './test-config' );
const getAppRoot = require( './app-root' );
| return testConfig.appName;
}
return getAppBase();
};
const getAppBase = () => {
const appRoot = getAppRoot();
return path.basename( appRoot );
};
module.exports = {
getAppName,
getAppBase,
}; | const getAppName = () => {
const testConfig = getTestConfig();
if ( testConfig.appName ) { |
i18n.ts | import * as pluralize from 'pluralize';
import { I18N_GENERIC } from '../../meta-schema/constants';
import {
arrayStartsWith,
capitalize,
compact,
decapitalize,
groupArray,
mapFirstDefined,
mapValues
} from '../../utils/utils';
import {
LocalizationBaseConfig,
LocalizationConfig,
NamespaceLocalizationConfig,
TypeLocalizationConfig
} from '../config';
import { MessageLocation, ValidationMessage } from '../validation';
import { ModelComponent, ValidationContext } from '../validation/validation-context';
import { EnumValue } from './enum-type';
import { Field } from './field';
import { Model } from './model';
import { Type } from './type';
import { TypeBase } from './type-base';
export class ModelI18n implements ModelComponent {
private readonly languageLocalizationProvidersByLanguage: ReadonlyMap<string, ModelLocalizationProvider>;
constructor(input: ReadonlyArray<LocalizationConfig>, private readonly model: Model) {
// collect configs by language and create one localization provider per language
const configsByLanguage = groupArray(input, config => config.language);
const localizationsByLanguage = mapValues(configsByLanguage, configs =>
configs.map(config => new NamespaceLocalization(config))
);
this.languageLocalizationProvidersByLanguage = mapValues(
localizationsByLanguage,
localizations => new ModelLocalizationProvider(localizations)
);
}
public validate(context: ValidationContext): void {
for (const localizationProvider of this.languageLocalizationProvidersByLanguage.values()) {
localizationProvider.validate(context, this.model);
}
}
public getTypeLocalization(type: TypeBase, resolutionOrder: ReadonlyArray<string>): TypeLocalization {
const resolutionProviders = this.getResolutionProviders(resolutionOrder);
// try to build one complete type localization out of the available possibly partial localizations
return {
label: mapFirstDefined(resolutionProviders, rp => rp.localizeType(type).label),
labelPlural: mapFirstDefined(resolutionProviders, rp => rp.localizeType(type).labelPlural),
hint: mapFirstDefined(resolutionProviders, rp => rp.localizeType(type).hint)
};
}
public getFieldLocalization(field: Field, resolutionOrder: ReadonlyArray<string>): FieldLocalization {
const resolutionProviders = this.getResolutionProviders(resolutionOrder);
// try to build one complete field localization out of the available possibly partial localizations
return {
label: mapFirstDefined(resolutionProviders, rp => rp.localizeField(field).label),
hint: mapFirstDefined(resolutionProviders, rp => rp.localizeField(field).hint)
};
}
public getEnumValueLocalization(
enumValue: EnumValue,
resolutionOrder: ReadonlyArray<string>
): EnumValueLocalization {
const resolutionProviders = this.getResolutionProviders(resolutionOrder);
return {
label: mapFirstDefined(resolutionProviders, rp => rp.localizeEnumValue(enumValue).label),
hint: mapFirstDefined(resolutionProviders, rp => rp.localizeEnumValue(enumValue).hint)
};
}
private getResolutionProviders(resolutionOrder: ReadonlyArray<string>): ReadonlyArray<LocalizationProvider> {
return compact(
resolutionOrder.map(providerName => {
switch (providerName) {
case I18N_GENERIC:
return new GenericLocalizationProvider();
default:
return this.languageLocalizationProvidersByLanguage.get(providerName);
}
})
);
}
}
export class NamespaceLocalization {
public readonly namespacePath: ReadonlyArray<string>;
constructor(private readonly config: NamespaceLocalizationConfig) {
this.namespacePath = config.namespacePath;
}
public getTypeLocalization(name: string): TypeLocalization | undefined {
if (!this.config.types || !this.config.types[name]) {
return undefined;
}
const type = this.config.types[name];
return {
label: type.label,
labelPlural: type.labelPlural,
hint: type.hint,
loc: type.loc
};
}
public getFieldLocalization({
typeName,
fieldName
}: {
typeName: string;
fieldName: string;
}): FieldLocalization | undefined {
return this.getElementLocalization({ typeName, elementName: fieldName, property: 'fields' });
}
public getEnumValueLocalization({
typeName,
enumValue
}: {
typeName: string;
enumValue: string;
}): EnumValueLocalization | undefined {
return this.getElementLocalization({ typeName, elementName: enumValue, property: 'values' });
}
private getElementLocalization({
typeName,
elementName,
property
}: {
typeName: string;
elementName: string;
property: 'fields' | 'values';
}): FieldLocalization | undefined {
if (!this.config.types || !this.config.types[typeName]) {
return undefined;
}
const typeConfig = this.config.types[typeName];
let elementLocalizations: { [name: string]: LocalizationBaseConfig } | undefined = typeConfig[property];
if (!elementLocalizations) {
return undefined;
}
const element = elementLocalizations[elementName];
if (!element) {
return undefined;
}
return {
hint: element.hint,
label: element.label,
loc: element.loc
};
}
/**
* Gets a localization for a field name outside of a type declaration
*
* This should be used as fallback if no direct type-field localization is present
*/
public getCommonFieldLocalization(name: string): FieldLocalization | undefined {
if (!this.config.fields || !this.config.fields[name]) {
return undefined;
}
const field = this.config.fields[name];
return {
hint: field.hint,
label: field.label,
loc: field.loc
};
}
get loc(): MessageLocation | undefined {
return this.config.loc;
}
get types(): { [name: string]: TypeLocalizationConfig } | undefined {
return this.config.types;
}
}
export interface TypeLocalization {
readonly label?: string;
readonly labelPlural?: string;
readonly hint?: string;
readonly loc?: MessageLocation;
}
export interface FieldLocalization extends LocalizationBaseConfig {}
export interface EnumValueLocalization extends LocalizationBaseConfig {}
interface LocalizationProvider {
localizeType(type: TypeBase): TypeLocalization;
localizeField(field: Field): FieldLocalization;
localizeEnumValue(enumValue: EnumValue): EnumValueLocalization;
}
class ModelLocalizationProvider implements LocalizationProvider {
constructor(private namespaces: ReadonlyArray<NamespaceLocalization>) {}
private getMatchingNamespaces(namespacePath: ReadonlyArray<string>): ReadonlyArray<NamespaceLocalization> {
return this.namespaces
.filter(set => arrayStartsWith(namespacePath, set.namespacePath))
.sort((lhs, rhs) => lhs.namespacePath.length - rhs.namespacePath.length);
}
validate(validationContext: ValidationContext, model: Model) {
const groupedNamespaceLocalizations = groupArray(this.namespaces, ns => ns.namespacePath.join('.'));
for (const namespaces of groupedNamespaceLocalizations.values()) {
checkForDoubleDefinitions(namespaces, validationContext);
checkForTypeConstraints(namespaces, model, validationContext);
}
}
localizeType(type: TypeBase): TypeLocalization {
const matchingNamespaces = this.getMatchingNamespaces(type.namespacePath);
const matchingTypeLocalizations = compact(matchingNamespaces.map(ns => ns.getTypeLocalization(type.name)));
return {
label: mapFirstDefined(matchingTypeLocalizations, t => t.label),
labelPlural: mapFirstDefined(matchingTypeLocalizations, t => t.labelPlural),
hint: mapFirstDefined(matchingTypeLocalizations, t => t.hint)
};
}
localizeField(field: Field): FieldLocalization {
const matchingNamespaces = this.getMatchingNamespaces(field.declaringType.namespacePath);
let label: string | undefined;
let hint: string | undefined;
// first, try to find a localization declared on the type
for (const namespace of matchingNamespaces) {
const typeField = namespace.getFieldLocalization({
typeName: field.declaringType.name, | });
if (typeField) {
label = label ? label : typeField.label;
hint = hint ? hint : typeField.hint;
if (label && hint) {
break;
}
}
}
// fall back to global field localization
for (const namespace of matchingNamespaces) {
const typeField = namespace.getCommonFieldLocalization(field.name);
if (typeField) {
label = label ? label : typeField.label;
hint = hint ? hint : typeField.hint;
}
if (label && hint) {
break;
}
}
return { label: label, hint: hint };
}
localizeEnumValue(enumValue: EnumValue): EnumValueLocalization {
const matchingNamespaces = this.getMatchingNamespaces(enumValue.declaringType.namespacePath);
let label: string | undefined;
let hint: string | undefined;
for (const namespace of matchingNamespaces) {
const localization = namespace.getEnumValueLocalization({
typeName: enumValue.declaringType.name,
enumValue: enumValue.value
});
if (localization) {
label = label ? label : localization.label;
hint = hint ? hint : localization.hint;
if (label && hint) {
break;
}
}
}
return { label: label, hint: hint };
}
}
function checkForTypeConstraints(
namespaces: ReadonlyArray<NamespaceLocalization>,
model: Model,
validationContext: ValidationContext
) {
for (const ns of namespaces) {
if (ns.types) {
for (const typeKey in ns.types) {
const type = ns.types[typeKey];
const modelType: TypeBase | undefined = model.getType(typeKey);
if (!modelType) {
validationContext.addMessage(
ValidationMessage.warn(
'There is no type "' +
typeKey +
'" in the model specification. This might be a spelling error.',
type.loc
)
);
continue;
}
if (type.fields) {
try {
const objectType = model.getObjectTypeOrThrow(typeKey);
for (const field in type.fields) {
if (!objectType.fields.find(f => f.name === field)) {
validationContext.addMessage(
ValidationMessage.warn(
'The type "' +
typeKey +
'" has no field "' +
field +
'". This might be a spelling error.',
type.fields[field].loc
)
);
}
}
} catch (e) {
validationContext.addMessage(
ValidationMessage.error(
'The type "' +
typeKey +
'" is a non-object-type. It does not have "fields" attribute. Did you mean to use "values" instead?',
type.loc
)
);
}
} else if (type.values) {
const enumType = model.getEnumType(typeKey);
if (!enumType) {
validationContext.addMessage(
ValidationMessage.error(
'The type "' +
typeKey +
'" is not an enum type. It does not have "values" attribute. Did you mean to use "fields" instead?',
type.loc
)
);
} else {
if (type.values) {
for (const value in type.values) {
if (!enumType.values.find(v => v.value === value)) {
validationContext.addMessage(
ValidationMessage.warn(
'The enum type "' +
typeKey +
'" has no value "' +
value +
'". This might be a spelling error.',
type.values[value].loc
)
);
}
}
}
}
}
}
}
}
}
function checkForDoubleDefinitions(
namespaces: ReadonlyArray<NamespaceLocalization>,
validationContext: ValidationContext
) {
const alreadySeen: string[] = [];
for (const ns of namespaces) {
if (ns.types) {
for (const type in ns.types) {
const typeConf = ns.types[type];
if (typeConf.hint && isExistingAndAdd(type + '/hint', alreadySeen)) {
validationContext.addMessage(
ValidationMessage.error(
'The attribute "hint" in type "' +
type +
'" was defined several times in the i18n translation',
typeConf.loc
)
);
}
if (typeConf.label && isExistingAndAdd(type + '/label', alreadySeen)) {
validationContext.addMessage(
ValidationMessage.error(
'The attribute "label" in type "' +
type +
'" was defined several times in the i18n translation',
typeConf.loc
)
);
}
if (typeConf.labelPlural && isExistingAndAdd(type + '/labelPlural', alreadySeen)) {
validationContext.addMessage(
ValidationMessage.error(
'The attribute "labelPlural" in type "' +
type +
'" was defined several times in the i18n translation',
typeConf.loc
)
);
}
if (typeConf && typeConf.fields) {
for (const locBase in typeConf.fields) {
const fieldConf = typeConf.fields[locBase];
if (
fieldConf &&
fieldConf.label &&
isExistingAndAdd(type + '/' + locBase + '/label', alreadySeen)
) {
validationContext.addMessage(
ValidationMessage.error(
'The attribute "label" in field "' +
locBase +
'" of type "' +
type +
'" was defined several times in the i18n translation',
fieldConf.loc
)
);
}
if (
fieldConf &&
fieldConf.hint &&
isExistingAndAdd(type + '/' + locBase + '/hint', alreadySeen)
) {
validationContext.addMessage(
ValidationMessage.error(
'The attribute "hint" in field "' +
locBase +
'" of type "' +
type +
'" was defined several times in the i18n translation',
fieldConf.loc
)
);
}
}
}
if (typeConf && typeConf.values) {
for (const locBase in typeConf.values) {
const valueConf = typeConf.values[locBase];
if (
valueConf &&
valueConf.label &&
isExistingAndAdd(type + '/' + locBase + '/label', alreadySeen)
) {
validationContext.addMessage(
ValidationMessage.error(
'The attribute "label" in value "' +
locBase +
'" of type "' +
type +
'" was defined several times in the i18n translation',
valueConf.loc
)
);
}
if (
valueConf &&
valueConf.hint &&
isExistingAndAdd(type + '/' + locBase + '/hint', alreadySeen)
) {
validationContext.addMessage(
ValidationMessage.error(
'The attribute "hint" in value "' +
locBase +
'" of type "' +
type +
'" was defined several times in the i18n translation',
valueConf.loc
)
);
}
}
}
}
}
}
}
function isExistingAndAdd(search: string, array: string[]) {
if (array.indexOf(search) >= 0) {
        return true;
}
array.push(search);
return false;
}
class GenericLocalizationProvider implements LocalizationProvider {
localizeField(field: Field): FieldLocalization {
return {
label: generateGenericName(field.name)
};
}
localizeType(type: Type): TypeLocalization {
return {
label: generateGenericName(type.name),
labelPlural: generateGenericName(type.pluralName)
};
}
localizeEnumValue(enumValue: EnumValue): FieldLocalization {
return {
label: generateGenericName(enumValue.value)
};
}
}
function generateGenericName(name: string | undefined): string | undefined {
if (name == undefined) {
return undefined;
}
return capitalize(name.replace(/([a-z])([A-Z])/g, (str, arg1, arg2) => `${arg1} ${decapitalize(arg2)}`));
} | fieldName: field.name |
s3ops.py | # Software License Agreement (BSD License)
#
# Copyright (c) 2009-2014, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Redistribution and use of this software in source and binary forms, with or
# without modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
#
# Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Author: [email protected]
from boto.s3.bucket import Bucket
from boto.s3.prefix import Prefix
import os
import hashlib
from boto.s3.connection import OrdinaryCallingFormat, S3Connection
from boto.s3.key import Key
from boto.s3.acl import ACL, Grant
from boto.exception import S3ResponseError
from boto.s3.deletemarker import DeleteMarker
import boto.s3
from nephoria.baseops.botobaseops import BotoBaseOps
class S3opsException(Exception):
"""Exception raised for errors that occur when running S3 operations.
Attributes:
msg -- explanation of the error
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
        return self.msg
class S3ops(BotoBaseOps):
s3_groups = {
"all_users":"http://acs.amazonaws.com/groups/global/AllUsers",
"authenticated_users":"http://acs.amazonaws.com/groups/global/AuthenticatedUsers",
"log_delivery":"http://acs.amazonaws.com/groups/s3/LogDelivery"
}
EUCARC_URL_NAME = 's3_url'
SERVICE_PREFIX = 's3'
CONNECTION_CLASS = S3Connection
def setup(self):
self.connection.calling_format = OrdinaryCallingFormat()
super(S3ops, self).setup()
def setup_resource_trackers(self):
"""
Setup keys in the test_resources hash in order to track artifacts created
"""
#todo add/define clean methods here...
self.test_resources_clean_methods["keys"] = None
self.test_resources_clean_methods["buckets"] = None
for resource_name in self.test_resources_clean_methods.iterkeys():
self.test_resources[resource_name] = []
def get_bucket(self, bucket_name):
"""
Bring balance in the force.
        Basically this method just returns the output of the boto connection,
        but in the future it may do something meaningful.
        This wrapper ensures that the tester can continue using nephoria APIs.
:param bucket_name:
:return:
"""
return self.connection.get_bucket(bucket_name)
def create_bucket(self, bucket_name, location=None):
"""
Test Coverages:
- Create a bucket.
- Ensures bucket exists by calling boto s3.lookup
bucket_name The name of the Bucket
"""
# TODO create_bucket move to boto3
bucket = self.get_bucket_by_name(bucket_name)
if bucket:
self.log.debug("Bucket '(%s)' already exists" % bucket_name)
else:
try:
if location:
bucket = self.connection.create_bucket(bucket_name, location=location)
else:
bucket = self.connection.create_bucket(bucket_name)
except self.connection.provider.storage_create_error, e:
raise S3opsException("Bucket '(%s)' is owned by another user" % bucket_name )
if not self.get_bucket_by_name(bucket.name):
raise S3opsException("Bucket could not be found after creation")
self.test_resources["buckets"].append(bucket)
self.log.debug("Created bucket: " + bucket_name)
return bucket
def delete_bucket(self, bucket):
"""
Test Coverage:
- Deletes a bucket.
- Checks if bucket still exists.
bucket_name The name of the Bucket
"""
if not isinstance(bucket, Bucket):
try:
bucket = self.connection.get_bucket(bucket)
except:
raise S3opsException("Bucket '(%s)' not found." % bucket)
bucket_name = bucket.name
try:
bucket.delete()
except self.connection.provider.storage_create_error, e:
raise S3opsException('Bucket (%s) is owned by another user' % bucket_name)
# Check if the bucket still exists
if self.get_bucket_by_name(bucket_name):
raise S3opsException('Bucket (%s) still exists after delete operation' % bucket_name )
self.log.debug("Bucket %s is deleted successfully." % bucket_name)
def delete_all_buckets(self):
'''
Deletes all buckets.
Returns: list of all buckets, which should be an empty list.
'''
buckets = self.get_all_bucket_names()
l = len(buckets)
if l > 1:
for i in range(l):
self.clear_bucket(buckets[i])
for i in range(l):
try:
self.delete_bucket(buckets[i])
except S3opsException:
pass
elif l == 1:
self.clear_bucket(buckets[0])
try:
self.delete_bucket(buckets[0])
except S3opsException:
pass
return self.connection.get_all_buckets()
def get_all_bucket_names(self):
"""
Returns: list of all bucket names
"""
buckets = self.connection.get_all_buckets()
l = len(buckets)
if l > 0:
for i in range(l):
buckets[i] = str(buckets[i].name)
return buckets
def | (self, bucket_name):
"""
        Look up a bucket by name and return it, or None if it does not exist.
"""
bucket = self.connection.lookup(bucket_name)
if bucket:
return bucket
else:
return None
def upload_object(self, bucket_name, key_name, path_to_file=None, contents=None):
"""
Write the contents of a local file to walrus
bucket_name The name of the walrus Bucket.
key_name The name of the object containing the data in walrus.
path_to_file Fully qualified path to local file.
"""
bucket = self.get_bucket_by_name(bucket_name)
if bucket == None:
raise S3opsException("Could not find bucket " + bucket_name + " to upload file")
# Get a new, blank Key object from the bucket. This Key object only
# exists locally until we actually store data in it.
key = bucket.new_key(key_name)
if key == None:
raise RuntimeError( "Unable to create key " + key_name )
if path_to_file is None:
if contents is None:
contents = os.urandom(1024)
key.set_contents_from_string(contents)
else:
key.set_contents_from_filename(path_to_file)
self.log.debug("Uploaded key: " + str(key_name) + " to bucket:" + str(bucket_name))
self.test_resources["keys"].append(key)
return key
def get_objects_by_prefix(self, bucket_name, prefix):
"""
        Get keys in the specified bucket that match the prefix; if no prefix is passed,
        all objects are returned as a result set.
        If exactly one key matches, it is returned as a single Key object.
"""
bucket = self.get_bucket_by_name(bucket_name)
keys = bucket.get_all_keys(prefix=prefix)
if len(keys) < 1:
self.log.warn("Unable to find any keys with prefix " + prefix + " in " + str(bucket) )
        if len(keys) == 1:
return keys[0]
return keys
    def delete_object(self, object):
        bucket = object.bucket
        name = object.name
        object.delete()
        try:
            key = self.connection.get_bucket(bucket.name).get_key(name)
        except Exception, e:
            return
        if key:
            raise S3opsException("Walrus object " + name + " in bucket " + bucket.name + " still exists after delete")
def clear_bucket(self, bucket_name=None):
"""Deletes the contents of the bucket specified and the bucket itself
THIS WILL DELETE EVERYTHING!
bucket bucket name to clear
"""
try:
bucket = self.connection.get_bucket(bucket_name=bucket_name)
except S3ResponseError as e:
            self.log.debug('No bucket ' + bucket_name + ' found: ' + e.message)
raise Exception('Not found')
try:
self.log.debug( "Getting bucket listing for " + bucket.name )
self.log.debug( "Iterating throught the bucket" )
key_list = bucket.list()
self.log.debug( "Starting loop" )
for k in key_list:
if isinstance(k, Prefix):
self.log.debug( "Skipping prefix" )
continue
self.log.debug( "Deleting key: " + k.name )
bucket.delete_key(k)
bucket.delete()
except S3ResponseError as e:
self.log.debug( "Exception caught doing bucket cleanup." + e.message )
#Todo: need to make this work with Walrus's non-S3-compliant error codes
if e.status == 409:
#Do version cleanup
self.log.debug( "Cleaning up versioning artifacts" )
try:
keys = bucket.get_all_versions()
for k in keys:
if isinstance(k, Key):
self.log.debug( "Got version: " + k.name + "--" + k.version_id + "-- Delete marker? " + str(k.delete_marker) )
self.log.debug( "Deleting key: " + k.name )
bucket.delete_key(key_name=k.name,version_id=k.version_id)
elif isinstance(k, DeleteMarker):
self.log.debug( "Got marker: " + k.name + "--" + k.version_id + "--" + str(k.is_latest) )
self.log.debug( "Deleting delete marker" )
bucket.delete_key(key_name=k.name,version_id=k.version_id)
self.log.debug( "Deleting bucket " + bucket.name )
bucket.delete()
except Exception as e:
self.log.debug( "Exception deleting versioning artifacts: " + e.message )
else:
self.log.debug('Got ' + e.message + ' and status ' + str(e.status))
def clear_keys_with_prefix(self, bucket, prefix):
try :
listing = self.connection.get_all_buckets()
for bucket in listing:
if bucket.name.startswith(prefix):
self.log.debug("Getting bucket listing for " + bucket.name)
key_list = bucket.list()
for k in key_list:
if isinstance(k, boto.s3.prefix.Prefix):
self.log.debug("Skipping prefix")
continue
self.log.debug("Deleting key: " + k.name)
bucket.delete_key(k)
bucket.delete()
else:
self.log.debug("skipping bucket: " + bucket.name)
except S3ResponseError as e:
raise S3opsException("Exception caught doing bucket cleanup.")
def get_canned_acl(self, canned_acl=None, bucket_owner_id=None, bucket_owner_display_name=None):
"""
Returns an acl object that can be applied to a bucket or key. It is intended to be used to verify
results that the service returns. To set a canned-acl you can simply set it on the bucket directly without
this method.
bucket_owner_id Account id of the owner of the bucket. Required
canned_acl Canned acl to implement. Required.
        Options: ['private', 'public-read', 'public-read-write', 'authenticated-read', 'log-delivery-write', 'bucket-owner-read', 'bucket-owner-full-control']
bucket_owner_display_name Required. The account display name for the bucket owner, so that the correct permission can be generated fully
"""
        if bucket_owner_id is None or canned_acl is None or bucket_owner_display_name is None:
            raise S3opsException("No bucket_owner_id, canned_acl or bucket_owner_display_name passed to get_canned_acl()")
built_acl = ACL()
built_acl.add_user_grant(permission='FULL_CONTROL',user_id=bucket_owner_id, display_name=bucket_owner_display_name)
if canned_acl == "public-read":
built_acl.add_grant(Grant(permission="READ",type='Group',uri=self.s3_groups["all_users"]))
elif canned_acl == "public-read-write":
built_acl.add_grant(Grant(permission="READ",type='Group',uri=self.s3_groups["all_users"]))
built_acl.add_grant(Grant(permission="WRITE",type='Group',uri=self.s3_groups["all_users"]))
elif canned_acl == "authenticated-read":
built_acl.add_grant(Grant(permission="READ",type='Group',uri=self.s3_groups["authenticated_users"]))
elif canned_acl == "log-delivery-write":
built_acl.add_grant(Grant(permission="WRITE",type='Group',uri=self.s3_groups["log_delivery"]))
elif canned_acl == "bucket-owner-read":
if bucket_owner_id is None:
raise Exception("No bucket_owner_id passed when trying to create bucket-owner-read canned acl ")
built_acl.add_grant(Grant(permission="READ",id=bucket_owner_id))
elif canned_acl == "bucket-owner-full-control":
if bucket_owner_id is None:
raise Exception("No bucket_owner_id passed when trying to create bucket-owner-full-control canned acl ")
built_acl.add_grant(Grant(permission="FULL_CONTROL",id=bucket_owner_id))
return built_acl
def check_acl_equivalence(self, acl1=None, acl2=None):
'''
Checks if acl1 = acl2 based on comparison of the set of grants irrespective of order.
One limitation is that each grant's xml string deserialization must be the same to be
considered equivalent. This has implications for the grant displayname in particular.
For example, an ACL with an unknown account specified will not generally have a
display-name associated with the account id, so the comparison may fail in that case even
though the ids and permissions are identical.
Returns None if there is an input problem such as one or more inputs are None
acl1 An ACL object from boto.s3.acl
acl2 An ACL object from boto.s3.acl
'''
if acl1 == None or acl2 == None:
return None
acl1grants = set()
acl2grants = set()
#calculate the symmetric-difference of the two sets of grants
for val in acl1.grants:
acl1grants.add(val.to_xml())
for val in acl2.grants:
acl2grants.add(val.to_xml())
return not len(acl1grants.symmetric_difference(acl2grants)) > 0
def check_md5(self, eTag=None, data=None):
hasher = hashlib.md5()
hasher.update(data)
data_hash = "\"" + hasher.hexdigest() + "\""
if data_hash != eTag:
raise Exception( "Hash/eTag mismatch: \nhash = " + data_hash + "\neTag= " + eTag)
| get_bucket_by_name |
.ycm_extra_conf.py | #!/usr/bin/env python
#
# Copyright (C) 2014 Google Inc.
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
import os
import ycm_core
# These are the compilation flags that will be used in case there's no
# compilation database set (by default, one is not set).
# CHANGE THIS LIST OF FLAGS. YES, THIS IS THE DROID YOU HAVE BEEN LOOKING FOR.
flags = [
'-Wall',
'-Wextra',
'-Werror',
'-fexceptions',
'-DNDEBUG',
# THIS IS IMPORTANT! Without a "-std=<something>" flag, clang won't know which
# language to use when compiling headers. So it will guess. Badly. So C++
# headers will be compiled as C headers. You don't want that so ALWAYS specify
# a "-std=<something>".
# For a C project, you would set this to something like 'c99' instead of
# 'c++11'.
'-std=c++11',
# ...and the same thing goes for the magic -x option which specifies the
# language that the files to be compiled are written in. This is mostly
# relevant for c++ headers.
# For a C project, you would set this to 'c' instead of 'c++'.
'-x',
'c++',
'-isystem',
'/usr/include',
'-isystem',
'/usr/local/include',
'-I', 'src',
'-I', 'thirdparty/nonius',
'-I', 'thirdparty/Catch',
]
# Set this to the absolute path to the folder (NOT the file!) containing the
# compile_commands.json file to use that instead of 'flags'. See here for
# more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html
#
# Most projects will NOT need to set this to anything; you can just change the
# 'flags' list of compilation flags.
compilation_database_folder = ''
if os.path.exists( compilation_database_folder ):
database = ycm_core.CompilationDatabase( compilation_database_folder )
else:
database = None
SOURCE_EXTENSIONS = [ '.cpp', '.cxx', '.cc', '.c', '.m', '.mm' ]
def DirectoryOfThisScript():
return os.path.dirname( os.path.abspath( __file__ ) )
def MakeRelativePathsInFlagsAbsolute( flags, working_directory ):
if not working_directory:
return list( flags )
new_flags = []
make_next_absolute = False
path_flags = [ '-isystem', '-I', '-iquote', '--sysroot=' ]
for flag in flags:
new_flag = flag
if make_next_absolute:
make_next_absolute = False
if not flag.startswith( '/' ):
new_flag = os.path.join( working_directory, flag )
for path_flag in path_flags:
if flag == path_flag:
make_next_absolute = True
break
if flag.startswith( path_flag ):
path = flag[ len( path_flag ): ]
new_flag = path_flag + os.path.join( working_directory, path )
break
if new_flag:
new_flags.append( new_flag )
return new_flags
def IsHeaderFile( filename ):
extension = os.path.splitext( filename )[ 1 ]
return extension in ['.h', '.hxx', '.hpp', '.hh', '.h++']
def GetCompilationInfoForFile( filename ):
# The compilation_commands.json file generated by CMake does not have entries
# for header files. So we do our best by asking the db for flags for a
# corresponding source file, if any. If one exists, the flags for that file
# should be good enough.
if IsHeaderFile( filename ):
basename = os.path.splitext( filename )[ 0 ]
for extension in SOURCE_EXTENSIONS:
replacement_file = basename + extension
if os.path.exists( replacement_file ):
compilation_info = database.GetCompilationInfoForFile(
replacement_file )
if compilation_info.compiler_flags_:
return compilation_info
return None
return database.GetCompilationInfoForFile( filename )
# This is the entry point; this function is called by ycmd to produce flags for
# a file.
def FlagsForFile( filename, **kwargs ):
if database:
# Bear in mind that compilation_info.compiler_flags_ does NOT return a
# python list, but a "list-like" StringVec object
compilation_info = GetCompilationInfoForFile( filename )
if not compilation_info:
return None
final_flags = MakeRelativePathsInFlagsAbsolute(
compilation_info.compiler_flags_,
compilation_info.compiler_working_dir_ )
else:
|
return {
'flags': final_flags,
'do_cache': True
}
| relative_to = DirectoryOfThisScript()
final_flags = MakeRelativePathsInFlagsAbsolute( flags, relative_to ) |
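# Illustrative shape of the return value (hypothetical absolute path), handy
# when sanity-checking this config by hand outside of ycmd:
#
# print( FlagsForFile( '/abs/path/to/project/src/main.cpp' ) )
# # -> { 'flags': [ '-Wall', ..., '-I', '/abs/path/to/project/src' ], 'do_cache': True }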
ImageRenaming.pyw | import os, sys, urllib.request
from tkinter import *
from tkinter.messagebox import *
__version__ = 3
__filename__ = "ImageRenaming"
__basename__ = os.path.basename(sys.argv[0])
__savepath__ = os.path.join(os.environ['APPDATA'], "QuentiumPrograms")
__iconpath__ = __savepath__ + "/{}.ico".format(__filename__)
try:urllib.request.urlopen("https://www.google.fr/", timeout=1); connection = True
except:connection = False
if not os.path.exists(__iconpath__):
try:os.mkdir(__savepath__)
except:pass
if connection == True:
try:urllib.request.urlretrieve("https://quentium.fr/+++PythonDL/{}.ico".format(__filename__), __iconpath__)
except:pass
if connection == True:
try:script_version = int(urllib.request.urlopen("https://quentium.fr/programs/index.php").read().decode().split(__filename__ + "<!-- Version: ")[1].split(" --></h2>")[0])
except:script_version = __version__
if script_version > __version__:
if os.path.exists(__iconpath__):popup = Tk(); popup.attributes("-topmost", 1); popup.iconbitmap(__iconpath__); popup.withdraw()
    ask_update = askquestion(__filename__ + " V" + str(script_version), "An update has been found. Would you like to download and run it?", icon="question")
if ask_update == "yes":
try:os.rename(__basename__, __filename__ + "-old.exe")
except:os.remove(__filename__ + "-old.exe"); os.rename(__basename__, __filename__ + "-old.exe")
if "-32" in str(__basename__):urllib.request.urlretrieve("https://quentium.fr/download.php?file={}-32.exe".format(__filename__), __filename__ + ".exe")
else:urllib.request.urlretrieve("https://quentium.fr/download.php?file={}.exe".format(__filename__), __filename__ + ".exe")
showwarning(__filename__, "Le programme va redémarrer pour fonctionner sous la nouvelle version.", icon="warning")
os.system("start " + __filename__ + ".exe"); os._exit(1)
__filename__ = __filename__ + " V" + str(__version__)
from datetime import datetime
from tkinter.filedialog import *
from tkinter import *
def start_rena | rectory = askdirectory()
if directory:
        if askyesno(__filename__, "Are you sure you want to rename all the images in this folder? This action cannot be undone!"):
files1 = [f for f in os.listdir(directory) if f[-4:].lower() in (".jpg",".JPG",".png",".PNG",".jpeg",".JPEG",".bmp",".gif")]
for (index, filename) in enumerate(files1):
file = directory + "/" + filename
extension = os.path.splitext(filename)[1]
if check_var.get() == 0:
time1 = os.path.getctime(file)
elif check_var.get() == 1:
time1 = os.path.getmtime(file)
time2 = datetime.fromtimestamp(time1)
time = time2.strftime("%Y%m%d%H%M%S%f")
newname = time + "_" + str(os.path.getsize(file)) + extension
os.rename(file, directory + "/" + newname)
files2 = [f for f in os.listdir(directory) if f[-4:].lower() in (".jpg",".JPG",".png",".PNG",".jpeg",".JPEG",".bmp",".gif")]
for (index, filename) in enumerate(files2):
file = directory + "/" + filename
extension = os.path.splitext(filename)[1]
newname = "Image-%05d%s" % (index + 1, extension)
                if os.path.exists(directory + "/" + newname):
                    continue
                os.rename(file, directory + "/" + newname)
imagerenaming.destroy()
os._exit(0)
else:
showwarning(__filename__, "Erreur : Aucun dossier n'a été sélectionné !")
imagerenaming = Tk()
width = 800
height = 500
imagerenaming.update_idletasks()
x = (imagerenaming.winfo_screenwidth() - width) // 2
y = (imagerenaming.winfo_screenheight() - height) // 2
imagerenaming.geometry("{}x{}+{}+{}".format(width , height, int(x), int(y)))
imagerenaming.resizable(width=False, height=False)
imagerenaming.configure(bg = "lightgray")
if os.path.exists(__iconpath__):
imagerenaming.iconbitmap(__iconpath__)
imagerenaming.title(__filename__)
Label(imagerenaming, text="Bienvenue dans le programme de renommage !", font="impact 30", fg="red", bg="lightgray").pack(pady=60)
check_var = IntVar()
check_var.set(0)
Radiobutton(imagerenaming, text="Date de création", variable=check_var, value=0, font="impact 20", bg="lightgray").pack(pady=10)
Radiobutton(imagerenaming, text="Date de modification", variable=check_var, value=1, font="impact 20", bg="lightgray").pack()
Button(imagerenaming, text="Renommer des images", command=start_rename, relief=GROOVE, width=25, font="impact 20", fg="black").pack(pady=50)
imagerenaming.mainloop()
| me():
di |
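# A standalone sketch (hypothetical folder path, simplified extension handling)
# of the two-pass rename technique used above: pass 1 moves every image to a
# unique timestamp+size name so that pass 2's sequential "Image-NNNNN" names
# cannot collide with a file that has not been renamed yet.
import os
from datetime import datetime

def two_pass_rename(directory, extensions=(".jpg", ".jpeg", ".png", ".gif", ".bmp")):
    def images():
        return sorted(f for f in os.listdir(directory) if f.lower().endswith(extensions))
    # pass 1: collision-free intermediate names based on mtime + size
    for name in images():
        path = os.path.join(directory, name)
        stamp = datetime.fromtimestamp(os.path.getmtime(path)).strftime("%Y%m%d%H%M%S%f")
        tmp = "%s_%d%s" % (stamp, os.path.getsize(path), os.path.splitext(name)[1])
        os.rename(path, os.path.join(directory, tmp))
    # pass 2: sequential final names
    for index, name in enumerate(images()):
        final = "Image-%05d%s" % (index + 1, os.path.splitext(name)[1])
        os.rename(os.path.join(directory, name), os.path.join(directory, final))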
workspaceAadAdmin.go | // *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package v20190601preview
import (
"context"
"reflect"
"github.com/pkg/errors"
"github.com/pulumi/pulumi/sdk/v2/go/pulumi"
)
// Workspace active directory administrator
type WorkspaceAadAdmin struct {
pulumi.CustomResourceState
// Workspace active directory administrator type
AdministratorType pulumi.StringPtrOutput `pulumi:"administratorType"`
// Login of the workspace active directory administrator
Login pulumi.StringPtrOutput `pulumi:"login"`
// The name of the resource
Name pulumi.StringOutput `pulumi:"name"`
// Object ID of the workspace active directory administrator
Sid pulumi.StringPtrOutput `pulumi:"sid"`
// Tenant ID of the workspace active directory administrator
TenantId pulumi.StringPtrOutput `pulumi:"tenantId"`
// The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
Type pulumi.StringOutput `pulumi:"type"`
}
// NewWorkspaceAadAdmin registers a new resource with the given unique name, arguments, and options.
func | (ctx *pulumi.Context,
name string, args *WorkspaceAadAdminArgs, opts ...pulumi.ResourceOption) (*WorkspaceAadAdmin, error) {
if args == nil {
return nil, errors.New("missing one or more required arguments")
}
if args.ResourceGroupName == nil {
return nil, errors.New("invalid value for required argument 'ResourceGroupName'")
}
if args.WorkspaceName == nil {
return nil, errors.New("invalid value for required argument 'WorkspaceName'")
}
aliases := pulumi.Aliases([]pulumi.Alias{
{
Type: pulumi.String("azure-nextgen:synapse:WorkspaceAadAdmin"),
},
{
Type: pulumi.String("azure-nextgen:synapse/latest:WorkspaceAadAdmin"),
},
{
Type: pulumi.String("azure-nextgen:synapse/v20201201:WorkspaceAadAdmin"),
},
})
opts = append(opts, aliases)
var resource WorkspaceAadAdmin
err := ctx.RegisterResource("azure-nextgen:synapse/v20190601preview:WorkspaceAadAdmin", name, args, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
// GetWorkspaceAadAdmin gets an existing WorkspaceAadAdmin resource's state with the given name, ID, and optional
// state properties that are used to uniquely qualify the lookup (nil if not required).
func GetWorkspaceAadAdmin(ctx *pulumi.Context,
name string, id pulumi.IDInput, state *WorkspaceAadAdminState, opts ...pulumi.ResourceOption) (*WorkspaceAadAdmin, error) {
var resource WorkspaceAadAdmin
err := ctx.ReadResource("azure-nextgen:synapse/v20190601preview:WorkspaceAadAdmin", name, id, state, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
// Input properties used for looking up and filtering WorkspaceAadAdmin resources.
type workspaceAadAdminState struct {
// Workspace active directory administrator type
AdministratorType *string `pulumi:"administratorType"`
// Login of the workspace active directory administrator
Login *string `pulumi:"login"`
// The name of the resource
Name *string `pulumi:"name"`
// Object ID of the workspace active directory administrator
Sid *string `pulumi:"sid"`
// Tenant ID of the workspace active directory administrator
TenantId *string `pulumi:"tenantId"`
// The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
Type *string `pulumi:"type"`
}
type WorkspaceAadAdminState struct {
// Workspace active directory administrator type
AdministratorType pulumi.StringPtrInput
// Login of the workspace active directory administrator
Login pulumi.StringPtrInput
// The name of the resource
Name pulumi.StringPtrInput
// Object ID of the workspace active directory administrator
Sid pulumi.StringPtrInput
// Tenant ID of the workspace active directory administrator
TenantId pulumi.StringPtrInput
// The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
Type pulumi.StringPtrInput
}
func (WorkspaceAadAdminState) ElementType() reflect.Type {
return reflect.TypeOf((*workspaceAadAdminState)(nil)).Elem()
}
type workspaceAadAdminArgs struct {
// Workspace active directory administrator type
AdministratorType *string `pulumi:"administratorType"`
// Login of the workspace active directory administrator
Login *string `pulumi:"login"`
// The name of the resource group. The name is case insensitive.
ResourceGroupName string `pulumi:"resourceGroupName"`
// Object ID of the workspace active directory administrator
Sid *string `pulumi:"sid"`
// Tenant ID of the workspace active directory administrator
TenantId *string `pulumi:"tenantId"`
// The name of the workspace
WorkspaceName string `pulumi:"workspaceName"`
}
// The set of arguments for constructing a WorkspaceAadAdmin resource.
type WorkspaceAadAdminArgs struct {
// Workspace active directory administrator type
AdministratorType pulumi.StringPtrInput
// Login of the workspace active directory administrator
Login pulumi.StringPtrInput
// The name of the resource group. The name is case insensitive.
ResourceGroupName pulumi.StringInput
// Object ID of the workspace active directory administrator
Sid pulumi.StringPtrInput
// Tenant ID of the workspace active directory administrator
TenantId pulumi.StringPtrInput
// The name of the workspace
WorkspaceName pulumi.StringInput
}
func (WorkspaceAadAdminArgs) ElementType() reflect.Type {
return reflect.TypeOf((*workspaceAadAdminArgs)(nil)).Elem()
}
type WorkspaceAadAdminInput interface {
pulumi.Input
ToWorkspaceAadAdminOutput() WorkspaceAadAdminOutput
ToWorkspaceAadAdminOutputWithContext(ctx context.Context) WorkspaceAadAdminOutput
}
func (*WorkspaceAadAdmin) ElementType() reflect.Type {
return reflect.TypeOf((*WorkspaceAadAdmin)(nil))
}
func (i *WorkspaceAadAdmin) ToWorkspaceAadAdminOutput() WorkspaceAadAdminOutput {
return i.ToWorkspaceAadAdminOutputWithContext(context.Background())
}
func (i *WorkspaceAadAdmin) ToWorkspaceAadAdminOutputWithContext(ctx context.Context) WorkspaceAadAdminOutput {
return pulumi.ToOutputWithContext(ctx, i).(WorkspaceAadAdminOutput)
}
type WorkspaceAadAdminOutput struct {
*pulumi.OutputState
}
func (WorkspaceAadAdminOutput) ElementType() reflect.Type {
return reflect.TypeOf((*WorkspaceAadAdmin)(nil))
}
func (o WorkspaceAadAdminOutput) ToWorkspaceAadAdminOutput() WorkspaceAadAdminOutput {
return o
}
func (o WorkspaceAadAdminOutput) ToWorkspaceAadAdminOutputWithContext(ctx context.Context) WorkspaceAadAdminOutput {
return o
}
func init() {
pulumi.RegisterOutputType(WorkspaceAadAdminOutput{})
}
| NewWorkspaceAadAdmin |
node_lifecycle_controller.go | /*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// The Controller sets tainted annotations on nodes.
// Tainted nodes should not be used for new workloads and
// some effort should be given to getting existing workloads
// off of tainted nodes.
package nodelifecycle
import (
"fmt"
"sync"
"time"
"k8s.io/klog"
coordv1beta1 "k8s.io/api/coordination/v1beta1"
"k8s.io/api/core/v1"
apiequality "k8s.io/apimachinery/pkg/api/equality"
apierrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/labels"
utilruntime "k8s.io/apimachinery/pkg/util/runtime"
"k8s.io/apimachinery/pkg/util/wait"
utilfeature "k8s.io/apiserver/pkg/util/feature"
appsv1informers "k8s.io/client-go/informers/apps/v1"
coordinformers "k8s.io/client-go/informers/coordination/v1beta1"
coreinformers "k8s.io/client-go/informers/core/v1"
clientset "k8s.io/client-go/kubernetes"
"k8s.io/client-go/kubernetes/scheme"
v1core "k8s.io/client-go/kubernetes/typed/core/v1"
appsv1listers "k8s.io/client-go/listers/apps/v1"
coordlisters "k8s.io/client-go/listers/coordination/v1beta1"
corelisters "k8s.io/client-go/listers/core/v1"
"k8s.io/client-go/tools/cache"
"k8s.io/client-go/tools/record"
"k8s.io/client-go/util/flowcontrol"
"k8s.io/client-go/util/workqueue"
"k8s.io/kubernetes/pkg/controller"
"k8s.io/kubernetes/pkg/controller/nodelifecycle/scheduler"
nodeutil "k8s.io/kubernetes/pkg/controller/util/node"
"k8s.io/kubernetes/pkg/features"
kubeletapis "k8s.io/kubernetes/pkg/kubelet/apis"
schedulerapi "k8s.io/kubernetes/pkg/scheduler/api"
"k8s.io/kubernetes/pkg/util/metrics"
utilnode "k8s.io/kubernetes/pkg/util/node"
"k8s.io/kubernetes/pkg/util/system"
taintutils "k8s.io/kubernetes/pkg/util/taints"
)
func | () {
// Register prometheus metrics
Register()
}
var (
// UnreachableTaintTemplate is the taint for when a node becomes unreachable.
UnreachableTaintTemplate = &v1.Taint{
Key: schedulerapi.TaintNodeUnreachable,
Effect: v1.TaintEffectNoExecute,
}
// NotReadyTaintTemplate is the taint for when a node is not ready for
// executing pods
NotReadyTaintTemplate = &v1.Taint{
Key: schedulerapi.TaintNodeNotReady,
Effect: v1.TaintEffectNoExecute,
}
// map {NodeConditionType: {ConditionStatus: TaintKey}}
// represents which NodeConditionType under which ConditionStatus should be
// tainted with which TaintKey
// for certain NodeConditionType, there are multiple {ConditionStatus,TaintKey} pairs
nodeConditionToTaintKeyStatusMap = map[v1.NodeConditionType]map[v1.ConditionStatus]string{
v1.NodeReady: {
v1.ConditionFalse: schedulerapi.TaintNodeNotReady,
v1.ConditionUnknown: schedulerapi.TaintNodeUnreachable,
},
v1.NodeMemoryPressure: {
v1.ConditionTrue: schedulerapi.TaintNodeMemoryPressure,
},
v1.NodeDiskPressure: {
v1.ConditionTrue: schedulerapi.TaintNodeDiskPressure,
},
v1.NodeNetworkUnavailable: {
v1.ConditionTrue: schedulerapi.TaintNodeNetworkUnavailable,
},
v1.NodePIDPressure: {
v1.ConditionTrue: schedulerapi.TaintNodePIDPressure,
},
}
taintKeyToNodeConditionMap = map[string]v1.NodeConditionType{
schedulerapi.TaintNodeNotReady: v1.NodeReady,
schedulerapi.TaintNodeUnreachable: v1.NodeReady,
schedulerapi.TaintNodeNetworkUnavailable: v1.NodeNetworkUnavailable,
schedulerapi.TaintNodeMemoryPressure: v1.NodeMemoryPressure,
schedulerapi.TaintNodeDiskPressure: v1.NodeDiskPressure,
schedulerapi.TaintNodePIDPressure: v1.NodePIDPressure,
}
)
// ZoneState is the state of a given zone.
type ZoneState string
const (
stateInitial = ZoneState("Initial")
stateNormal = ZoneState("Normal")
stateFullDisruption = ZoneState("FullDisruption")
statePartialDisruption = ZoneState("PartialDisruption")
)
const (
// The amount of time the nodecontroller should sleep between retrying node health updates
retrySleepTime = 20 * time.Millisecond
nodeNameKeyIndex = "spec.nodeName"
)
// labelReconcileInfo lists Node labels to reconcile, and how to reconcile them.
// primaryKey and secondaryKey are keys of labels to reconcile.
// - If both keys exist, but their values don't match. Use the value from the
// primaryKey as the source of truth to reconcile.
// - If ensureSecondaryExists is true, and the secondaryKey does not
// exist, secondaryKey will be added with the value of the primaryKey.
var labelReconcileInfo = []struct {
primaryKey string
secondaryKey string
ensureSecondaryExists bool
}{
{
// Reconcile the beta and the stable OS label using the beta label as
// the source of truth.
// TODO(#73084): switch to using the stable label as the source of
// truth in v1.18.
primaryKey: kubeletapis.LabelOS,
secondaryKey: v1.LabelOSStable,
ensureSecondaryExists: true,
},
{
// Reconcile the beta and the stable arch label using the beta label as
// the source of truth.
// TODO(#73084): switch to using the stable label as the source of
// truth in v1.18.
primaryKey: kubeletapis.LabelArch,
secondaryKey: v1.LabelArchStable,
ensureSecondaryExists: true,
},
}
type nodeHealthData struct {
probeTimestamp metav1.Time
readyTransitionTimestamp metav1.Time
status *v1.NodeStatus
lease *coordv1beta1.Lease
}
// Controller is the controller that manages node's life cycle.
type Controller struct {
taintManager *scheduler.NoExecuteTaintManager
podInformerSynced cache.InformerSynced
kubeClient clientset.Interface
// This timestamp is to be used instead of LastProbeTime stored in Condition. We do this
// to avoid the problem with time skew across the cluster.
now func() metav1.Time
enterPartialDisruptionFunc func(nodeNum int) float32
enterFullDisruptionFunc func(nodeNum int) float32
computeZoneStateFunc func(nodeConditions []*v1.NodeCondition) (int, ZoneState)
knownNodeSet map[string]*v1.Node
// per Node map storing last observed health together with a local time when it was observed.
nodeHealthMap map[string]*nodeHealthData
// Lock to access evictor workers
evictorLock sync.Mutex
// workers that evicts pods from unresponsive nodes.
zonePodEvictor map[string]*scheduler.RateLimitedTimedQueue
// workers that are responsible for tainting nodes.
zoneNoExecuteTainter map[string]*scheduler.RateLimitedTimedQueue
zoneStates map[string]ZoneState
daemonSetStore appsv1listers.DaemonSetLister
daemonSetInformerSynced cache.InformerSynced
leaseLister coordlisters.LeaseLister
leaseInformerSynced cache.InformerSynced
nodeLister corelisters.NodeLister
nodeInformerSynced cache.InformerSynced
recorder record.EventRecorder
// Value controlling Controller monitoring period, i.e. how often does Controller
// check node health signal posted from kubelet. This value should be lower than
// nodeMonitorGracePeriod.
// TODO: Change node health monitor to watch based.
nodeMonitorPeriod time.Duration
// When node is just created, e.g. cluster bootstrap or node creation, we give
// a longer grace period.
nodeStartupGracePeriod time.Duration
// Controller will not proactively sync node health, but will monitor node
// health signal updated from kubelet. There are 2 kinds of node healthiness
// signals: NodeStatus and NodeLease. NodeLease signal is generated only when
// NodeLease feature is enabled. If it doesn't receive update for this amount
// of time, it will start posting "NodeReady==ConditionUnknown". The amount of
// time before which Controller start evicting pods is controlled via flag
// 'pod-eviction-timeout'.
// Note: be cautious when changing the constant, it must work with
// nodeStatusUpdateFrequency in kubelet and renewInterval in NodeLease
// controller. The node health signal update frequency is the minimal of the
// two.
// There are several constraints:
// 1. nodeMonitorGracePeriod must be N times more than the node health signal
// update frequency, where N means number of retries allowed for kubelet to
// post node status/lease. It is pointless to make nodeMonitorGracePeriod
// be less than the node health signal update frequency, since there will
// only be fresh values from Kubelet at an interval of node health signal
// update frequency. The constant must be less than podEvictionTimeout.
// 2. nodeMonitorGracePeriod can't be too large for user experience - larger
// value takes longer for user to see up-to-date node health.
nodeMonitorGracePeriod time.Duration
podEvictionTimeout time.Duration
evictionLimiterQPS float32
secondaryEvictionLimiterQPS float32
largeClusterThreshold int32
unhealthyZoneThreshold float32
// if set to true Controller will start TaintManager that will evict Pods from
// tainted nodes, if they're not tolerated.
runTaintManager bool
// if set to true Controller will taint Nodes with 'TaintNodeNotReady' and 'TaintNodeUnreachable'
// taints instead of evicting Pods itself.
useTaintBasedEvictions bool
// if set to true, NodeController will taint Nodes based on its condition for 'NetworkUnavailable',
// 'MemoryPressure', 'PIDPressure' and 'DiskPressure'.
taintNodeByCondition bool
nodeUpdateQueue workqueue.Interface
}
// NewNodeLifecycleController returns a new taint controller.
func NewNodeLifecycleController(
leaseInformer coordinformers.LeaseInformer,
podInformer coreinformers.PodInformer,
nodeInformer coreinformers.NodeInformer,
daemonSetInformer appsv1informers.DaemonSetInformer,
kubeClient clientset.Interface,
nodeMonitorPeriod time.Duration,
nodeStartupGracePeriod time.Duration,
nodeMonitorGracePeriod time.Duration,
podEvictionTimeout time.Duration,
evictionLimiterQPS float32,
secondaryEvictionLimiterQPS float32,
largeClusterThreshold int32,
unhealthyZoneThreshold float32,
runTaintManager bool,
useTaintBasedEvictions bool,
taintNodeByCondition bool) (*Controller, error) {
if kubeClient == nil {
klog.Fatalf("kubeClient is nil when starting Controller")
}
eventBroadcaster := record.NewBroadcaster()
recorder := eventBroadcaster.NewRecorder(scheme.Scheme, v1.EventSource{Component: "node-controller"})
eventBroadcaster.StartLogging(klog.Infof)
klog.Infof("Sending events to api server.")
eventBroadcaster.StartRecordingToSink(
&v1core.EventSinkImpl{
Interface: v1core.New(kubeClient.CoreV1().RESTClient()).Events(""),
})
if kubeClient.CoreV1().RESTClient().GetRateLimiter() != nil {
metrics.RegisterMetricAndTrackRateLimiterUsage("node_lifecycle_controller", kubeClient.CoreV1().RESTClient().GetRateLimiter())
}
nc := &Controller{
kubeClient: kubeClient,
now: metav1.Now,
knownNodeSet: make(map[string]*v1.Node),
nodeHealthMap: make(map[string]*nodeHealthData),
recorder: recorder,
nodeMonitorPeriod: nodeMonitorPeriod,
nodeStartupGracePeriod: nodeStartupGracePeriod,
nodeMonitorGracePeriod: nodeMonitorGracePeriod,
zonePodEvictor: make(map[string]*scheduler.RateLimitedTimedQueue),
zoneNoExecuteTainter: make(map[string]*scheduler.RateLimitedTimedQueue),
zoneStates: make(map[string]ZoneState),
podEvictionTimeout: podEvictionTimeout,
evictionLimiterQPS: evictionLimiterQPS,
secondaryEvictionLimiterQPS: secondaryEvictionLimiterQPS,
largeClusterThreshold: largeClusterThreshold,
unhealthyZoneThreshold: unhealthyZoneThreshold,
runTaintManager: runTaintManager,
useTaintBasedEvictions: useTaintBasedEvictions && runTaintManager,
taintNodeByCondition: taintNodeByCondition,
nodeUpdateQueue: workqueue.NewNamed("node_lifecycle_controller"),
}
if useTaintBasedEvictions {
klog.Infof("Controller is using taint based evictions.")
}
nc.enterPartialDisruptionFunc = nc.ReducedQPSFunc
nc.enterFullDisruptionFunc = nc.HealthyQPSFunc
nc.computeZoneStateFunc = nc.ComputeZoneState
podInformer.Informer().AddEventHandler(cache.ResourceEventHandlerFuncs{
AddFunc: func(obj interface{}) {
pod := obj.(*v1.Pod)
if nc.taintManager != nil {
nc.taintManager.PodUpdated(nil, pod)
}
},
UpdateFunc: func(prev, obj interface{}) {
prevPod := prev.(*v1.Pod)
newPod := obj.(*v1.Pod)
if nc.taintManager != nil {
nc.taintManager.PodUpdated(prevPod, newPod)
}
},
DeleteFunc: func(obj interface{}) {
pod, isPod := obj.(*v1.Pod)
// We can get DeletedFinalStateUnknown instead of *v1.Pod here and we need to handle that correctly.
if !isPod {
deletedState, ok := obj.(cache.DeletedFinalStateUnknown)
if !ok {
klog.Errorf("Received unexpected object: %v", obj)
return
}
pod, ok = deletedState.Obj.(*v1.Pod)
if !ok {
klog.Errorf("DeletedFinalStateUnknown contained non-Pod object: %v", deletedState.Obj)
return
}
}
if nc.taintManager != nil {
nc.taintManager.PodUpdated(pod, nil)
}
},
})
nc.podInformerSynced = podInformer.Informer().HasSynced
if nc.runTaintManager {
podInformer.Informer().AddIndexers(cache.Indexers{
nodeNameKeyIndex: func(obj interface{}) ([]string, error) {
pod, ok := obj.(*v1.Pod)
if !ok {
return []string{}, nil
}
if len(pod.Spec.NodeName) == 0 {
return []string{}, nil
}
return []string{pod.Spec.NodeName}, nil
},
})
podIndexer := podInformer.Informer().GetIndexer()
podLister := podInformer.Lister()
podGetter := func(name, namespace string) (*v1.Pod, error) { return podLister.Pods(namespace).Get(name) }
podByNodeNameLister := func(nodeName string) ([]v1.Pod, error) {
objs, err := podIndexer.ByIndex(nodeNameKeyIndex, nodeName)
if err != nil {
return nil, err
}
pods := make([]v1.Pod, 0, len(objs))
for _, obj := range objs {
pod, ok := obj.(*v1.Pod)
if !ok {
continue
}
pods = append(pods, *pod)
}
return pods, nil
}
nodeLister := nodeInformer.Lister()
nodeGetter := func(name string) (*v1.Node, error) { return nodeLister.Get(name) }
nc.taintManager = scheduler.NewNoExecuteTaintManager(kubeClient, podGetter, nodeGetter, podByNodeNameLister)
nodeInformer.Informer().AddEventHandler(cache.ResourceEventHandlerFuncs{
AddFunc: nodeutil.CreateAddNodeHandler(func(node *v1.Node) error {
nc.taintManager.NodeUpdated(nil, node)
return nil
}),
UpdateFunc: nodeutil.CreateUpdateNodeHandler(func(oldNode, newNode *v1.Node) error {
nc.taintManager.NodeUpdated(oldNode, newNode)
return nil
}),
DeleteFunc: nodeutil.CreateDeleteNodeHandler(func(node *v1.Node) error {
nc.taintManager.NodeUpdated(node, nil)
return nil
}),
})
}
klog.Infof("Controller will reconcile labels.")
nodeInformer.Informer().AddEventHandler(cache.ResourceEventHandlerFuncs{
AddFunc: nodeutil.CreateAddNodeHandler(func(node *v1.Node) error {
nc.nodeUpdateQueue.Add(node.Name)
return nil
}),
UpdateFunc: nodeutil.CreateUpdateNodeHandler(func(_, newNode *v1.Node) error {
nc.nodeUpdateQueue.Add(newNode.Name)
return nil
}),
})
if nc.taintNodeByCondition {
klog.Infof("Controller will taint node by condition.")
}
nc.leaseLister = leaseInformer.Lister()
if utilfeature.DefaultFeatureGate.Enabled(features.NodeLease) {
nc.leaseInformerSynced = leaseInformer.Informer().HasSynced
} else {
// Always indicate that the lease is synced so that we don't wait on lease sync
// when the NodeLease feature is disabled.
nc.leaseInformerSynced = func() bool { return true }
}
nc.nodeLister = nodeInformer.Lister()
nc.nodeInformerSynced = nodeInformer.Informer().HasSynced
nc.daemonSetStore = daemonSetInformer.Lister()
nc.daemonSetInformerSynced = daemonSetInformer.Informer().HasSynced
return nc, nil
}
// Run starts an asynchronous loop that monitors the status of cluster nodes.
func (nc *Controller) Run(stopCh <-chan struct{}) {
defer utilruntime.HandleCrash()
klog.Infof("Starting node controller")
defer klog.Infof("Shutting down node controller")
if !controller.WaitForCacheSync("taint", stopCh, nc.leaseInformerSynced, nc.nodeInformerSynced, nc.podInformerSynced, nc.daemonSetInformerSynced) {
return
}
if nc.runTaintManager {
go nc.taintManager.Run(stopCh)
}
// Close node update queue to clean up the goroutine.
defer nc.nodeUpdateQueue.ShutDown()
// Start workers to reconcile labels and/or update NoSchedule taint for nodes.
for i := 0; i < scheduler.UpdateWorkerSize; i++ {
// Thanks to "workqueue", each worker just needs to get an item from the queue,
// because the item is flagged while it is being processed: if a new event comes
// in, the item is re-queued until "Done" is called, so no more than one worker
// handles the same item and no event is missed.
go wait.Until(nc.doNodeProcessingPassWorker, time.Second, stopCh)
}
if nc.useTaintBasedEvictions {
// Handling taint based evictions. Because we don't want dedicated logic in
// TaintManager for NC-originated taints, and we normally don't rate limit
// evictions caused by taints, we need to rate limit adding taints.
go wait.Until(nc.doNoExecuteTaintingPass, scheduler.NodeEvictionPeriod, stopCh)
} else {
// Managing eviction of nodes:
// When we delete pods from a node, if the node was not empty at the time, we
// queue an eviction watcher. If we hit an error, we retry deletion.
go wait.Until(nc.doEvictionPass, scheduler.NodeEvictionPeriod, stopCh)
}
// Incorporate the results of node health signals pushed from kubelet to master.
go wait.Until(func() {
if err := nc.monitorNodeHealth(); err != nil {
klog.Errorf("Error monitoring node health: %v", err)
}
}, nc.nodeMonitorPeriod, stopCh)
<-stopCh
}
func (nc *Controller) doNodeProcessingPassWorker() {
for {
obj, shutdown := nc.nodeUpdateQueue.Get()
// "nodeUpdateQueue" will be shutdown when "stopCh" closed;
// we do not need to re-check "stopCh" again.
if shutdown {
return
}
nodeName := obj.(string)
if nc.taintNodeByCondition {
if err := nc.doNoScheduleTaintingPass(nodeName); err != nil {
klog.Errorf("Failed to taint NoSchedule on node <%s>, requeue it: %v", nodeName, err)
// TODO(k82cn): Add nodeName back to the queue
}
}
// TODO: re-evaluate whether there are any labels that need to be
// reconciled in 1.19. Remove this function if it's no longer necessary.
if err := nc.reconcileNodeLabels(nodeName); err != nil {
klog.Errorf("Failed to reconcile labels for node <%s>, requeue it: %v", nodeName, err)
// TODO(yujuhong): Add nodeName back to the queue
}
nc.nodeUpdateQueue.Done(nodeName)
}
}
func (nc *Controller) doNoScheduleTaintingPass(nodeName string) error {
node, err := nc.nodeLister.Get(nodeName)
if err != nil {
// If node not found, just ignore it.
if apierrors.IsNotFound(err) {
return nil
}
return err
}
// Map node's condition to Taints.
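// For example (illustrative; the authoritative mapping is
// nodeConditionToTaintKeyStatusMap), a MemoryPressure=True condition would map
// to a NoSchedule taint carrying the corresponding memory-pressure taint key.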
var taints []v1.Taint
for _, condition := range node.Status.Conditions {
if taintMap, found := nodeConditionToTaintKeyStatusMap[condition.Type]; found {
if taintKey, found := taintMap[condition.Status]; found {
taints = append(taints, v1.Taint{
Key: taintKey,
Effect: v1.TaintEffectNoSchedule,
})
}
}
}
if node.Spec.Unschedulable {
// If unschedulable, append related taint.
taints = append(taints, v1.Taint{
Key: schedulerapi.TaintNodeUnschedulable,
Effect: v1.TaintEffectNoSchedule,
})
}
// Get existing taints of the node.
nodeTaints := taintutils.TaintSetFilter(node.Spec.Taints, func(t *v1.Taint) bool {
// only NoSchedule taints are candidates to be compared with "taints" later
if t.Effect != v1.TaintEffectNoSchedule {
return false
}
// Find unschedulable taint of node.
if t.Key == schedulerapi.TaintNodeUnschedulable {
return true
}
// Find node condition taints of node.
_, found := taintKeyToNodeConditionMap[t.Key]
return found
})
taintsToAdd, taintsToDel := taintutils.TaintSetDiff(taints, nodeTaints)
// If there is nothing to add or delete, return directly.
if len(taintsToAdd) == 0 && len(taintsToDel) == 0 {
return nil
}
if !nodeutil.SwapNodeControllerTaint(nc.kubeClient, taintsToAdd, taintsToDel, node) {
return fmt.Errorf("failed to swap taints of node %+v", node)
}
return nil
}
func (nc *Controller) doNoExecuteTaintingPass() {
nc.evictorLock.Lock()
defer nc.evictorLock.Unlock()
for k := range nc.zoneNoExecuteTainter {
// Function should return 'false' and a time after which it should be retried, or 'true' if it shouldn't (it succeeded).
nc.zoneNoExecuteTainter[k].Try(func(value scheduler.TimedValue) (bool, time.Duration) {
node, err := nc.nodeLister.Get(value.Value)
if apierrors.IsNotFound(err) {
klog.Warningf("Node %v no longer present in nodeLister!", value.Value)
return true, 0
} else if err != nil {
klog.Warningf("Failed to get Node %v from the nodeLister: %v", value.Value, err)
// retry in 50 milliseconds
return false, 50 * time.Millisecond
}
_, condition := nodeutil.GetNodeCondition(&node.Status, v1.NodeReady)
// Because we want to mimic NodeStatus.Condition["Ready"] we make "unreachable" and "not ready" taints mutually exclusive.
taintToAdd := v1.Taint{}
oppositeTaint := v1.Taint{}
switch condition.Status {
case v1.ConditionFalse:
taintToAdd = *NotReadyTaintTemplate
oppositeTaint = *UnreachableTaintTemplate
case v1.ConditionUnknown:
taintToAdd = *UnreachableTaintTemplate
oppositeTaint = *NotReadyTaintTemplate
default:
// It seems that the Node is ready again, so there's no need to taint it.
klog.V(4).Infof("Node %v was in a taint queue, but it's ready now. Ignoring taint request.", value.Value)
return true, 0
}
result := nodeutil.SwapNodeControllerTaint(nc.kubeClient, []*v1.Taint{&taintToAdd}, []*v1.Taint{&oppositeTaint}, node)
if result {
// count the evictionsNumber
zone := utilnode.GetZoneKey(node)
evictionsNumber.WithLabelValues(zone).Inc()
}
return result, 0
})
}
}
func (nc *Controller) doEvictionPass() {
nc.evictorLock.Lock()
defer nc.evictorLock.Unlock()
for k := range nc.zonePodEvictor {
// Function should return 'false' and a time after which it should be retried, or 'true' if it shouldn't (it succeeded).
nc.zonePodEvictor[k].Try(func(value scheduler.TimedValue) (bool, time.Duration) {
node, err := nc.nodeLister.Get(value.Value)
if apierrors.IsNotFound(err) {
klog.Warningf("Node %v no longer present in nodeLister!", value.Value)
} else if err != nil {
klog.Warningf("Failed to get Node %v from the nodeLister: %v", value.Value, err)
}
nodeUID, _ := value.UID.(string)
remaining, err := nodeutil.DeletePods(nc.kubeClient, nc.recorder, value.Value, nodeUID, nc.daemonSetStore)
if err != nil {
utilruntime.HandleError(fmt.Errorf("unable to evict node %q: %v", value.Value, err))
return false, 0
}
if remaining {
klog.Infof("Pods awaiting deletion due to Controller eviction")
}
// count the evictionsNumber
if node != nil {
zone := utilnode.GetZoneKey(node)
evictionsNumber.WithLabelValues(zone).Inc()
}
return true, 0
})
}
}
// monitorNodeHealth verifies that node health is constantly updated by kubelet,
// and if not, posts "NodeReady==ConditionUnknown".
// For nodes that are not ready or not reachable for a long period of time, this
// function taints them if the TaintBasedEvictions feature is enabled; otherwise,
// it evicts them directly.
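// Illustrative flow (assuming taint based evictions are enabled): a node that
// misses heartbeats for nodeMonitorGracePeriod has Ready set to Unknown here,
// is then queued for the NoExecute unreachable taint, and TaintManager evicts
// the pods that don't tolerate that taint.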
func (nc *Controller) monitorNodeHealth() error {
// We are listing nodes from local cache as we can tolerate some small delays
// compared to the state in etcd, and there is eventual consistency anyway.
nodes, err := nc.nodeLister.List(labels.Everything())
if err != nil {
return err
}
added, deleted, newZoneRepresentatives := nc.classifyNodes(nodes)
for i := range newZoneRepresentatives {
nc.addPodEvictorForNewZone(newZoneRepresentatives[i])
}
for i := range added {
klog.V(1).Infof("Controller observed a new Node: %#v", added[i].Name)
nodeutil.RecordNodeEvent(nc.recorder, added[i].Name, string(added[i].UID), v1.EventTypeNormal, "RegisteredNode", fmt.Sprintf("Registered Node %v in Controller", added[i].Name))
nc.knownNodeSet[added[i].Name] = added[i]
nc.addPodEvictorForNewZone(added[i])
if nc.useTaintBasedEvictions {
nc.markNodeAsReachable(added[i])
} else {
nc.cancelPodEviction(added[i])
}
}
for i := range deleted {
klog.V(1).Infof("Controller observed a Node deletion: %v", deleted[i].Name)
nodeutil.RecordNodeEvent(nc.recorder, deleted[i].Name, string(deleted[i].UID), v1.EventTypeNormal, "RemovingNode", fmt.Sprintf("Removing Node %v from Controller", deleted[i].Name))
delete(nc.knownNodeSet, deleted[i].Name)
}
zoneToNodeConditions := map[string][]*v1.NodeCondition{}
for i := range nodes {
var gracePeriod time.Duration
var observedReadyCondition v1.NodeCondition
var currentReadyCondition *v1.NodeCondition
node := nodes[i].DeepCopy()
if err := wait.PollImmediate(retrySleepTime, retrySleepTime*scheduler.NodeHealthUpdateRetry, func() (bool, error) {
gracePeriod, observedReadyCondition, currentReadyCondition, err = nc.tryUpdateNodeHealth(node)
if err == nil {
return true, nil
}
name := node.Name
node, err = nc.kubeClient.CoreV1().Nodes().Get(name, metav1.GetOptions{})
if err != nil {
klog.Errorf("Failed while getting a Node to retry updating node health. Probably Node %s was deleted.", name)
return false, err
}
return false, nil
}); err != nil {
klog.Errorf("Update health of Node '%v' from Controller error: %v. "+
"Skipping - no pods will be evicted.", node.Name, err)
continue
}
// We do not treat a master node as a part of the cluster for network disruption checking.
if !system.IsMasterNode(node.Name) {
zoneToNodeConditions[utilnode.GetZoneKey(node)] = append(zoneToNodeConditions[utilnode.GetZoneKey(node)], currentReadyCondition)
}
decisionTimestamp := nc.now()
if currentReadyCondition != nil {
// Check eviction timeout against decisionTimestamp
switch observedReadyCondition.Status {
case v1.ConditionFalse:
if nc.useTaintBasedEvictions {
// We want to update the taint straight away if Node is already tainted with the UnreachableTaint
if taintutils.TaintExists(node.Spec.Taints, UnreachableTaintTemplate) {
taintToAdd := *NotReadyTaintTemplate
if !nodeutil.SwapNodeControllerTaint(nc.kubeClient, []*v1.Taint{&taintToAdd}, []*v1.Taint{UnreachableTaintTemplate}, node) {
klog.Errorf("Failed to instantly swap UnreachableTaint to NotReadyTaint. Will try again in the next cycle.")
}
} else if nc.markNodeForTainting(node) {
klog.V(2).Infof("Node %v is NotReady as of %v. Adding it to the Taint queue.",
node.Name,
decisionTimestamp,
)
}
} else {
if decisionTimestamp.After(nc.nodeHealthMap[node.Name].readyTransitionTimestamp.Add(nc.podEvictionTimeout)) {
if nc.evictPods(node) {
klog.V(2).Infof("Node is NotReady. Adding Pods on Node %s to eviction queue: %v is later than %v + %v",
node.Name,
decisionTimestamp,
nc.nodeHealthMap[node.Name].readyTransitionTimestamp,
nc.podEvictionTimeout,
)
}
}
}
case v1.ConditionUnknown:
if nc.useTaintBasedEvictions {
// We want to update the taint straight away if Node is already tainted with the UnreachableTaint
if taintutils.TaintExists(node.Spec.Taints, NotReadyTaintTemplate) {
taintToAdd := *UnreachableTaintTemplate
if !nodeutil.SwapNodeControllerTaint(nc.kubeClient, []*v1.Taint{&taintToAdd}, []*v1.Taint{NotReadyTaintTemplate}, node) {
klog.Errorf("Failed to instantly swap NotReadyTaint to UnreachableTaint. Will try again in the next cycle.")
}
} else if nc.markNodeForTainting(node) {
klog.V(2).Infof("Node %v is unresponsive as of %v. Adding it to the Taint queue.",
node.Name,
decisionTimestamp,
)
}
} else {
if decisionTimestamp.After(nc.nodeHealthMap[node.Name].probeTimestamp.Add(nc.podEvictionTimeout)) {
if nc.evictPods(node) {
klog.V(2).Infof("Node is unresponsive. Adding Pods on Node %s to eviction queues: %v is later than %v + %v",
node.Name,
decisionTimestamp,
nc.nodeHealthMap[node.Name].probeTimestamp,
nc.podEvictionTimeout,
)
}
}
}
case v1.ConditionTrue:
if nc.useTaintBasedEvictions {
removed, err := nc.markNodeAsReachable(node)
if err != nil {
klog.Errorf("Failed to remove taints from node %v. Will retry in next iteration.", node.Name)
}
if removed {
klog.V(2).Infof("Node %s is healthy again, removing all taints", node.Name)
}
} else {
if nc.cancelPodEviction(node) {
klog.V(2).Infof("Node %s is ready again, cancelled pod eviction", node.Name)
}
}
}
// Report node event.
if currentReadyCondition.Status != v1.ConditionTrue && observedReadyCondition.Status == v1.ConditionTrue {
nodeutil.RecordNodeStatusChange(nc.recorder, node, "NodeNotReady")
if err = nodeutil.MarkAllPodsNotReady(nc.kubeClient, node); err != nil {
utilruntime.HandleError(fmt.Errorf("Unable to mark all pods NotReady on node %v: %v", node.Name, err))
}
}
}
}
nc.handleDisruption(zoneToNodeConditions, nodes)
return nil
}
// tryUpdateNodeHealth checks a given node's conditions and tries to update it. Returns the grace period to
// which the given node is entitled, the state of the current and last observed Ready Condition, and an error if one occurred.
func (nc *Controller) tryUpdateNodeHealth(node *v1.Node) (time.Duration, v1.NodeCondition, *v1.NodeCondition, error) {
var gracePeriod time.Duration
var observedReadyCondition v1.NodeCondition
_, currentReadyCondition := nodeutil.GetNodeCondition(&node.Status, v1.NodeReady)
if currentReadyCondition == nil {
// If ready condition is nil, then kubelet (or nodecontroller) never posted node status.
// A fake ready condition is created, where LastHeartbeatTime and LastTransitionTime are set
// to node.CreationTimestamp, to avoid handling this corner case.
observedReadyCondition = v1.NodeCondition{
Type: v1.NodeReady,
Status: v1.ConditionUnknown,
LastHeartbeatTime: node.CreationTimestamp,
LastTransitionTime: node.CreationTimestamp,
}
gracePeriod = nc.nodeStartupGracePeriod
if _, found := nc.nodeHealthMap[node.Name]; found {
nc.nodeHealthMap[node.Name].status = &node.Status
} else {
nc.nodeHealthMap[node.Name] = &nodeHealthData{
status: &node.Status,
probeTimestamp: node.CreationTimestamp,
readyTransitionTimestamp: node.CreationTimestamp,
}
}
} else {
// If ready condition is not nil, make a copy of it, since we may modify it in place later.
observedReadyCondition = *currentReadyCondition
gracePeriod = nc.nodeMonitorGracePeriod
}
savedNodeHealth, found := nc.nodeHealthMap[node.Name]
// There are the following cases to check:
// - both saved and new status have no Ready Condition set - we leave everything as it is,
// - saved status has no Ready Condition, but the current one does - Controller was restarted with Node data already present in etcd,
// - saved status has some Ready Condition, but the current one does not - it's an error, but we fill it in because that's probably a good thing to do,
// - both saved and current statuses have Ready Conditions and they have the same LastProbeTime - nothing happened on that Node, it may be
//   unresponsive, so we leave it as it is,
// - both saved and current statuses have Ready Conditions, they have different LastProbeTimes, but the same Ready Condition State -
//   everything's in order, no transition occurred, we update only probeTimestamp,
// - both saved and current statuses have Ready Conditions, different LastProbeTimes and different Ready Condition States -
//   the Ready Condition changed its state since we last saw it, so we update both probeTimestamp and readyTransitionTimestamp.
// TODO: things to consider:
//   - if 'LastProbeTime' has gone back in time it's probably an error, currently we ignore it,
//   - currently the only correct Ready State transition outside of Node Controller is marking the node ready by Kubelet; we don't check
//     if that's the case, but it does not seem necessary.
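// Example (illustrative timestamps): if the saved Ready condition was last
// heartbeated at 10:00:00 and the current one at 10:00:10 with the same
// status, only probeTimestamp moves forward; if the status also flipped,
// readyTransitionTimestamp is updated as well.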
var savedCondition *v1.NodeCondition
var savedLease *coordv1beta1.Lease
if found {
_, savedCondition = nodeutil.GetNodeCondition(savedNodeHealth.status, v1.NodeReady)
savedLease = savedNodeHealth.lease
}
if !found {
klog.Warningf("Missing timestamp for Node %s. Assuming now as a timestamp.", node.Name)
savedNodeHealth = &nodeHealthData{
status: &node.Status,
probeTimestamp: nc.now(),
readyTransitionTimestamp: nc.now(),
}
} else if savedCondition == nil && currentReadyCondition != nil {
klog.V(1).Infof("Creating timestamp entry for newly observed Node %s", node.Name)
savedNodeHealth = &nodeHealthData{
status: &node.Status,
probeTimestamp: nc.now(),
readyTransitionTimestamp: nc.now(),
}
} else if savedCondition != nil && currentReadyCondition == nil {
klog.Errorf("ReadyCondition was removed from Status of Node %s", node.Name)
// TODO: figure out what to do in this case. For now we do the same thing as above.
savedNodeHealth = &nodeHealthData{
status: &node.Status,
probeTimestamp: nc.now(),
readyTransitionTimestamp: nc.now(),
}
} else if savedCondition != nil && currentReadyCondition != nil && savedCondition.LastHeartbeatTime != currentReadyCondition.LastHeartbeatTime {
var transitionTime metav1.Time
// If ReadyCondition changed since the last time we checked, we update the transition timestamp to "now",
// otherwise we leave it as it is.
if savedCondition.LastTransitionTime != currentReadyCondition.LastTransitionTime {
klog.V(3).Infof("ReadyCondition for Node %s transitioned from %v to %v", node.Name, savedCondition, currentReadyCondition)
transitionTime = nc.now()
} else {
transitionTime = savedNodeHealth.readyTransitionTimestamp
}
if klog.V(5) {
klog.Infof("Node %s ReadyCondition updated. Updating timestamp: %+v vs %+v.", node.Name, savedNodeHealth.status, node.Status)
} else {
klog.V(3).Infof("Node %s ReadyCondition updated. Updating timestamp.", node.Name)
}
savedNodeHealth = &nodeHealthData{
status: &node.Status,
probeTimestamp: nc.now(),
readyTransitionTimestamp: transitionTime,
}
}
var observedLease *coordv1beta1.Lease
if utilfeature.DefaultFeatureGate.Enabled(features.NodeLease) {
// Always update the probe time if node lease is renewed.
// Note: If kubelet never posted the node status, but continues renewing the
// heartbeat leases, the node controller will assume the node is healthy and
// take no action.
observedLease, _ = nc.leaseLister.Leases(v1.NamespaceNodeLease).Get(node.Name)
if observedLease != nil && (savedLease == nil || savedLease.Spec.RenewTime.Before(observedLease.Spec.RenewTime)) {
savedNodeHealth.lease = observedLease
savedNodeHealth.probeTimestamp = nc.now()
}
}
nc.nodeHealthMap[node.Name] = savedNodeHealth
if nc.now().After(savedNodeHealth.probeTimestamp.Add(gracePeriod)) {
// NodeReady condition or lease was last set longer ago than gracePeriod, so
// update it to Unknown (regardless of its current value) in the master.
nodeConditionTypes := []v1.NodeConditionType{
v1.NodeReady,
v1.NodeMemoryPressure,
v1.NodeDiskPressure,
v1.NodePIDPressure,
// We don't change 'NodeNetworkUnavailable' condition, as it's managed on a control plane level.
// v1.NodeNetworkUnavailable,
}
nowTimestamp := nc.now()
for _, nodeConditionType := range nodeConditionTypes {
_, currentCondition := nodeutil.GetNodeCondition(&node.Status, nodeConditionType)
if currentCondition == nil {
klog.V(2).Infof("Condition %v of node %v was never updated by kubelet", nodeConditionType, node.Name)
node.Status.Conditions = append(node.Status.Conditions, v1.NodeCondition{
Type: nodeConditionType,
Status: v1.ConditionUnknown,
Reason: "NodeStatusNeverUpdated",
Message: "Kubelet never posted node status.",
LastHeartbeatTime: node.CreationTimestamp,
LastTransitionTime: nowTimestamp,
})
} else {
klog.V(4).Infof("node %v hasn't been updated for %+v. Last %v is: %+v",
node.Name, nc.now().Time.Sub(savedNodeHealth.probeTimestamp.Time), nodeConditionType, currentCondition)
if currentCondition.Status != v1.ConditionUnknown {
currentCondition.Status = v1.ConditionUnknown
currentCondition.Reason = "NodeStatusUnknown"
currentCondition.Message = "Kubelet stopped posting node status."
currentCondition.LastTransitionTime = nowTimestamp
}
}
}
// We need to update currentReadyCondition because its value may have changed.
_, currentReadyCondition = nodeutil.GetNodeCondition(&node.Status, v1.NodeReady)
if !apiequality.Semantic.DeepEqual(currentReadyCondition, &observedReadyCondition) {
if _, err := nc.kubeClient.CoreV1().Nodes().UpdateStatus(node); err != nil {
klog.Errorf("Error updating node %s: %v", node.Name, err)
return gracePeriod, observedReadyCondition, currentReadyCondition, err
}
nc.nodeHealthMap[node.Name] = &nodeHealthData{
status: &node.Status,
probeTimestamp: nc.nodeHealthMap[node.Name].probeTimestamp,
readyTransitionTimestamp: nc.now(),
lease: observedLease,
}
return gracePeriod, observedReadyCondition, currentReadyCondition, nil
}
}
return gracePeriod, observedReadyCondition, currentReadyCondition, nil
}
func (nc *Controller) handleDisruption(zoneToNodeConditions map[string][]*v1.NodeCondition, nodes []*v1.Node) {
newZoneStates := map[string]ZoneState{}
allAreFullyDisrupted := true
for k, v := range zoneToNodeConditions {
zoneSize.WithLabelValues(k).Set(float64(len(v)))
unhealthy, newState := nc.computeZoneStateFunc(v)
zoneHealth.WithLabelValues(k).Set(float64(100*(len(v)-unhealthy)) / float64(len(v)))
unhealthyNodes.WithLabelValues(k).Set(float64(unhealthy))
if newState != stateFullDisruption {
allAreFullyDisrupted = false
}
newZoneStates[k] = newState
if _, had := nc.zoneStates[k]; !had {
klog.Errorf("Setting initial state for unseen zone: %v", k)
nc.zoneStates[k] = stateInitial
}
}
allWasFullyDisrupted := true
for k, v := range nc.zoneStates {
if _, have := zoneToNodeConditions[k]; !have {
zoneSize.WithLabelValues(k).Set(0)
zoneHealth.WithLabelValues(k).Set(100)
unhealthyNodes.WithLabelValues(k).Set(0)
delete(nc.zoneStates, k)
continue
}
if v != stateFullDisruption {
allWasFullyDisrupted = false
break
}
}
// At least one node was responding in the previous pass or in the current pass. The semantics are as follows:
// - if the new state is "partialDisruption" we call a user defined function that returns a new limiter to use,
// - if the new state is "normal" we resume normal operation (go back to default limiter settings),
// - if the new state is "fullDisruption" we restore the normal eviction rate,
//   - unless all zones in the cluster are in "fullDisruption" - in that case we stop all evictions.
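// Worked scenario (illustrative): with two zones, if zone A enters
// "fullDisruption" while zone B stays "normal", zone A keeps evicting at the
// full-disruption rate; only if A and B were both fully disrupted would all
// evictions stop until some node turns Ready again.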
if !allAreFullyDisrupted || !allWasFullyDisrupted {
// We're switching to full disruption mode
if allAreFullyDisrupted {
klog.V(0).Info("Controller detected that all Nodes are not-Ready. Entering master disruption mode.")
for i := range nodes {
if nc.useTaintBasedEvictions {
_, err := nc.markNodeAsReachable(nodes[i])
if err != nil {
klog.Errorf("Failed to remove taints from Node %v", nodes[i].Name)
}
} else {
nc.cancelPodEviction(nodes[i])
}
}
// We stop all evictions.
for k := range nc.zoneStates {
if nc.useTaintBasedEvictions {
nc.zoneNoExecuteTainter[k].SwapLimiter(0)
} else {
nc.zonePodEvictor[k].SwapLimiter(0)
}
}
for k := range nc.zoneStates {
nc.zoneStates[k] = stateFullDisruption
}
// All rate limiters are updated, so we can return early here.
return
}
// We're exiting full disruption mode
if allWasFullyDisrupted {
klog.V(0).Info("Controller detected that some Nodes are Ready. Exiting master disruption mode.")
// When exiting disruption mode, update probe timestamps on all Nodes.
now := nc.now()
for i := range nodes {
v := nc.nodeHealthMap[nodes[i].Name]
v.probeTimestamp = now
v.readyTransitionTimestamp = now
nc.nodeHealthMap[nodes[i].Name] = v
}
// We reset all rate limiters to settings appropriate for the given state.
for k := range nc.zoneStates {
nc.setLimiterInZone(k, len(zoneToNodeConditions[k]), newZoneStates[k])
nc.zoneStates[k] = newZoneStates[k]
}
return
}
// We know that there's at least one zone that is not fully disrupted, so
// we can use the default behavior for rate limiters.
for k, v := range nc.zoneStates {
newState := newZoneStates[k]
if v == newState {
continue
}
klog.V(0).Infof("Controller detected that zone %v is now in state %v.", k, newState)
nc.setLimiterInZone(k, len(zoneToNodeConditions[k]), newState)
nc.zoneStates[k] = newState
}
}
}
func (nc *Controller) setLimiterInZone(zone string, zoneSize int, state ZoneState) {
switch state {
case stateNormal:
if nc.useTaintBasedEvictions {
nc.zoneNoExecuteTainter[zone].SwapLimiter(nc.evictionLimiterQPS)
} else {
nc.zonePodEvictor[zone].SwapLimiter(nc.evictionLimiterQPS)
}
case statePartialDisruption:
if nc.useTaintBasedEvictions {
nc.zoneNoExecuteTainter[zone].SwapLimiter(
nc.enterPartialDisruptionFunc(zoneSize))
} else {
nc.zonePodEvictor[zone].SwapLimiter(
nc.enterPartialDisruptionFunc(zoneSize))
}
case stateFullDisruption:
if nc.useTaintBasedEvictions {
nc.zoneNoExecuteTainter[zone].SwapLimiter(
nc.enterFullDisruptionFunc(zoneSize))
} else {
nc.zonePodEvictor[zone].SwapLimiter(
nc.enterFullDisruptionFunc(zoneSize))
}
}
}
// classifyNodes classifies allNodes into three categories:
// 1. added: the nodes that are in 'allNodes', but not in 'knownNodeSet'
// 2. deleted: the nodes that are in 'knownNodeSet', but not in 'allNodes'
// 3. newZoneRepresentatives: the nodes that are in both 'knownNodeSet' and 'allNodes', but have no zone state yet
func (nc *Controller) classifyNodes(allNodes []*v1.Node) (added, deleted, newZoneRepresentatives []*v1.Node) {
for i := range allNodes {
if _, has := nc.knownNodeSet[allNodes[i].Name]; !has {
added = append(added, allNodes[i])
} else {
// Currently, we only consider a new zone as updated.
zone := utilnode.GetZoneKey(allNodes[i])
if _, found := nc.zoneStates[zone]; !found {
newZoneRepresentatives = append(newZoneRepresentatives, allNodes[i])
}
}
}
// If there's a difference between the lengths of known Nodes and observed
// nodes, we must have removed some Node.
if len(nc.knownNodeSet)+len(added) != len(allNodes) {
knowSetCopy := map[string]*v1.Node{}
for k, v := range nc.knownNodeSet {
knowSetCopy[k] = v
}
for i := range allNodes {
delete(knowSetCopy, allNodes[i].Name)
}
for i := range knowSetCopy {
deleted = append(deleted, knowSetCopy[i])
}
}
return
}
// HealthyQPSFunc returns the default value for cluster eviction rate - we take
// nodeNum for consistency with ReducedQPSFunc.
func (nc *Controller) HealthyQPSFunc(nodeNum int) float32 {
return nc.evictionLimiterQPS
}
// ReducedQPSFunc returns the QPS to use when the cluster is large: make
// evictions slower; if the cluster is small, stop evictions altogether.
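// Example (illustrative values): with largeClusterThreshold=50 and
// secondaryEvictionLimiterQPS=0.01, a 100-node cluster evicts at 0.01 QPS,
// while a 20-node cluster returns 0 and stops evicting altogether.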
func (nc *Controller) ReducedQPSFunc(nodeNum int) float32 {
if int32(nodeNum) > nc.largeClusterThreshold {
return nc.secondaryEvictionLimiterQPS
}
return 0
}
// addPodEvictorForNewZone checks if a new zone appeared, and if so adds a new evictor.
func (nc *Controller) addPodEvictorForNewZone(node *v1.Node) {
nc.evictorLock.Lock()
defer nc.evictorLock.Unlock()
zone := utilnode.GetZoneKey(node)
if _, found := nc.zoneStates[zone]; !found {
nc.zoneStates[zone] = stateInitial
if !nc.useTaintBasedEvictions {
nc.zonePodEvictor[zone] =
scheduler.NewRateLimitedTimedQueue(
flowcontrol.NewTokenBucketRateLimiter(nc.evictionLimiterQPS, scheduler.EvictionRateLimiterBurst))
} else {
nc.zoneNoExecuteTainter[zone] =
scheduler.NewRateLimitedTimedQueue(
flowcontrol.NewTokenBucketRateLimiter(nc.evictionLimiterQPS, scheduler.EvictionRateLimiterBurst))
}
// Init the metric for the new zone.
klog.Infof("Initializing eviction metric for zone: %v", zone)
evictionsNumber.WithLabelValues(zone).Add(0)
}
}
// cancelPodEviction removes any queued evictions, typically because the node is available again. It
// returns true if an eviction was queued.
func (nc *Controller) cancelPodEviction(node *v1.Node) bool {
zone := utilnode.GetZoneKey(node)
nc.evictorLock.Lock()
defer nc.evictorLock.Unlock()
wasDeleting := nc.zonePodEvictor[zone].Remove(node.Name)
if wasDeleting {
klog.V(2).Infof("Cancelling pod Eviction on Node: %v", node.Name)
return true
}
return false
}
// evictPods queues an eviction for the provided node name, and returns false if the node is already
// queued for eviction.
func (nc *Controller) evictPods(node *v1.Node) bool {
nc.evictorLock.Lock()
defer nc.evictorLock.Unlock()
return nc.zonePodEvictor[utilnode.GetZoneKey(node)].Add(node.Name, string(node.UID))
}
func (nc *Controller) markNodeForTainting(node *v1.Node) bool {
nc.evictorLock.Lock()
defer nc.evictorLock.Unlock()
return nc.zoneNoExecuteTainter[utilnode.GetZoneKey(node)].Add(node.Name, string(node.UID))
}
func (nc *Controller) markNodeAsReachable(node *v1.Node) (bool, error) {
nc.evictorLock.Lock()
defer nc.evictorLock.Unlock()
err := controller.RemoveTaintOffNode(nc.kubeClient, node.Name, node, UnreachableTaintTemplate)
if err != nil {
klog.Errorf("Failed to remove taint from node %v: %v", node.Name, err)
return false, err
}
err = controller.RemoveTaintOffNode(nc.kubeClient, node.Name, node, NotReadyTaintTemplate)
if err != nil {
klog.Errorf("Failed to remove taint from node %v: %v", node.Name, err)
return false, err
}
return nc.zoneNoExecuteTainter[utilnode.GetZoneKey(node)].Remove(node.Name), nil
}
// ComputeZoneState computes the state of a zone from the NodeReady conditions of all Nodes in it,
// and returns the number of not Ready Nodes together with the zone state.
// The zone is considered:
// - fullyDisrupted if there are no Ready Nodes,
// - partiallyDisrupted if at least nc.unhealthyZoneThreshold percent of Nodes are not Ready,
// - normal otherwise
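// Example (illustrative): with unhealthyZoneThreshold=0.55 and 3 of 5 Nodes
// not Ready (60%), the zone is partiallyDisrupted; with 0 Ready Nodes it is
// fullyDisrupted; with only 2 of 5 not Ready it stays normal, since the
// notReadyNodes > 2 requirement is not met.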
func (nc *Controller) ComputeZoneState(nodeReadyConditions []*v1.NodeCondition) (int, ZoneState) {
readyNodes := 0
notReadyNodes := 0
for i := range nodeReadyConditions {
if nodeReadyConditions[i] != nil && nodeReadyConditions[i].Status == v1.ConditionTrue {
readyNodes++
} else {
notReadyNodes++
}
}
switch {
case readyNodes == 0 && notReadyNodes > 0:
return notReadyNodes, stateFullDisruption
case notReadyNodes > 2 && float32(notReadyNodes)/float32(notReadyNodes+readyNodes) >= nc.unhealthyZoneThreshold:
return notReadyNodes, statePartialDisruption
default:
return notReadyNodes, stateNormal
}
}
// reconcileNodeLabels reconciles node labels.
func (nc *Controller) reconcileNodeLabels(nodeName string) error {
node, err := nc.nodeLister.Get(nodeName)
if err != nil {
// If node not found, just ignore it.
if apierrors.IsNotFound(err) {
return nil
}
return err
}
if node.Labels == nil {
// Nothing to reconcile.
return nil
}
labelsToUpdate := map[string]string{}
for _, r := range labelReconcileInfo {
primaryValue, primaryExists := node.Labels[r.primaryKey]
secondaryValue, secondaryExists := node.Labels[r.secondaryKey]
if !primaryExists {
// The primary label key does not exist. This should not happen
// within our supported version skew range, when no external
// components/factors are modifying the node object. Ignore this case.
continue
}
if secondaryExists && primaryValue != secondaryValue {
// Secondary label exists, but is not consistent with the primary
// label. Need to reconcile.
labelsToUpdate[r.secondaryKey] = primaryValue
} else if !secondaryExists && r.ensureSecondaryExists {
// Apply secondary label based on primary label.
labelsToUpdate[r.secondaryKey] = primaryValue
}
}
if len(labelsToUpdate) == 0 {
return nil
}
if !nodeutil.AddOrUpdateLabelsOnNode(nc.kubeClient, labelsToUpdate, node) {
return fmt.Errorf("failed to update labels for node %+v", node)
}
return nil
}
| init |
display.py | import threading
try:
from .grab import Image
except:pass
def grab_bytes():
return Image().asbytes
def send(s,a):
s.post(b's'+grab_bytes(),a)
def show_bytes(r): | if not r.startswith('s'):return
Image(r[1:]).show()
def conf(s,a):
def _conf():
while True:
send(s,a)
threading.Thread(target=_conf).start() | |
info.go | // Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// info prints debugging information about the go environment.
// It is used to help examine the execution environment of rules_go.
package main
import (
"flag"
"fmt"
"log"
"os"
"os/exec"
)
const endOfHereDoc = "EndOfGoInfoReport"
func invoke(goenv *GoEnv, out *os.File, args []string) error {
cmd := exec.Command(goenv.Go, args...)
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
cmd.Env = append(os.Environ(), goenv.Env()...)
if out != nil {
cmd.Stdout = out
cmd.Stderr = out
}
if err := cmd.Run(); err != nil {
return fmt.Errorf("error running %v: %v", args, err)
}
return nil
}
func run(args []string) error {
filename := ""
script := false
flags := flag.NewFlagSet("info", flag.ExitOnError)
flags.BoolVar(&script, "script", script, "Write in script mode")
flags.StringVar(&filename, "out", filename, "The file to write the report to")
goenv := envFlags(flags)
if err := flags.Parse(args); err != nil {
return err
}
f := os.Stderr
if filename != "" {
var err error
f, err = os.Create(filename)
if err != nil {
return fmt.Errorf("Could not create report file: %v", err)
}
defer f.Close()
}
if script {
fmt.Fprintln(f, "cat << "+endOfHereDoc)
}
if err := invoke(goenv, f, []string{"version"}); err != nil {
return err
}
if err := invoke(goenv, f, []string{"env"}); err != nil {
return err
}
if script {
fmt.Fprintln(f, endOfHereDoc)
}
return nil
}
func main() | {
if err := run(os.Args[1:]); err != nil {
log.Fatal(err)
}
} |
|
BinaryTreeNode.d.ts | /**
* @file binary tree node
* @author arlenyang
*/
import BinaryTreeNodeSuperClass from "./BinaryTreeNodeSuperclass";
export default class | extends BinaryTreeNodeSuperClass {
constructor(data: any);
}
| BinaryTreeNode |
test_vnc_load_data.py | #
# Copyright (c) 2018 Juniper Networks, Inc. All rights reserved.
#
import sys
import os
import logging
import json
import test_case
from vnc_api.exceptions import NoIdError, RefsExistError
from vnc_api.gen.resource_client import *
from vnc_api.gen.resource_xsd import *
from vnc_api.utils import obj_type_to_vnc_class
import shutil
sys.path.append("../common/tests")
from time import sleep
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
def retry_exc_handler(tries_remaining, exception, delay):
print >> sys.stderr, "Caught '%s', %d tries remaining, sleeping for %s seconds" % (exception, tries_remaining, delay)
def retries(max_tries, delay=5, backoff=1, exceptions=(Exception,), hook=None):
def dec(func):
def f2(*args, **kwargs):
mydelay = delay
tries = range(max_tries)
tries.reverse()
for tries_remaining in tries:
try:
return func(*args, **kwargs)
except exceptions as e:
if tries_remaining > 0:
if hook is not None:
hook(tries_remaining, e, mydelay)
sleep(mydelay)
mydelay = mydelay * backoff
else:
raise
return f2
return dec
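# Example usage of the retries decorator (a hypothetical sketch; read_ipam is
# not part of this module):
#
#   @retries(3, delay=1, backoff=2, exceptions=(NoIdError,), hook=retry_exc_handler)
#   def read_ipam(vnc_lib):
#       return vnc_lib.network_ipam_read(
#           fq_name=['default-domain', 'default-project', 'service-chain-flat-ipam'])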
# Testing if all the objects in the JSON file are created. If not, create them.
class TestInitData1(test_case.ApiServerTestCase):
@classmethod
def setUpClass(cls, *args, **kwargs):
cls.console_handler = logging.StreamHandler()
cls.console_handler.setLevel(logging.DEBUG)
logger.addHandler(cls.console_handler)
super(TestInitData1, cls).setUpClass(
extra_config_knobs=[('DEFAULTS', 'fabric_ansible_dir',
"../fabric-ansible/ansible-playbooks")])
# end setUpClass
@classmethod
def tearDownClass(cls, *args, **kwargs):
logger.removeHandler(cls.console_handler)
super(TestInitData1, cls).tearDownClass(*args, **kwargs)
# end tearDownClass
def create_object(self, object, res_type, fq_name):
# Get the class name from object type
vnc_cls = obj_type_to_vnc_class(res_type, __name__)
instance_obj = vnc_cls.from_dict(**object)
try:
if(res_type == "job-template"):
schema_name = fq_name.replace('template', 'schema.json')
with open(os.path.join("../fabric-ansible/ansible-playbooks" +
'/schema/', schema_name),'r+') as schema_file:
schema_json = json.load(schema_file)
object["job_template_input_schema"] = schema_json.get(
"input_schema")
object["job_template_output_schema"] = schema_json.get(
"output_schema")
self._vnc_lib.job_template_create(instance_obj)
else:
self._vnc_lib._object_create(res_type, instance_obj)
except RefsExistError:
pass
def test_load_init_data_2(self):
object = {}
res_type = ""
fq_name = ""
try:
with open("../fabric-ansible/ansible-playbooks/conf"
"/predef_payloads.json") as data_file:
input_json = json.load(data_file)
for item in input_json.get('data'):
res_type = item.get("object_type")
for object in item.get("objects"):
fq_name = object.get("name")
self._vnc_lib._object_read(res_type=res_type, fq_name=fq_name)
except NoIdError:
self.create_object(object, res_type, fq_name)
except Exception as e:
print ("Test failed due to unexpected error: %s" % str(e))
# Test when object_type has an invalid name
class | (test_case.ApiServerTestCase):
@classmethod
def setUpClass(cls, *args, **kwargs):
cls.console_handler = logging.StreamHandler()
cls.console_handler.setLevel(logging.DEBUG)
logger.addHandler(cls.console_handler)
json_data = {
"data": [
{
"object_type": "abc",
"objects": [{"fq_name": ["test"]}]
}
]
}
if not os.path.exists("conf"):
os.makedirs("conf")
with open("conf/predef_payloads.json", "w") as f:
json.dump(json_data, f)
super(TestInitDataError2, cls).setUpClass(
extra_config_knobs=[('DEFAULTS', 'fabric_ansible_dir',
".")])
# end setUpClass
@classmethod
def tearDownClass(cls, *args, **kwargs):
logger.removeHandler(cls.console_handler)
if os.path.exists("conf"):
shutil.rmtree("conf")
super(TestInitDataError2, cls).tearDownClass(*args, **kwargs)
# end tearDownClass
@retries(5, hook=retry_exc_handler)
def test_load_init_data_02(self):
try:
ipam_fq_name = ['default-domain', 'default-project',
'service-chain-flat-ipam']
ipam_obj = self._vnc_lib.network_ipam_read(fq_name=ipam_fq_name)
if (ipam_obj):
jb_list = self._vnc_lib.job_templates_list()
self.assertEquals(len(jb_list.get('job-templates')), 0)
except Exception as e:
print( "Test failed due to unexpected error: %s" % str(e))
# Testing when the JSON is invalid
class TestInitDataError3(test_case.ApiServerTestCase):
@classmethod
def setUpClass(cls, *args, **kwargs):
cls.console_handler = logging.StreamHandler()
cls.console_handler.setLevel(logging.DEBUG)
logger.addHandler(cls.console_handler)
json_data = "abc"
if not os.path.exists("conf"):
os.makedirs("conf")
with open("conf/predef_payloads.json", "w") as f:
f.write(json_data)
super(TestInitDataError3, cls).setUpClass(
extra_config_knobs=[('DEFAULTS', 'fabric_ansible_dir',
".")])
# end setUpClass
@classmethod
def tearDownClass(cls, *args, **kwargs):
logger.removeHandler(cls.console_handler)
if os.path.exists("conf"):
shutil.rmtree("conf")
super(TestInitDataError3, cls).tearDownClass(*args, **kwargs)
# end tearDownClass
@retries(5, hook=retry_exc_handler)
def test_load_init_data_04(self):
try:
ipam_fq_name = ['default-domain', 'default-project',
'service-chain-flat-ipam']
ipam_obj = self._vnc_lib.network_ipam_read(fq_name=ipam_fq_name)
if (ipam_obj):
jb_list = self._vnc_lib.job_templates_list()
self.assertEquals(len(jb_list.get('job-templates')), 0)
except Exception as e:
print("Test failed due to unexpected error: %s" % str(e))
# Testing when tag type is unknown
class TestInitDataError4(test_case.ApiServerTestCase):
@classmethod
def setUpClass(cls, *args, **kwargs):
cls.console_handler = logging.StreamHandler()
cls.console_handler.setLevel(logging.DEBUG)
logger.addHandler(cls.console_handler)
# create predef_payloads.json in the current dir with a tag entry
# that uses an unknown tag type
json_data = {
"data": [
{
"object_type": "tag",
"objects": [
{
"fq_name": [
"abc=management_ip"
],
"name": "abc=management_ip",
"tag_type_name": "abc",
"tag_value": "management_ip"
}
]
}
]
}
if not os.path.exists("conf"):
os.makedirs("conf")
with open("conf/predef_payloads.json", "w") as f:
json.dump(json_data, f)
super(TestInitDataError4, cls).setUpClass(
extra_config_knobs=[('DEFAULTS', 'fabric_ansible_dir',
".")])
# end setUpClass
@classmethod
def tearDownClass(cls, *args, **kwargs):
logger.removeHandler(cls.console_handler)
if os.path.exists("conf"):
shutil.rmtree("conf")
super(TestInitDataError4, cls).tearDownClass(*args, **kwargs)
# end tearDownClass
@retries(5, hook=retry_exc_handler)
def test_load_init_data_05(self):
try:
ipam_fq_name = ['default-domain', 'default-project',
'service-chain-flat-ipam']
ipam_obj = self._vnc_lib.network_ipam_read(fq_name=ipam_fq_name)
if (ipam_obj):
tags = self._vnc_lib.tags_list()
self.assertEquals(len(tags.get('tags')), 0)
except Exception as e:
print("Test failed due to unexpected error: %s" % str(e))
| TestInitDataError2 |
beautifier.min.js | !function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define("beautifier",[],e):"object"==typeof exports?exports.beautifier=e():t.beautifier=e()}("undefined"!=typeof self?self:"undefined"!=typeof windows?window:"undefined"!=typeof global?global:this,(function(){return function(){"use strict";var t={7:function(t){function e(t,e){t="string"==typeof t?t:t.source,e="string"==typeof e?e:e.source,this.__directives_block_pattern=new RegExp(t+/ beautify( \w+[:]\w+)+ /.source+e,"g"),this.__directive_pattern=/ (\w+)[:](\w+)/g,this.__directives_end_ignore_pattern=new RegExp(t+/\sbeautify\signore:end\s/.source+e,"g")}e.prototype.get_directives=function(t){if(!t.match(this.__directives_block_pattern))return null;var e={};this.__directive_pattern.lastIndex=0;for(var i=this.__directive_pattern.exec(t);i;)e[i[1]]=i[2],i=this.__directive_pattern.exec(t);return e},e.prototype.readIgnored=function(t){return t.readUntilAfter(this.__directives_end_ignore_pattern)},t.exports.t=e},418:function(t){var e=RegExp.prototype.hasOwnProperty("sticky");function i(t){this.__input=t||"",this.__input_length=this.__input.length,this.__position=0}i.prototype.restart=function(){this.__position=0},i.prototype.back=function(){this.__position>0&&(this.__position-=1)},i.prototype.hasNext=function(){return this.__position<this.__input_length},i.prototype.next=function(){var t=null;return this.hasNext()&&(t=this.__input.charAt(this.__position),this.__position+=1),t},i.prototype.peek=function(t){var e=null;return t=t||0,(t+=this.__position)>=0&&t<this.__input_length&&(e=this.__input.charAt(t)),e},i.prototype.__match=function(t,i){t.lastIndex=i;var n=t.exec(this.__input);return!n||e&&t.sticky||n.index!==i&&(n=null),n},i.prototype.test=function(t,e){return e=e||0,(e+=this.__position)>=0&&e<this.__input_length&&!!this.__match(t,e)},i.prototype.testChar=function(t,e){var i=this.peek(e);return t.lastIndex=0,null!==i&&t.test(i)},i.prototype.match=function(t){var e=this.__match(t,this.__position);return e?this.__position+=e[0].length:e=null,e},i.prototype.read=function(t,e,i){var n,_="";return t&&(n=this.match(t))&&(_+=n[0]),!e||!n&&t||(_+=this.readUntil(e,i)),_},i.prototype.readUntil=function(t,e){var i,n=this.__position;t.lastIndex=this.__position;var _=t.exec(this.__input);return _?(n=_.index,e&&(n+=_[0].length)):n=this.__input_length,i=this.__input.substring(this.__position,n),this.__position=n,i},i.prototype.readUntilAfter=function(t){return this.readUntil(t,!0)},i.prototype.get_regexp=function(t,i){var n=null,_="g";return i&&e&&(_="y"),"string"==typeof t&&""!==t?n=new RegExp(t,_):t&&(n=new RegExp(t.source,_)),n},i.prototype.get_literal_regexp=function(t){return RegExp(t.replace(/[-\/\\^$*+?.()|[\]{}]/g,"\\$&"))},i.prototype.peekUntilAfter=function(t){var e=this.__position,i=this.readUntilAfter(t);return this.__position=e,i},i.prototype.lookBack=function(t){var e=this.__position-1;return e>=t.length&&this.__input.substring(e-t.length,e).toLowerCase()===t},t.exports.g=i},915:function(t){function e(t,e){this.raw_options=function(t,e){var i,n={};for(i in t=function(t){var e,i={};for(e in t)i[e.replace(/-/g,"_")]=t[e];return i}(t))i!==e&&(n[i]=t[i]);if(e&&t[e])for(i in t[e])n[i]=t[e][i];return n}(t,e),this.disabled=this._get_boolean("disabled"),this.eol=this._get_characters("eol","auto"),this.end_with_newline=this._get_boolean("end_with_newline"),this.indent_size=this._get_number("indent_size",4),this.indent_char=this._get_characters("indent_char"," 
"),this.indent_level=this._get_number("indent_level"),this.preserve_newlines=this._get_boolean("preserve_newlines",!0),this.max_preserve_newlines=this._get_number("max_preserve_newlines",32786),this.preserve_newlines||(this.max_preserve_newlines=0),this.indent_with_tabs=this._get_boolean("indent_with_tabs","\t"===this.indent_char),this.indent_with_tabs&&(this.indent_char="\t",1===this.indent_size&&(this.indent_size=4)),this.wrap_line_length=this._get_number("wrap_line_length",this._get_number("max_char")),this.indent_empty_lines=this._get_boolean("indent_empty_lines"),this.templating=this._get_selection_list("templating",["auto","none","django","erb","handlebars","php","smarty"],["auto"])}e.prototype._get_array=function(t,e){var i=this.raw_options[t],n=e||[];return"object"==typeof i?null!==i&&"function"==typeof i.concat&&(n=i.concat()):"string"==typeof i&&(n=i.split(/[^a-zA-Z0-9_\/\-]+/)),n},e.prototype._get_boolean=function(t,e){var i=this.raw_options[t];return void 0===i?!!e:!!i},e.prototype._get_characters=function(t,e){var i=this.raw_options[t],n=e||"";return"string"==typeof i&&(n=i.replace(/\\r/,"\r").replace(/\\n/,"\n").replace(/\\t/,"\t")),n},e.prototype._get_number=function(t,e){var i=this.raw_options[t];e=parseInt(e,10),isNaN(e)&&(e=0);var n=parseInt(i,10);return isNaN(n)&&(n=e),n},e.prototype._get_selection=function(t,e,i){var n=this._get_selection_list(t,e,i);if(1!==n.length)throw new Error("Invalid Option Value: The option '"+t+"' can only be one of the following values:\n"+e+"\nYou passed in: '"+this.raw_options[t]+"'");return n[0]},e.prototype._get_selection_list=function(t,e,i){if(!e||0===e.length)throw new Error("Selection list cannot be empty.");if(i=i||[e[0]],!this._is_valid_selection(i,e))throw new Error("Invalid Default Value!");var n=this._get_array(t,i);if(!this._is_valid_selection(n,e))throw new Error("Invalid Option Value: The option '"+t+"' can contain only the following values:\n"+e+"\nYou passed in: '"+this.raw_options[t]+"'");return n},e.prototype._is_valid_selection=function(t,e){return t.length&&e.length&&!t.some((function(t){return-1===e.indexOf(t)}))},t.exports.Ei=e},558:function(t){function e(t){this.__parent=t,this.__character_count=0,this.__indent_count=-1,this.__alignment_count=0,this.__wrap_point_index=0,this.__wrap_point_character_count=0,this.__wrap_point_indent_count=-1,this.__wrap_point_alignment_count=0,this.__items=[]}function i(t,e){this.__cache=[""],this.__indent_size=t.indent_size,this.__indent_string=t.indent_char,t.indent_with_tabs||(this.__indent_string=new Array(t.indent_size+1).join(t.indent_char)),e=e||"",t.indent_level>0&&(e=new Array(t.indent_level+1).join(this.__indent_string)),this.__base_string=e,this.__base_string_length=e.length}function n(t,n){this.__indent_cache=new i(t,n),this.raw=!1,this._end_with_newline=t.end_with_newline,this.indent_size=t.indent_size,this.wrap_line_length=t.wrap_line_length,this.indent_empty_lines=t.indent_empty_lines,this.__lines=[],this.previous_line=null,this.current_line=null,this.next_line=new e(this),this.space_before_token=!1,this.non_breaking_space=!1,this.previous_token_wrapped=!1,this.__add_outputline()}e.prototype.clone_empty=function(){var t=new e(this.__parent);return t.set_indent(this.__indent_count,this.__alignment_count),t},e.prototype.item=function(t){return t<0?this.__items[this.__items.length+t]:this.__items[t]},e.prototype.has_match=function(t){for(var 
e=this.__items.length-1;e>=0;e--)if(this.__items[e].match(t))return!0;return!1},e.prototype.set_indent=function(t,e){this.is_empty()&&(this.__indent_count=t||0,this.__alignment_count=e||0,this.__character_count=this.__parent.get_indent_size(this.__indent_count,this.__alignment_count))},e.prototype._set_wrap_point=function(){this.__parent.wrap_line_length&&(this.__wrap_point_index=this.__items.length,this.__wrap_point_character_count=this.__character_count,this.__wrap_point_indent_count=this.__parent.next_line.__indent_count,this.__wrap_point_alignment_count=this.__parent.next_line.__alignment_count)},e.prototype._should_wrap=function(){return this.__wrap_point_index&&this.__character_count>this.__parent.wrap_line_length&&this.__wrap_point_character_count>this.__parent.next_line.__character_count},e.prototype._allow_wrap=function(){if(this._should_wrap()){this.__parent.add_new_line();var t=this.__parent.current_line;return t.set_indent(this.__wrap_point_indent_count,this.__wrap_point_alignment_count),t.__items=this.__items.slice(this.__wrap_point_index),this.__items=this.__items.slice(0,this.__wrap_point_index),t.__character_count+=this.__character_count-this.__wrap_point_character_count,this.__character_count=this.__wrap_point_character_count," "===t.__items[0]&&(t.__items.splice(0,1),t.__character_count-=1),!0}return!1},e.prototype.is_empty=function(){return 0===this.__items.length},e.prototype.last=function(){return this.is_empty()?null:this.__items[this.__items.length-1]},e.prototype.push=function(t){this.__items.push(t);var e=t.lastIndexOf("\n");-1!==e?this.__character_count=t.length-e:this.__character_count+=t.length},e.prototype.pop=function(){var t=null;return this.is_empty()||(t=this.__items.pop(),this.__character_count-=t.length),t},e.prototype._remove_indent=function(){this.__indent_count>0&&(this.__indent_count-=1,this.__character_count-=this.__parent.indent_size)},e.prototype._remove_wrap_indent=function(){this.__wrap_point_indent_count>0&&(this.__wrap_point_indent_count-=1)},e.prototype.trim=function(){for(;" "===this.last();)this.__items.pop(),this.__character_count-=1},e.prototype.toString=function(){var t="";return this.is_empty()?this.__parent.indent_empty_lines&&(t=this.__parent.get_indent_string(this.__indent_count)):(t=this.__parent.get_indent_string(this.__indent_count,this.__alignment_count),t+=this.__items.join("")),t},i.prototype.get_indent_size=function(t,e){var i=this.__base_string_length;return e=e||0,t<0&&(i=0),(i+=t*this.__indent_size)+e},i.prototype.get_indent_string=function(t,e){var i=this.__base_string;return e=e||0,t<0&&(t=0,i=""),e+=t*this.__indent_size,this.__ensure_cache(e),i+this.__cache[e]},i.prototype.__ensure_cache=function(t){for(;t>=this.__cache.length;)this.__add_column()},i.prototype.__add_column=function(){var t=this.__cache.length,e=0,i="";this.__indent_size&&t>=this.__indent_size&&(t-=(e=Math.floor(t/this.__indent_size))*this.__indent_size,i=new Array(e+1).join(this.__indent_string)),t&&(i+=new Array(t+1).join(" ")),this.__cache.push(i)},n.prototype.__add_outputline=function(){this.previous_line=this.current_line,this.current_line=this.next_line.clone_empty(),this.__lines.push(this.current_line)},n.prototype.get_line_number=function(){return this.__lines.length},n.prototype.get_indent_string=function(t,e){return this.__indent_cache.get_indent_string(t,e)},n.prototype.get_indent_size=function(t,e){return 
this.__indent_cache.get_indent_size(t,e)},n.prototype.is_empty=function(){return!this.previous_line&&this.current_line.is_empty()},n.prototype.add_new_line=function(t){return!(this.is_empty()||!t&&this.just_added_newline()||(this.raw||this.__add_outputline(),0))},n.prototype.get_code=function(t){this.trim(!0);var e=this.current_line.pop();e&&("\n"===e[e.length-1]&&(e=e.replace(/\n+$/g,"")),this.current_line.push(e)),this._end_with_newline&&this.__add_outputline();var i=this.__lines.join("\n");return"\n"!==t&&(i=i.replace(/[\n]/g,t)),i},n.prototype.set_wrap_point=function(){this.current_line._set_wrap_point()},n.prototype.set_indent=function(t,e){return t=t||0,e=e||0,this.next_line.set_indent(t,e),this.__lines.length>1?(this.current_line.set_indent(t,e),!0):(this.current_line.set_indent(),!1)},n.prototype.add_raw_token=function(t){for(var e=0;e<t.newlines;e++)this.__add_outputline();this.current_line.set_indent(-1),this.current_line.push(t.whitespace_before),this.current_line.push(t.text),this.space_before_token=!1,this.non_breaking_space=!1,this.previous_token_wrapped=!1},n.prototype.add_token=function(t){this.__add_space_before_token(),this.current_line.push(t),this.space_before_token=!1,this.non_breaking_space=!1,this.previous_token_wrapped=this.current_line._allow_wrap()},n.prototype.__add_space_before_token=function(){this.space_before_token&&!this.just_added_newline()&&(this.non_breaking_space||this.set_wrap_point(),this.current_line.push(" "))},n.prototype.remove_indent=function(t){for(var e=this.__lines.length;t<e;)this.__lines[t]._remove_indent(),t++;this.current_line._remove_wrap_indent()},n.prototype.trim=function(t){for(t=void 0!==t&&t,this.current_line.trim();t&&this.__lines.length>1&&this.current_line.is_empty();)this.__lines.pop(),this.current_line=this.__lines[this.__lines.length-1],this.current_line.trim();this.previous_line=this.__lines.length>1?this.__lines[this.__lines.length-2]:null},n.prototype.just_added_newline=function(){return this.current_line.is_empty()},n.prototype.just_added_blankline=function(){return this.is_empty()||this.current_line.is_empty()&&this.previous_line.is_empty()},n.prototype.ensure_empty_line_above=function(t,i){for(var n=this.__lines.length-2;n>=0;){var _=this.__lines[n];if(_.is_empty())break;if(0!==_.item(0).indexOf(t)&&_.item(-1)!==i){this.__lines.splice(n+1,0,new e(this)),this.previous_line=this.__lines[this.__lines.length-2];break}n--}},t.exports.r=n},348:function(t){function e(t,e){this._input=t,this._starting_pattern=null,this._match_pattern=null,this._until_pattern=null,this._until_after=!1,e&&(this._starting_pattern=this._input.get_regexp(e._starting_pattern,!0),this._match_pattern=this._input.get_regexp(e._match_pattern,!0),this._until_pattern=this._input.get_regexp(e._until_pattern),this._until_after=e._until_after)}e.prototype.read=function(){var t=this._input.read(this._starting_pattern);return this._starting_pattern&&!t||(t+=this._input.read(this._match_pattern,this._until_pattern,this._until_after)),t},e.prototype.read_match=function(){return this._input.match(this._match_pattern)},e.prototype.until_after=function(t){var e=this._create();return e._until_after=!0,e._until_pattern=this._input.get_regexp(t),e._update(),e},e.prototype.until=function(t){var e=this._create();return e._until_after=!1,e._until_pattern=this._input.get_regexp(t),e._update(),e},e.prototype.starting_with=function(t){var e=this._create();return e._starting_pattern=this._input.get_regexp(t,!0),e._update(),e},e.prototype.matching=function(t){var 
e=this._create();return e._match_pattern=this._input.get_regexp(t,!0),e._update(),e},e.prototype._create=function(){return new e(this._input,this)},e.prototype._update=function(){},t.exports.c=e},800:function(t,e,i){var n=i(348).c,_={django:!1,erb:!1,handlebars:!1,php:!1,smarty:!1};function s(t,e){n.call(this,t,e),this.__template_pattern=null,this._disabled=Object.assign({},_),this._excluded=Object.assign({},_),e&&(this.__template_pattern=this._input.get_regexp(e.__template_pattern),this._excluded=Object.assign(this._excluded,e._excluded),this._disabled=Object.assign(this._disabled,e._disabled));var i=new n(t);this.__patterns={handlebars_comment:i.starting_with(/{{!--/).until_after(/--}}/),handlebars_unescaped:i.starting_with(/{{{/).until_after(/}}}/),handlebars:i.starting_with(/{{/).until_after(/}}/),php:i.starting_with(/<\?(?:[= ]|php)/).until_after(/\?>/),erb:i.starting_with(/<%[^%]/).until_after(/[^%]%>/),django:i.starting_with(/{%/).until_after(/%}/),django_value:i.starting_with(/{{/).until_after(/}}/),django_comment:i.starting_with(/{#/).until_after(/#}/),smarty:i.starting_with(/{(?=[^}{\s\n])/).until_after(/[^\s\n]}/),smarty_comment:i.starting_with(/{\*/).until_after(/\*}/),smarty_literal:i.starting_with(/{literal}/).until_after(/{\/literal}/)}}s.prototype=new n,s.prototype._create=function(){return new s(this._input,this)},s.prototype._update=function(){this.__set_templated_pattern()},s.prototype.disable=function(t){var e=this._create();return e._disabled[t]=!0,e._update(),e},s.prototype.read_options=function(t){var e=this._create();for(var i in _)e._disabled[i]=-1===t.templating.indexOf(i);return e._update(),e},s.prototype.exclude=function(t){var e=this._create();return e._excluded[t]=!0,e._update(),e},s.prototype.read=function(){var t="";t=this._match_pattern?this._input.read(this._starting_pattern):this._input.read(this._starting_pattern,this.__template_pattern);for(var e=this._read_template();e;)this._match_pattern?e+=this._input.read(this._match_pattern):e+=this._input.readUntil(this.__template_pattern),t+=e,e=this._read_template();return this._until_after&&(t+=this._input.readUntilAfter(this._until_pattern)),t},s.prototype.__set_templated_pattern=function(){var t=[];this._disabled.php||t.push(this.__patterns.php._starting_pattern.source),this._disabled.handlebars||t.push(this.__patterns.handlebars._starting_pattern.source),this._disabled.erb||t.push(this.__patterns.erb._starting_pattern.source),this._disabled.django||(t.push(this.__patterns.django._starting_pattern.source),t.push(this.__patterns.django_value._starting_pattern.source),t.push(this.__patterns.django_comment._starting_pattern.source)),this._disabled.smarty||t.push(this.__patterns.smarty._starting_pattern.source),this._until_pattern&&t.push(this._until_pattern.source),this.__template_pattern=this._input.get_regexp("(?:"+t.join("|")+")")},s.prototype._read_template=function(){var t="",e=this._input.peek();if("<"===e){var 
i=this._input.peek(1);this._disabled.php||this._excluded.php||"?"!==i||(t=t||this.__patterns.php.read()),this._disabled.erb||this._excluded.erb||"%"!==i||(t=t||this.__patterns.erb.read())}else"{"===e&&(this._disabled.handlebars||this._excluded.handlebars||(t=(t=(t=t||this.__patterns.handlebars_comment.read())||this.__patterns.handlebars_unescaped.read())||this.__patterns.handlebars.read()),this._disabled.django||(this._excluded.django||this._excluded.handlebars||(t=t||this.__patterns.django_value.read()),this._excluded.django||(t=(t=t||this.__patterns.django_comment.read())||this.__patterns.django.read())),this._disabled.smarty||this._disabled.django&&this._disabled.handlebars&&(t=(t=(t=t||this.__patterns.smarty_comment.read())||this.__patterns.smarty_literal.read())||this.__patterns.smarty.read()));return t},t.exports.A=s},650:function(t){t.exports.W=function(t,e,i,n){this.type=t,this.text=e,this.comments_before=null,this.newlines=i||0,this.whitespace_before=n||"",this.parent=null,this.next=null,this.previous=null,this.opened=null,this.closed=null,this.directives=null}},147:function(t,e,i){var n=i(418).g,_=i(650).W,s=i(134).q,a=i(401).d,u={START:"TK_START",RAW:"TK_RAW",EOF:"TK_EOF"},r=function(t,e){this._input=new n(t),this._options=e||{},this.__tokens=null,this._patterns={},this._patterns.whitespace=new a(this._input)};r.prototype.tokenize=function(){var t;this._input.restart(),this.__tokens=new s,this._reset();for(var e=new _(u.START,""),i=null,n=[],a=new s;e.type!==u.EOF;){for(t=this._get_next_token(e,i);this._is_comment(t);)a.add(t),t=this._get_next_token(e,i);a.isEmpty()||(t.comments_before=a,a=new s),t.parent=i,this._is_opening(t)?(n.push(i),i=t):i&&this._is_closing(t,i)&&(t.opened=i,i.closed=t,i=n.pop(),t.parent=i),t.previous=e,e.next=t,this.__tokens.add(t),e=t}return this.__tokens},r.prototype._is_first_token=function(){return this.__tokens.isEmpty()},r.prototype._reset=function(){},r.prototype._get_next_token=function(t,e){this._readWhitespace();var i=this._input.read(/.+/g);return i?this._create_token(u.RAW,i):this._create_token(u.EOF,"")},r.prototype._is_comment=function(t){return!1},r.prototype._is_opening=function(t){return!1},r.prototype._is_closing=function(t,e){return!1},r.prototype._create_token=function(t,e){return new _(t,e,this._patterns.whitespace.newline_count,this._patterns.whitespace.whitespace_before_token)},r.prototype._readWhitespace=function(){return this._patterns.whitespace.read()},t.exports.d=r,t.exports.o=u},134:function(t){function e(t){this.__tokens=[],this.__tokens_length=this.__tokens.length,this.__position=0,this.__parent_token=t}e.prototype.restart=function(){this.__position=0},e.prototype.isEmpty=function(){return 0===this.__tokens_length},e.prototype.hasNext=function(){return this.__position<this.__tokens_length},e.prototype.next=function(){var t=null;return this.hasNext()&&(t=this.__tokens[this.__position],this.__position+=1),t},e.prototype.peek=function(t){var e=null;return t=t||0,(t+=this.__position)>=0&&t<this.__tokens_length&&(e=this.__tokens[t]),e},e.prototype.add=function(t){this.__parent_token&&(t.parent=this.__parent_token),this.__tokens.push(t),this.__tokens_length+=1},t.exports.q=e},401:function(t,e,i){var n=i(348).c;function _(t,e){n.call(this,t,e),e?this._line_regexp=this._input.get_regexp(e._line_regexp):this.__set_whitespace_patterns("",""),this.newline_count=0,this.whitespace_before_token=""}_.prototype=new n,_.prototype.__set_whitespace_patterns=function(t,e){t+="\\t ",e+="\\n\\r",this._match_pattern=this._input.get_regexp("["+t+e+"]+",!0),this._newline_regexp=this._input.get_regexp("\\r\\n|["+e+"]")},_.prototype.read=function(){this.newline_count=0,this.whitespace_before_token="";var t=this._input.read(this._match_pattern);if(" "===t)this.whitespace_before_token=" ";else if(t){var e=this.__split(this._newline_regexp,t);this.newline_count=e.length-1,this.whitespace_before_token=e[this.newline_count]}return t},_.prototype.matching=function(t,e){var i=this._create();return i.__set_whitespace_patterns(t,e),i._update(),i},_.prototype._create=function(){return new _(this._input,this)},_.prototype.__split=function(t,e){t.lastIndex=0;for(var i=0,n=[],_=t.exec(e);_;)n.push(e.substring(i,_.index)),i=_.index+_[0].length,_=t.exec(e);return i<e.length?n.push(e.substring(i,e.length)):n.push(""),n},t.exports.d=_},434:function(t,e,i){var n=i(492).E,_=i(558).r,s=i(418).g,a=new(0,i(7).t)(/\/\*/,/\*\//),u=/\r\n|[\r\n]/,r=/\r\n|[\r\n]/g,o=/\s/,h=/(?:\s|\n)+/g,p=/\/\*(?:[\s\S]*?)((?:\*\/)|$)/g,l=/\/\/(?:[^\n\r\u2028\u2029]*)/g;function c(t,e){this._source_text=t||"",this._options=new n(e),this._ch=null,this._input=null,this.NESTED_AT_RULE={"@page":!0,"@font-face":!0,"@keyframes":!0,"@media":!0,"@supports":!0,"@document":!0},this.CONDITIONAL_GROUP_RULE={"@media":!0,"@supports":!0,"@document":!0}}c.prototype.eatString=function(t){var e="";for(this._ch=this._input.next();this._ch;){if(e+=this._ch,"\\"===this._ch)e+=this._input.next();else if(-1!==t.indexOf(this._ch)||"\n"===this._ch)break;this._ch=this._input.next()}return e},c.prototype.eatWhitespace=function(t){for(var e=o.test(this._input.peek()),i=0;o.test(this._input.peek());)this._ch=this._input.next(),t&&"\n"===this._ch&&(0===i||i<this._options.max_preserve_newlines)&&(i++,this._output.add_new_line(!0));return e},c.prototype.foundNestedPseudoClass=function(){for(var t=0,e=1,i=this._input.peek(e);i;){if("{"===i)return!0;if("("===i)t+=1;else if(")"===i){if(0===t)return!1;t-=1}else if(";"===i||"}"===i)return!1;e++,i=this._input.peek(e)}return!1},c.prototype.print_string=function(t){this._output.set_indent(this._indentLevel),this._output.non_breaking_space=!0,this._output.add_token(t)},c.prototype.preserveSingleSpace=function(t){t&&(this._output.space_before_token=!0)},c.prototype.indent=function(){this._indentLevel++},c.prototype.outdent=function(){this._indentLevel>0&&this._indentLevel--},c.prototype.beautify=function(){if(this._options.disabled)return this._source_text;var t=this._source_text,e=this._options.eol;"auto"===e&&(e="\n",t&&u.test(t||"")&&(e=t.match(u)[0]));var i=(t=t.replace(r,"\n")).match(/^[\t ]*/)[0];this._output=new _(this._options,i),this._input=new s(t),this._indentLevel=0,this._nestedLevel=0,this._ch=null;for(var n,c,f=0,d=!1,g=!1,b=!1,m=!1,k=!1,y=this._ch;n=""!==this._input.read(h),c=y,this._ch=this._input.next(),"\\"===this._ch&&this._input.hasNext()&&(this._ch+=this._input.next()),y=this._ch,this._ch;)if("/"===this._ch&&"*"===this._input.peek()){this._output.add_new_line(),this._input.back();var w=this._input.read(p),x=a.get_directives(w);x&&"start"===x.ignore&&(w+=a.readIgnored(this._input)),this.print_string(w),this.eatWhitespace(!0),this._output.add_new_line()}else if("/"===this._ch&&"/"===this._input.peek())this._output.space_before_token=!0,this._input.back(),this.print_string(this._input.read(l)),this.eatWhitespace(!0);else if("@"===this._ch)if(this.preserveSingleSpace(n),"{"===this._input.peek())this.print_string(this._ch+this.eatString("}"));else{this.print_string(this._ch);var 
v=this._input.peekUntilAfter(/[: ,;{}()[\]\/='"]/g);v.match(/[ :]$/)&&(v=this.eatString(": ").replace(/\s$/,""),this.print_string(v),this._output.space_before_token=!0),"extend"===(v=v.replace(/\s$/,""))?m=!0:"import"===v&&(k=!0),v in this.NESTED_AT_RULE?(this._nestedLevel+=1,v in this.CONDITIONAL_GROUP_RULE&&(b=!0)):d||0!==f||-1===v.indexOf(":")||(g=!0,this.indent())}else"#"===this._ch&&"{"===this._input.peek()?(this.preserveSingleSpace(n),this.print_string(this._ch+this.eatString("}"))):"{"===this._ch?(g&&(g=!1,this.outdent()),b?(b=!1,d=this._indentLevel>=this._nestedLevel):d=this._indentLevel>=this._nestedLevel-1,this._options.newline_between_rules&&d&&this._output.previous_line&&"{"!==this._output.previous_line.item(-1)&&this._output.ensure_empty_line_above("/",","),this._output.space_before_token=!0,"expand"===this._options.brace_style?(this._output.add_new_line(),this.print_string(this._ch),this.indent(),this._output.set_indent(this._indentLevel)):(this.indent(),this.print_string(this._ch)),this.eatWhitespace(!0),this._output.add_new_line()):"}"===this._ch?(this.outdent(),this._output.add_new_line(),"{"===c&&this._output.trim(!0),k=!1,m=!1,g&&(this.outdent(),g=!1),this.print_string(this._ch),d=!1,this._nestedLevel&&this._nestedLevel--,this.eatWhitespace(!0),this._output.add_new_line(),this._options.newline_between_rules&&!this._output.just_added_blankline()&&"}"!==this._input.peek()&&this._output.add_new_line(!0)):":"===this._ch?!d&&!b||this._input.lookBack("&")||this.foundNestedPseudoClass()||this._input.lookBack("(")||m||0!==f?(this._input.lookBack(" ")&&(this._output.space_before_token=!0),":"===this._input.peek()?(this._ch=this._input.next(),this.print_string("::")):this.print_string(":")):(this.print_string(":"),g||(g=!0,this._output.space_before_token=!0,this.eatWhitespace(!0),this.indent())):'"'===this._ch||"'"===this._ch?(this.preserveSingleSpace(n),this.print_string(this._ch+this.eatString(this._ch)),this.eatWhitespace(!0)):";"===this._ch?0===f?(g&&(this.outdent(),g=!1),m=!1,k=!1,this.print_string(this._ch),this.eatWhitespace(!0),"/"!==this._input.peek()&&this._output.add_new_line()):(this.print_string(this._ch),this.eatWhitespace(!0),this._output.space_before_token=!0):"("===this._ch?this._input.lookBack("url")?(this.print_string(this._ch),this.eatWhitespace(),f++,this.indent(),this._ch=this._input.next(),")"===this._ch||'"'===this._ch||"'"===this._ch?this._input.back():this._ch&&(this.print_string(this._ch+this.eatString(")")),f&&(f--,this.outdent()))):(this.preserveSingleSpace(n),this.print_string(this._ch),this.eatWhitespace(),f++,this.indent()):")"===this._ch?(f&&(f--,this.outdent()),this.print_string(this._ch)):","===this._ch?(this.print_string(this._ch),this.eatWhitespace(!0),!this._options.selector_separator_newline||g||0!==f||k?this._output.space_before_token=!0:this._output.add_new_line()):">"!==this._ch&&"+"!==this._ch&&"~"!==this._ch||g||0!==f?"]"===this._ch?this.print_string(this._ch):"["===this._ch?(this.preserveSingleSpace(n),this.print_string(this._ch)):"="===this._ch?(this.eatWhitespace(),this.print_string("="),o.test(this._ch)&&(this._ch="")):"!"!==this._ch||this._input.lookBack("\\")?(this.preserveSingleSpace(n),this.print_string(this._ch)):(this.print_string(" "),this.print_string(this._ch)):this._options.space_around_combinator?(this._output.space_before_token=!0,this.print_string(this._ch),this._output.space_before_token=!0):(this.print_string(this._ch),this.eatWhitespace(),this._ch&&o.test(this._ch)&&(this._ch=""));return this._output.get_code(e)},t.exports.K=c},968:function(t,e,i){var n=i(434).K,_=i(492).E;t.exports=function(t,e){return new n(t,e).beautify()},t.exports.defaultOptions=function(){return new _}},492:function(t,e,i){var n=i(915).Ei;function _(t){n.call(this,t,"css"),this.selector_separator_newline=this._get_boolean("selector_separator_newline",!0),this.newline_between_rules=this._get_boolean("newline_between_rules",!0);var e=this._get_boolean("space_around_selector_separator");this.space_around_combinator=this._get_boolean("space_around_combinator")||e;var i=this._get_selection_list("brace_style",["collapse","expand","end-expand","none","preserve-inline"]);this.brace_style="collapse";for(var _=0;_<i.length;_++)"expand"!==i[_]?this.brace_style="collapse":this.brace_style=i[_]}_.prototype=new n,t.exports.E=_},843:function(t,e,i){var n=i(402).E,_=i(558).r,s=i(41).d,a=i(41).o,u=/\r\n|[\r\n]/,r=/\r\n|[\r\n]/g,o=function(t,e){this.indent_level=0,this.alignment_size=0,this.max_preserve_newlines=t.max_preserve_newlines,this.preserve_newlines=t.preserve_newlines,this._output=new _(t,e)};o.prototype.current_line_has_match=function(t){return this._output.current_line.has_match(t)},o.prototype.set_space_before_token=function(t,e){this._output.space_before_token=t,this._output.non_breaking_space=e},o.prototype.set_wrap_point=function(){this._output.set_indent(this.indent_level,this.alignment_size),this._output.set_wrap_point()},o.prototype.add_raw_token=function(t){this._output.add_raw_token(t)},o.prototype.print_preserved_newlines=function(t){var e=0;t.type!==a.TEXT&&t.previous.type!==a.TEXT&&(e=t.newlines?1:0),this.preserve_newlines&&(e=t.newlines<this.max_preserve_newlines+1?t.newlines:this.max_preserve_newlines+1);for(var i=0;i<e;i++)this.print_newline(i>0);return 0!==e},o.prototype.traverse_whitespace=function(t){return!(!t.whitespace_before&&!t.newlines||(this.print_preserved_newlines(t)||(this._output.space_before_token=!0),0))},o.prototype.previous_token_wrapped=function(){return this._output.previous_token_wrapped},o.prototype.print_newline=function(t){this._output.add_new_line(t)},o.prototype.print_token=function(t){t.text&&(this._output.set_indent(this.indent_level,this.alignment_size),this._output.add_token(t.text))},o.prototype.indent=function(){this.indent_level++},o.prototype.get_full_indent=function(t){return(t=this.indent_level+(t||0))<1?"":this._output.get_indent_string(t)};function h(t,e){return-1!==e.indexOf(t)}function p(t,e,i){this.parent=t||null,this.tag=e?e.tag_name:"",this.indent_level=i||0,this.parser_token=e||null}function l(t){this._printer=t,this._current_frame=null}function c(t,e,i,_){this._source_text=t||"",e=e||{},this._js_beautify=i,this._css_beautify=_,this._tag_stack=null;var s=new n(e,"html");this._options=s,this._is_wrap_attributes_force="force"===this._options.wrap_attributes.substr(0,"force".length),this._is_wrap_attributes_force_expand_multiline="force-expand-multiline"===this._options.wrap_attributes,this._is_wrap_attributes_force_aligned="force-aligned"===this._options.wrap_attributes,this._is_wrap_attributes_aligned_multiple="aligned-multiple"===this._options.wrap_attributes,this._is_wrap_attributes_preserve="preserve"===this._options.wrap_attributes.substr(0,"preserve".length),this._is_wrap_attributes_preserve_aligned="preserve-aligned"===this._options.wrap_attributes}l.prototype.get_parser_token=function(){return this._current_frame?this._current_frame.parser_token:null},l.prototype.record_tag=function(t){var e=new 
p(this._current_frame,t,this._printer.indent_level);this._current_frame=e},l.prototype._try_pop_frame=function(t){var e=null;return t&&(e=t.parser_token,this._printer.indent_level=t.indent_level,this._current_frame=t.parent),e},l.prototype._get_frame=function(t,e){for(var i=this._current_frame;i&&-1===t.indexOf(i.tag);){if(e&&-1!==e.indexOf(i.tag)){i=null;break}i=i.parent}return i},l.prototype.try_pop=function(t,e){var i=this._get_frame([t],e);return this._try_pop_frame(i)},l.prototype.indent_to_tag=function(t){var e=this._get_frame(t);e&&(this._printer.indent_level=e.indent_level)},c.prototype.beautify=function(){if(this._options.disabled)return this._source_text;var t=this._source_text,e=this._options.eol;"auto"===this._options.eol&&(e="\n",t&&u.test(t)&&(e=t.match(u)[0]));var i=(t=t.replace(r,"\n")).match(/^[\t ]*/)[0],n={text:"",type:""},_=new f,h=new o(this._options,i),p=new s(t,this._options).tokenize();this._tag_stack=new l(h);for(var c=null,d=p.next();d.type!==a.EOF;)d.type===a.TAG_OPEN||d.type===a.COMMENT?_=c=this._handle_tag_open(h,d,_,n):d.type===a.ATTRIBUTE||d.type===a.EQUALS||d.type===a.VALUE||d.type===a.TEXT&&!_.tag_complete?c=this._handle_inside_tag(h,d,_,p):d.type===a.TAG_CLOSE?c=this._handle_tag_close(h,d,_):d.type===a.TEXT?c=this._handle_text(h,d,_):h.add_raw_token(d),n=c,d=p.next();return h._output.get_code(e)},c.prototype._handle_tag_close=function(t,e,i){var n={text:e.text,type:e.type};return t.alignment_size=0,i.tag_complete=!0,t.set_space_before_token(e.newlines||""!==e.whitespace_before,!0),i.is_unformatted?t.add_raw_token(e):("<"===i.tag_start_char&&(t.set_space_before_token("/"===e.text[0],!0),this._is_wrap_attributes_force_expand_multiline&&i.has_wrapped_attrs&&t.print_newline(!1)),t.print_token(e)),!i.indent_content||i.is_unformatted||i.is_content_unformatted||(t.indent(),i.indent_content=!1),i.is_inline_element||i.is_unformatted||i.is_content_unformatted||t.set_wrap_point(),n},c.prototype._handle_inside_tag=function(t,e,i,n){var _=i.has_wrapped_attrs,s={text:e.text,type:e.type};if(t.set_space_before_token(e.newlines||""!==e.whitespace_before,!0),i.is_unformatted)t.add_raw_token(e);else if("{"===i.tag_start_char&&e.type===a.TEXT)t.print_preserved_newlines(e)?(e.newlines=0,t.add_raw_token(e)):t.print_token(e);else{if(e.type===a.ATTRIBUTE?(t.set_space_before_token(!0),i.attr_count+=1):(e.type===a.EQUALS||e.type===a.VALUE&&e.previous.type===a.EQUALS)&&t.set_space_before_token(!1),e.type===a.ATTRIBUTE&&"<"===i.tag_start_char&&((this._is_wrap_attributes_preserve||this._is_wrap_attributes_preserve_aligned)&&(t.traverse_whitespace(e),_=_||0!==e.newlines),this._is_wrap_attributes_force)){var u=i.attr_count>1;if(this._is_wrap_attributes_force_expand_multiline&&1===i.attr_count){var r,o=!0,h=0;do{if((r=n.peek(h)).type===a.ATTRIBUTE){o=!1;break}h+=1}while(h<4&&r.type!==a.EOF&&r.type!==a.TAG_CLOSE);u=!o}u&&(t.print_newline(!1),_=!0)}t.print_token(e),_=_||t.previous_token_wrapped(),i.has_wrapped_attrs=_}return s},c.prototype._handle_text=function(t,e,i){var n={text:e.text,type:"TK_CONTENT"};return i.custom_beautifier_name?this._print_custom_beatifier_text(t,e,i):i.is_unformatted||i.is_content_unformatted?t.add_raw_token(e):(t.traverse_whitespace(e),t.print_token(e)),n},c.prototype._print_custom_beatifier_text=function(t,e,i){var n=this;if(""!==e.text){var _,s=e.text,a=1,u="",r="";"javascript"===i.custom_beautifier_name&&"function"==typeof this._js_beautify?_=this._js_beautify:"css"===i.custom_beautifier_name&&"function"==typeof 
this._css_beautify?_=this._css_beautify:"html"===i.custom_beautifier_name&&(_=function(t,e){return new c(t,e,n._js_beautify,n._css_beautify).beautify()}),"keep"===this._options.indent_scripts?a=0:"separate"===this._options.indent_scripts&&(a=-t.indent_level);var o=t.get_full_indent(a);if(s=s.replace(/\n[ \t]*$/,""),"html"!==i.custom_beautifier_name&&"<"===s[0]&&s.match(/^(<!--|<!\[CDATA\[)/)){var h=/^(<!--[^\n]*|<!\[CDATA\[)(\n?)([ \t\n]*)([\s\S]*)(-->|]]>)$/.exec(s);if(!h)return void t.add_raw_token(e);u=o+h[1]+"\n",s=h[4],h[5]&&(r=o+h[5]),s=s.replace(/\n[ \t]*$/,""),(h[2]||-1!==h[3].indexOf("\n"))&&(h=h[3].match(/[ \t]+$/))&&(e.whitespace_before=h[0])}if(s)if(_){var p=function(){this.eol="\n"};p.prototype=this._options.raw_options,s=_(o+s,new p)}else{var l=e.whitespace_before;l&&(s=s.replace(new RegExp("\n("+l+")?","g"),"\n")),s=o+s.replace(/\n/g,"\n"+o)}u&&(s=s?u+s+"\n"+r:u+r),t.print_newline(!1),s&&(e.text=s,e.whitespace_before="",e.newlines=0,t.add_raw_token(e),t.print_newline(!0))}},c.prototype._handle_tag_open=function(t,e,i,n){var _=this._get_tag_open_token(e);return!i.is_unformatted&&!i.is_content_unformatted||i.is_empty_element||e.type!==a.TAG_OPEN||0!==e.text.indexOf("</")?(t.traverse_whitespace(e),this._set_tag_position(t,e,_,i,n),_.is_inline_element||t.set_wrap_point(),t.print_token(e)):(t.add_raw_token(e),_.start_tag_token=this._tag_stack.try_pop(_.tag_name)),(this._is_wrap_attributes_force_aligned||this._is_wrap_attributes_aligned_multiple||this._is_wrap_attributes_preserve_aligned)&&(_.alignment_size=e.text.length+1),_.tag_complete||_.is_unformatted||(t.alignment_size=_.alignment_size),_};var f=function(t,e){var i;this.parent=t||null,this.text="",this.type="TK_TAG_OPEN",this.tag_name="",this.is_inline_element=!1,this.is_unformatted=!1,this.is_content_unformatted=!1,this.is_empty_element=!1,this.is_start_tag=!1,this.is_end_tag=!1,this.indent_content=!1,this.multiline_content=!1,this.custom_beautifier_name=null,this.start_tag_token=null,this.attr_count=0,this.has_wrapped_attrs=!1,this.alignment_size=0,this.tag_complete=!1,this.tag_start_char="",this.tag_check="",e?(this.tag_start_char=e.text[0],this.text=e.text,"<"===this.tag_start_char?(i=e.text.match(/^<([^\s>]*)/),this.tag_check=i?i[1]:""):(i=e.text.match(/^{{(?:[\^]|#\*?)?([^\s}]+)/),this.tag_check=i?i[1]:"","{{#>"===e.text&&">"===this.tag_check&&null!==e.next&&(this.tag_check=e.next.text)),this.tag_check=this.tag_check.toLowerCase(),e.type===a.COMMENT&&(this.tag_complete=!0),this.is_start_tag="/"!==this.tag_check.charAt(0),this.tag_name=this.is_start_tag?this.tag_check:this.tag_check.substr(1),this.is_end_tag=!this.is_start_tag||e.closed&&"/>"===e.closed.text,this.is_end_tag=this.is_end_tag||"{"===this.tag_start_char&&(this.text.length<3||/[^#\^]/.test(this.text.charAt(2)))):this.tag_complete=!0};c.prototype._get_tag_open_token=function(t){var e=new f(this._tag_stack.get_parser_token(),t);return e.alignment_size=this._options.wrap_attributes_indent_size,e.is_end_tag=e.is_end_tag||h(e.tag_check,this._options.void_elements),e.is_empty_element=e.tag_complete||e.is_start_tag&&e.is_end_tag,e.is_unformatted=!e.tag_complete&&h(e.tag_check,this._options.unformatted),e.is_content_unformatted=!e.is_empty_element&&h(e.tag_check,this._options.content_unformatted),e.is_inline_element=h(e.tag_name,this._options.inline)||"{"===e.tag_start_char,e},c.prototype._set_tag_position=function(t,e,i,n,_){if(i.is_empty_element||(i.is_end_tag?i.start_tag_token=this._tag_stack.try_pop(i.tag_name):(this._do_optional_end_element(i)&&(i.is_inline_element||t.print_newline(!1)),this._tag_stack.record_tag(i),"script"!==i.tag_name&&"style"!==i.tag_name||i.is_unformatted||i.is_content_unformatted||(i.custom_beautifier_name=function(t,e){var i=null,n=null;return e.closed?("script"===t?i="text/javascript":"style"===t&&(i="text/css"),(i=function(t){for(var e=null,i=t.next;i.type!==a.EOF&&t.closed!==i;){if(i.type===a.ATTRIBUTE&&"type"===i.text){i.next&&i.next.type===a.EQUALS&&i.next.next&&i.next.next.type===a.VALUE&&(e=i.next.next.text);break}i=i.next}return e}(e)||i).search("text/css")>-1?n="css":i.search(/module|((text|application|dojo)\/(x-)?(javascript|ecmascript|jscript|livescript|(ld\+)?json|method|aspect))/)>-1?n="javascript":i.search(/(text|application|dojo)\/(x-)?(html)/)>-1?n="html":i.search(/test\/null/)>-1&&(n="null"),n):null}(i.tag_check,e)))),h(i.tag_check,this._options.extra_liners)&&(t.print_newline(!1),t._output.just_added_blankline()||t.print_newline(!0)),i.is_empty_element)"{"===i.tag_start_char&&"else"===i.tag_check&&(this._tag_stack.indent_to_tag(["if","unless","each"]),i.indent_content=!0,t.current_line_has_match(/{{#if/)||t.print_newline(!1)),"!--"===i.tag_name&&_.type===a.TAG_CLOSE&&n.is_end_tag&&-1===i.text.indexOf("\n")||(i.is_inline_element||i.is_unformatted||t.print_newline(!1),this._calcluate_parent_multiline(t,i));else if(i.is_end_tag){var s=!1;s=(s=i.start_tag_token&&i.start_tag_token.multiline_content)||!i.is_inline_element&&!(n.is_inline_element||n.is_unformatted)&&!(_.type===a.TAG_CLOSE&&i.start_tag_token===n)&&"TK_CONTENT"!==_.type,(i.is_content_unformatted||i.is_unformatted)&&(s=!1),s&&t.print_newline(!1)}else i.indent_content=!i.custom_beautifier_name,"<"===i.tag_start_char&&("html"===i.tag_name?i.indent_content=this._options.indent_inner_html:"head"===i.tag_name?i.indent_content=this._options.indent_head_inner_html:"body"===i.tag_name&&(i.indent_content=this._options.indent_body_inner_html)),i.is_inline_element||i.is_unformatted||"TK_CONTENT"===_.type&&!i.is_content_unformatted||t.print_newline(!1),this._calcluate_parent_multiline(t,i)},c.prototype._calcluate_parent_multiline=function(t,e){!e.parent||!t._output.just_added_newline()||(e.is_inline_element||e.is_unformatted)&&e.parent.is_inline_element||(e.parent.multiline_content=!0)};var d=["address","article","aside","blockquote","details","div","dl","fieldset","figcaption","figure","footer","form","h1","h2","h3","h4","h5","h6","header","hr","main","nav","ol","p","pre","section","table","ul"],g=["a","audio","del","ins","map","noscript","video"];c.prototype._do_optional_end_element=function(t){var e=null;if(!t.is_empty_element&&t.is_start_tag&&t.parent){if("body"===t.tag_name)e=e||this._tag_stack.try_pop("head");else if("li"===t.tag_name)e=e||this._tag_stack.try_pop("li",["ol","ul"]);else if("dd"===t.tag_name||"dt"===t.tag_name)e=(e=e||this._tag_stack.try_pop("dt",["dl"]))||this._tag_stack.try_pop("dd",["dl"]);else 
if("p"===t.parent.tag_name&&-1!==d.indexOf(t.tag_name)){var i=t.parent.parent;i&&-1!==g.indexOf(i.tag_name)||(e=e||this._tag_stack.try_pop("p"))}else"rp"===t.tag_name||"rt"===t.tag_name?e=(e=e||this._tag_stack.try_pop("rt",["ruby","rtc"]))||this._tag_stack.try_pop("rp",["ruby","rtc"]):"optgroup"===t.tag_name?e=e||this._tag_stack.try_pop("optgroup",["select"]):"option"===t.tag_name?e=e||this._tag_stack.try_pop("option",["select","datalist","optgroup"]):"colgroup"===t.tag_name?e=e||this._tag_stack.try_pop("caption",["table"]):"thead"===t.tag_name?e=(e=e||this._tag_stack.try_pop("caption",["table"]))||this._tag_stack.try_pop("colgroup",["table"]):"tbody"===t.tag_name||"tfoot"===t.tag_name?e=(e=(e=(e=e||this._tag_stack.try_pop("caption",["table"]))||this._tag_stack.try_pop("colgroup",["table"]))||this._tag_stack.try_pop("thead",["table"]))||this._tag_stack.try_pop("tbody",["table"]):"tr"===t.tag_name?e=(e=(e=e||this._tag_stack.try_pop("caption",["table"]))||this._tag_stack.try_pop("colgroup",["table"]))||this._tag_stack.try_pop("tr",["table","thead","tbody","tfoot"]):"th"!==t.tag_name&&"td"!==t.tag_name||(e=(e=e||this._tag_stack.try_pop("td",["table","thead","tbody","tfoot","tr"]))||this._tag_stack.try_pop("th",["table","thead","tbody","tfoot","tr"]));return t.parent=this._tag_stack.get_parser_token(),e}},t.exports.K=c},391:function(t,e,i){var n=i(843).K,_=i(402).E;t.exports=function(t,e,i,_){return new n(t,e,i,_).beautify()},t.exports.defaultOptions=function(){return new _}},402:function(t,e,i){var n=i(915).Ei;function _(t){n.call(this,t,"html"),1===this.templating.length&&"auto"===this.templating[0]&&(this.templating=["django","erb","handlebars","php"]),this.indent_inner_html=this._get_boolean("indent_inner_html"),this.indent_body_inner_html=this._get_boolean("indent_body_inner_html",!0),this.indent_head_inner_html=this._get_boolean("indent_head_inner_html",!0),this.indent_handlebars=this._get_boolean("indent_handlebars",!0),this.wrap_attributes=this._get_selection("wrap_attributes",["auto","force","force-aligned","force-expand-multiline","aligned-multiple","preserve","preserve-aligned"]),this.wrap_attributes_indent_size=this._get_number("wrap_attributes_indent_size",this.indent_size),this.extra_liners=this._get_array("extra_liners",["head","body","/html"]),this.inline=this._get_array("inline",["a","abbr","area","audio","b","bdi","bdo","br","button","canvas","cite","code","data","datalist","del","dfn","em","embed","i","iframe","img","input","ins","kbd","keygen","label","map","mark","math","meter","noscript","object","output","progress","q","ruby","s","samp","select","small","span","strong","sub","sup","svg","template","textarea","time","u","var","video","wbr","text","acronym","big","strike","tt"]),this.void_elements=this._get_array("void_elements",["area","base","br","col","embed","hr","img","input","keygen","link","menuitem","meta","param","source","track","wbr","!doctype","?xml","basefont","isindex"]),this.unformatted=this._get_array("unformatted",[]),this.content_unformatted=this._get_array("content_unformatted",["pre","textarea"]),this.unformatted_content_delimiter=this._get_characters("unformatted_content_delimiter"),this.indent_scripts=this._get_selection("indent_scripts",["normal","keep","separate"])}_.prototype=new n,t.exports.E=_},41:function(t,e,i){var 
n=i(147).d,_=i(147).o,s=i(7).t,a=i(800).A,u=i(348).c,r={TAG_OPEN:"TK_TAG_OPEN",TAG_CLOSE:"TK_TAG_CLOSE",ATTRIBUTE:"TK_ATTRIBUTE",EQUALS:"TK_EQUALS",VALUE:"TK_VALUE",COMMENT:"TK_COMMENT",TEXT:"TK_TEXT",UNKNOWN:"TK_UNKNOWN",START:_.START,RAW:_.RAW,EOF:_.EOF},o=new s(/<\!--/,/-->/),h=function(t,e){n.call(this,t,e),this._current_tag_name="";var i=new a(this._input).read_options(this._options),_=new u(this._input);if(this.__patterns={word:i.until(/[\n\r\t <]/),single_quote:i.until_after(/'/),double_quote:i.until_after(/"/),attribute:i.until(/[\n\r\t =>]|\/>/),element_name:i.until(/[\n\r\t >\/]/),handlebars_comment:_.starting_with(/{{!--/).until_after(/--}}/),handlebars:_.starting_with(/{{/).until_after(/}}/),handlebars_open:_.until(/[\n\r\t }]/),handlebars_raw_close:_.until(/}}/),comment:_.starting_with(/<!--/).until_after(/-->/),cdata:_.starting_with(/<!\[CDATA\[/).until_after(/]]>/),conditional_comment:_.starting_with(/<!\[/).until_after(/]>/),processing:_.starting_with(/<\?/).until_after(/\?>/)},this._options.indent_handlebars&&(this.__patterns.word=this.__patterns.word.exclude("handlebars")),this._unformatted_content_delimiter=null,this._options.unformatted_content_delimiter){var s=this._input.get_literal_regexp(this._options.unformatted_content_delimiter);this.__patterns.unformatted_content_delimiter=_.matching(s).until_after(s)}};(h.prototype=new n)._is_comment=function(t){return!1},h.prototype._is_opening=function(t){return t.type===r.TAG_OPEN},h.prototype._is_closing=function(t,e){return t.type===r.TAG_CLOSE&&e&&((">"===t.text||"/>"===t.text)&&"<"===e.text[0]||"}}"===t.text&&"{"===e.text[0]&&"{"===e.text[1])},h.prototype._reset=function(){this._current_tag_name=""},h.prototype._get_next_token=function(t,e){var i=null;this._readWhitespace();var n=this._input.peek();return null===n?this._create_token(r.EOF,""):i=(i=(i=(i=(i=(i=(i=(i=(i=i||this._read_open_handlebars(n,e))||this._read_attribute(n,t,e))||this._read_close(n,e))||this._read_raw_content(n,t,e))||this._read_content_word(n))||this._read_comment_or_cdata(n))||this._read_processing(n))||this._read_open(n,e))||this._create_token(r.UNKNOWN,this._input.next())},h.prototype._read_comment_or_cdata=function(t){var e=null,i=null,n=null;return"<"===t&&("!"===this._input.peek(1)&&((i=this.__patterns.comment.read())?(n=o.get_directives(i))&&"start"===n.ignore&&(i+=o.readIgnored(this._input)):i=this.__patterns.cdata.read()),i&&((e=this._create_token(r.COMMENT,i)).directives=n)),e},h.prototype._read_processing=function(t){var e=null,i=null;if("<"===t){var n=this._input.peek(1);"!"!==n&&"?"!==n||(i=(i=this.__patterns.conditional_comment.read())||this.__patterns.processing.read()),i&&((e=this._create_token(r.COMMENT,i)).directives=null)}return e},h.prototype._read_open=function(t,e){var i=null,n=null;return e||"<"===t&&(i=this._input.next(),"/"===this._input.peek()&&(i+=this._input.next()),i+=this.__patterns.element_name.read(),n=this._create_token(r.TAG_OPEN,i)),n},h.prototype._read_open_handlebars=function(t,e){var i=null,n=null;return e||this._options.indent_handlebars&&"{"===t&&"{"===this._input.peek(1)&&("!"===this._input.peek(2)?(i=(i=this.__patterns.handlebars_comment.read())||this.__patterns.handlebars.read(),n=this._create_token(r.COMMENT,i)):(i=this.__patterns.handlebars_open.read(),n=this._create_token(r.TAG_OPEN,i))),n},h.prototype._read_close=function(t,e){var i=null,n=null;return e&&("<"===e.text[0]&&(">"===t||"/"===t&&">"===this._input.peek(1))?(i=this._input.next(),"/"===t&&(i+=this._input.next()),n=this._create_token(r.TAG_CLOSE,i)):"{"===e.text[0]&&"}"===t&&"}"===this._input.peek(1)&&(this._input.next(),this._input.next(),n=this._create_token(r.TAG_CLOSE,"}}"))),n},h.prototype._read_attribute=function(t,e,i){var n=null,_="";if(i&&"<"===i.text[0])if("="===t)n=this._create_token(r.EQUALS,this._input.next());else if('"'===t||"'"===t){var s=this._input.next();s+='"'===t?this.__patterns.double_quote.read():this.__patterns.single_quote.read(),n=this._create_token(r.VALUE,s)}else(_=this.__patterns.attribute.read())&&(n=e.type===r.EQUALS?this._create_token(r.VALUE,_):this._create_token(r.ATTRIBUTE,_));return n},h.prototype._is_content_unformatted=function(t){return-1===this._options.void_elements.indexOf(t)&&(-1!==this._options.content_unformatted.indexOf(t)||-1!==this._options.unformatted.indexOf(t))},h.prototype._read_raw_content=function(t,e,i){var n="";if(i&&"{"===i.text[0])n=this.__patterns.handlebars_raw_close.read();else if(e.type===r.TAG_CLOSE&&"<"===e.opened.text[0]&&"/"!==e.text[0]){var _=e.opened.text.substr(1).toLowerCase();if("script"===_||"style"===_){var s=this._read_comment_or_cdata(t);if(s)return s.type=r.TEXT,s;n=this._input.readUntil(new RegExp("</"+_+"[\\n\\r\\t ]*?>","ig"))}else this._is_content_unformatted(_)&&(n=this._input.readUntil(new RegExp("</"+_+"[\\n\\r\\t ]*?>","ig")))}return n?this._create_token(r.TEXT,n):null},h.prototype._read_content_word=function(t){var e="";if(this._options.unformatted_content_delimiter&&t===this._options.unformatted_content_delimiter[0]&&(e=this.__patterns.unformatted_content_delimiter.read()),e||(e=this.__patterns.word.read()),e)return this._create_token(r.TEXT,e)},t.exports.d=h,t.exports.o=r},772:function(t,e,i){var n=i(82),_=i(968),s=i(391);function a(t,e,i,a){return s(t,e,i=i||n,a=a||_)}a.defaultOptions=s.defaultOptions,t.exports.js=n,t.exports.css=_,t.exports.html=a},273:function(t,e){var 
i="\\xaa\\xb5\\xba\\xc0-\\xd6\\xd8-\\xf6\\xf8-\\u02c1\\u02c6-\\u02d1\\u02e0-\\u02e4\\u02ec\\u02ee\\u0370-\\u0374\\u0376\\u0377\\u037a-\\u037d\\u0386\\u0388-\\u038a\\u038c\\u038e-\\u03a1\\u03a3-\\u03f5\\u03f7-\\u0481\\u048a-\\u0527\\u0531-\\u0556\\u0559\\u0561-\\u0587\\u05d0-\\u05ea\\u05f0-\\u05f2\\u0620-\\u064a\\u066e\\u066f\\u0671-\\u06d3\\u06d5\\u06e5\\u06e6\\u06ee\\u06ef\\u06fa-\\u06fc\\u06ff\\u0710\\u0712-\\u072f\\u074d-\\u07a5\\u07b1\\u07ca-\\u07ea\\u07f4\\u07f5\\u07fa\\u0800-\\u0815\\u081a\\u0824\\u0828\\u0840-\\u0858\\u08a0\\u08a2-\\u08ac\\u0904-\\u0939\\u093d\\u0950\\u0958-\\u0961\\u0971-\\u0977\\u0979-\\u097f\\u0985-\\u098c\\u098f\\u0990\\u0993-\\u09a8\\u09aa-\\u09b0\\u09b2\\u09b6-\\u09b9\\u09bd\\u09ce\\u09dc\\u09dd\\u09df-\\u09e1\\u09f0\\u09f1\\u0a05-\\u0a0a\\u0a0f\\u0a10\\u0a13-\\u0a28\\u0a2a-\\u0a30\\u0a32\\u0a33\\u0a35\\u0a36\\u0a38\\u0a39\\u0a59-\\u0a5c\\u0a5e\\u0a72-\\u0a74\\u0a85-\\u0a8d\\u0a8f-\\u0a91\\u0a93-\\u0aa8\\u0aaa-\\u0ab0\\u0ab2\\u0ab3\\u0ab5-\\u0ab9\\u0abd\\u0ad0\\u0ae0\\u0ae1\\u0b05-\\u0b0c\\u0b0f\\u0b10\\u0b13-\\u0b28\\u0b2a-\\u0b30\\u0b32\\u0b33\\u0b35-\\u0b39\\u0b3d\\u0b5c\\u0b5d\\u0b5f-\\u0b61\\u0b71\\u0b83\\u0b85-\\u0b8a\\u0b8e-\\u0b90\\u0b92-\\u0b95\\u0b99\\u0b9a\\u0b9c\\u0b9e\\u0b9f\\u0ba3\\u0ba4\\u0ba8-\\u0baa\\u0bae-\\u0bb9\\u0bd0\\u0c05-\\u0c0c\\u0c0e-\\u0c10\\u0c12-\\u0c28\\u0c2a-\\u0c33\\u0c35-\\u0c39\\u0c3d\\u0c58\\u0c59\\u0c60\\u0c61\\u0c85-\\u0c8c\\u0c8e-\\u0c90\\u0c92-\\u0ca8\\u0caa-\\u0cb3\\u0cb5-\\u0cb9\\u0cbd\\u0cde\\u0ce0\\u0ce1\\u0cf1\\u0cf2\\u0d05-\\u0d0c\\u0d0e-\\u0d10\\u0d12-\\u0d3a\\u0d3d\\u0d4e\\u0d60\\u0d61\\u0d7a-\\u0d7f\\u0d85-\\u0d96\\u0d9a-\\u0db1\\u0db3-\\u0dbb\\u0dbd\\u0dc0-\\u0dc6\\u0e01-\\u0e30\\u0e32\\u0e33\\u0e40-\\u0e46\\u0e81\\u0e82\\u0e84\\u0e87\\u0e88\\u0e8a\\u0e8d\\u0e94-\\u0e97\\u0e99-\\u0e9f\\u0ea1-\\u0ea3\\u0ea5\\u0ea7\\u0eaa\\u0eab\\u0ead-\\u0eb0\\u0eb2\\u0eb3\\u0ebd\\u0ec0-\\u0ec4\\u0ec6\\u0edc-\\u0edf\\u0f00\\u0f40-\\u0f47\\u0f49-\\u0f6c\\u0f88-\\u0f8c\\u1000-\\u102a\\u103f\\u1050-\\u1055\\u105a-\\u105d\\u1061\\u1065\\u1066\\u106e-\\u1070\\u1075-\\u1081\\u108e\\u10a0-\\u10c5\\u10c7\\u10cd\\u10d0-\\u10fa\\u10fc-\\u1248\\u124a-\\u124d\\u1250-\\u1256\\u1258\\u125a-\\u125d\\u1260-\\u1288\\u128a-\\u128d\\u1290-\\u12b0\\u12b2-\\u12b5\\u12b8-\\u12be\\u12c0\\u12c2-\\u12c5\\u12c8-\\u12d6\\u12d8-\\u1310\\u1312-\\u1315\\u1318-\\u135a\\u1380-\\u138f\\u13a0-\\u13f4\\u1401-\\u166c\\u166f-\\u167f\\u1681-\\u169a\\u16a0-\\u16ea\\u16ee-\\u16f0\\u1700-\\u170c\\u170e-\\u1711\\u1720-\\u1731\\u1740-\\u1751\\u1760-\\u176c\\u176e-\\u1770\\u1780-\\u17b3\\u17d7\\u17dc\\u1820-\\u1877\\u1880-\\u18a8\\u18aa\\u18b0-\\u18f5\\u1900-\\u191c\\u1950-\\u196d\\u1970-\\u1974\\u1980-\\u19ab\\u19c1-\\u19c7\\u1a00-\\u1a16\\u1a20-\\u1a54\\u1aa7\\u1b05-\\u1b33\\u1b45-\\u1b4b\\u1b83-\\u1ba0\\u1bae\\u1baf\\u1bba-\\u1be5\\u1c00-\\u1c23\\u1c4d-\\u1c4f\\u1c5a-\\u1c7d\\u1ce9-\\u1cec\\u1cee-\\u1cf1\\u1cf5\\u1cf6\\u1d00-\\u1dbf\\u1e00-\\u1f15\\u1f18-\\u1f1d\\u1f20-\\u1f45\\u1f48-\\u1f4d\\u1f50-\\u1f57\\u1f59\\u1f5b\\u1f5d\\u1f5f-\\u1f7d\\u1f80-\\u1fb4\\u1fb6-\\u1fbc\\u1fbe\\u1fc2-\\u1fc4\\u1fc6-\\u1fcc\\u1fd0-\\u1fd3\\u1fd6-\\u1fdb\\u1fe0-\\u1fec\\u1ff2-\\u1ff4\\u1ff6-\\u1ffc\\u2071\\u207f\\u2090-\\u209c\\u2102\\u2107\\u210a-\\u2113\\u2115\\u2119-\\u211d\\u2124\\u2126\\u2128\\u212a-\\u212d\\u212f-\\u2139\\u213c-\\u213f\\u2145-\\u2149\\u214e\\u2160-\\u2188\\u2c00-\\u2c2e\\u2c30-\\u2c5e\\u2c60-\\u2ce4\\u2ceb-\\u2cee\\u2cf2\\u2cf3\\u2d00-\\u2d25\\u2d27\\u2d2d\\u2d30-\\u2d67\\u2d6f\\u2d80-\\u2d96\\u2da0-\\u2da6\\u2da8-\\u2dae\\u2db0-\\u2db6\\u2db8-\\u2dbe\\u2dc0-\\u2dc6
\\u2dc8-\\u2dce\\u2dd0-\\u2dd6\\u2dd8-\\u2dde\\u2e2f\\u3005-\\u3007\\u3021-\\u3029\\u3031-\\u3035\\u3038-\\u303c\\u3041-\\u3096\\u309d-\\u309f\\u30a1-\\u30fa\\u30fc-\\u30ff\\u3105-\\u312d\\u3131-\\u318e\\u31a0-\\u31ba\\u31f0-\\u31ff\\u3400-\\u4db5\\u4e00-\\u9fcc\\ua000-\\ua48c\\ua4d0-\\ua4fd\\ua500-\\ua60c\\ua610-\\ua61f\\ua62a\\ua62b\\ua640-\\ua66e\\ua67f-\\ua697\\ua6a0-\\ua6ef\\ua717-\\ua71f\\ua722-\\ua788\\ua78b-\\ua78e\\ua790-\\ua793\\ua7a0-\\ua7aa\\ua7f8-\\ua801\\ua803-\\ua805\\ua807-\\ua80a\\ua80c-\\ua822\\ua840-\\ua873\\ua882-\\ua8b3\\ua8f2-\\ua8f7\\ua8fb\\ua90a-\\ua925\\ua930-\\ua946\\ua960-\\ua97c\\ua984-\\ua9b2\\ua9cf\\uaa00-\\uaa28\\uaa40-\\uaa42\\uaa44-\\uaa4b\\uaa60-\\uaa76\\uaa7a\\uaa80-\\uaaaf\\uaab1\\uaab5\\uaab6\\uaab9-\\uaabd\\uaac0\\uaac2\\uaadb-\\uaadd\\uaae0-\\uaaea\\uaaf2-\\uaaf4\\uab01-\\uab06\\uab09-\\uab0e\\uab11-\\uab16\\uab20-\\uab26\\uab28-\\uab2e\\uabc0-\\uabe2\\uac00-\\ud7a3\\ud7b0-\\ud7c6\\ud7cb-\\ud7fb\\uf900-\\ufa6d\\ufa70-\\ufad9\\ufb00-\\ufb06\\ufb13-\\ufb17\\ufb1d\\ufb1f-\\ufb28\\ufb2a-\\ufb36\\ufb38-\\ufb3c\\ufb3e\\ufb40\\ufb41\\ufb43\\ufb44\\ufb46-\\ufbb1\\ufbd3-\\ufd3d\\ufd50-\\ufd8f\\ufd92-\\ufdc7\\ufdf0-\\ufdfb\\ufe70-\\ufe74\\ufe76-\\ufefc\\uff21-\\uff3a\\uff41-\\uff5a\\uff66-\\uffbe\\uffc2-\\uffc7\\uffca-\\uffcf\\uffd2-\\uffd7\\uffda-\\uffdc",n="(?:\\\\u[0-9a-fA-F]{4}|[\\x23\\x24\\x40\\x41-\\x5a\\x5f\\x61-\\x7a"+i+"])";e.identifier=new RegExp(n+"(?:\\\\u[0-9a-fA-F]{4}|[\\x24\\x30-\\x39\\x41-\\x5a\\x5f\\x61-\\x7a\\xaa\\xb5\\xba\\xc0-\\xd6\\xd8-\\xf6\\xf8-\\u02c1\\u02c6-\\u02d1\\u02e0-\\u02e4\\u02ec\\u02ee\\u0370-\\u0374\\u0376\\u0377\\u037a-\\u037d\\u0386\\u0388-\\u038a\\u038c\\u038e-\\u03a1\\u03a3-\\u03f5\\u03f7-\\u0481\\u048a-\\u0527\\u0531-\\u0556\\u0559\\u0561-\\u0587\\u05d0-\\u05ea\\u05f0-\\u05f2\\u0620-\\u064a\\u066e\\u066f\\u0671-\\u06d3\\u06d5\\u06e5\\u06e6\\u06ee\\u06ef\\u06fa-\\u06fc\\u06ff\\u0710\\u0712-\\u072f\\u074d-\\u07a5\\u07b1\\u07ca-\\u07ea\\u07f4\\u07f5\\u07fa\\u0800-\\u0815\\u081a\\u0824\\u0828\\u0840-\\u0858\\u08a0\\u08a2-\\u08ac\\u0904-\\u0939\\u093d\\u0950\\u0958-\\u0961\\u0971-\\u0977\\u0979-\\u097f\\u0985-\\u098c\\u098f\\u0990\\u0993-\\u09a8\\u09aa-\\u09b0\\u09b2\\u09b6-\\u09b9\\u09bd\\u09ce\\u09dc\\u09dd\\u09df-\\u09e1\\u09f0\\u09f1\\u0a05-\\u0a0a\\u0a0f\\u0a10\\u0a13-\\u0a28\\u0a2a-\\u0a30\\u0a32\\u0a33\\u0a35\\u0a36\\u0a38\\u0a39\\u0a59-\\u0a5c\\u0a5e\\u0a72-\\u0a74\\u0a85-\\u0a8d\\u0a8f-\\u0a91\\u0a93-\\u0aa8\\u0aaa-\\u0ab0\\u0ab2\\u0ab3\\u0ab5-\\u0ab9\\u0abd\\u0ad0\\u0ae0\\u0ae1\\u0b05-\\u0b0c\\u0b0f\\u0b10\\u0b13-\\u0b28\\u0b2a-\\u0b30\\u0b32\\u0b33\\u0b35-\\u0b39\\u0b3d\\u0b5c\\u0b5d\\u0b5f-\\u0b61\\u0b71\\u0b83\\u0b85-\\u0b8a\\u0b8e-\\u0b90\\u0b92-\\u0b95\\u0b99\\u0b9a\\u0b9c\\u0b9e\\u0b9f\\u0ba3\\u0ba4\\u0ba8-\\u0baa\\u0bae-\\u0bb9\\u0bd0\\u0c05-\\u0c0c\\u0c0e-\\u0c10\\u0c12-\\u0c28\\u0c2a-\\u0c33\\u0c35-\\u0c39\\u0c3d\\u0c58\\u0c59\\u0c60\\u0c61\\u0c85-\\u0c8c\\u0c8e-\\u0c90\\u0c92-\\u0ca8\\u0caa-\\u0cb3\\u0cb5-\\u0cb9\\u0cbd\\u0cde\\u0ce0\\u0ce1\\u0cf1\\u0cf2\\u0d05-\\u0d0c\\u0d0e-\\u0d10\\u0d12-\\u0d3a\\u0d3d\\u0d4e\\u0d60\\u0d61\\u0d7a-\\u0d7f\\u0d85-\\u0d96\\u0d9a-\\u0db1\\u0db3-\\u0dbb\\u0dbd\\u0dc0-\\u0dc6\\u0e01-\\u0e30\\u0e32\\u0e33\\u0e40-\\u0e46\\u0e81\\u0e82\\u0e84\\u0e87\\u0e88\\u0e8a\\u0e8d\\u0e94-\\u0e97\\u0e99-\\u0e9f\\u0ea1-\\u0ea3\\u0ea5\\u0ea7\\u0eaa\\u0eab\\u0ead-\\u0eb0\\u0eb2\\u0eb3\\u0ebd\\u0ec0-\\u0ec4\\u0ec6\\u0edc-\\u0edf\\u0f00\\u0f40-\\u0f47\\u0f49-\\u0f6c\\u0f88-\\u0f8c\\u1000-\\u102a\\u103f\\u1050-\\u1055\\u105a-\\u105d\\u1061\\u1065\\u1066\\u106e-\\u1070\\u1075-\\u1081\\u108e\\u10a0-\\
u10c5\\u10c7\\u10cd\\u10d0-\\u10fa\\u10fc-\\u1248\\u124a-\\u124d\\u1250-\\u1256\\u1258\\u125a-\\u125d\\u1260-\\u1288\\u128a-\\u128d\\u1290-\\u12b0\\u12b2-\\u12b5\\u12b8-\\u12be\\u12c0\\u12c2-\\u12c5\\u12c8-\\u12d6\\u12d8-\\u1310\\u1312-\\u1315\\u1318-\\u135a\\u1380-\\u138f\\u13a0-\\u13f4\\u1401-\\u166c\\u166f-\\u167f\\u1681-\\u169a\\u16a0-\\u16ea\\u16ee-\\u16f0\\u1700-\\u170c\\u170e-\\u1711\\u1720-\\u1731\\u1740-\\u1751\\u1760-\\u176c\\u176e-\\u1770\\u1780-\\u17b3\\u17d7\\u17dc\\u1820-\\u1877\\u1880-\\u18a8\\u18aa\\u18b0-\\u18f5\\u1900-\\u191c\\u1950-\\u196d\\u1970-\\u1974\\u1980-\\u19ab\\u19c1-\\u19c7\\u1a00-\\u1a16\\u1a20-\\u1a54\\u1aa7\\u1b05-\\u1b33\\u1b45-\\u1b4b\\u1b83-\\u1ba0\\u1bae\\u1baf\\u1bba-\\u1be5\\u1c00-\\u1c23\\u1c4d-\\u1c4f\\u1c5a-\\u1c7d\\u1ce9-\\u1cec\\u1cee-\\u1cf1\\u1cf5\\u1cf6\\u1d00-\\u1dbf\\u1e00-\\u1f15\\u1f18-\\u1f1d\\u1f20-\\u1f45\\u1f48-\\u1f4d\\u1f50-\\u1f57\\u1f59\\u1f5b\\u1f5d\\u1f5f-\\u1f7d\\u1f80-\\u1fb4\\u1fb6-\\u1fbc\\u1fbe\\u1fc2-\\u1fc4\\u1fc6-\\u1fcc\\u1fd0-\\u1fd3\\u1fd6-\\u1fdb\\u1fe0-\\u1fec\\u1ff2-\\u1ff4\\u1ff6-\\u1ffc\\u2071\\u207f\\u2090-\\u209c\\u2102\\u2107\\u210a-\\u2113\\u2115\\u2119-\\u211d\\u2124\\u2126\\u2128\\u212a-\\u212d\\u212f-\\u2139\\u213c-\\u213f\\u2145-\\u2149\\u214e\\u2160-\\u2188\\u2c00-\\u2c2e\\u2c30-\\u2c5e\\u2c60-\\u2ce4\\u2ceb-\\u2cee\\u2cf2\\u2cf3\\u2d00-\\u2d25\\u2d27\\u2d2d\\u2d30-\\u2d67\\u2d6f\\u2d80-\\u2d96\\u2da0-\\u2da6\\u2da8-\\u2dae\\u2db0-\\u2db6\\u2db8-\\u2dbe\\u2dc0-\\u2dc6\\u2dc8-\\u2dce\\u2dd0-\\u2dd6\\u2dd8-\\u2dde\\u2e2f\\u3005-\\u3007\\u3021-\\u3029\\u3031-\\u3035\\u3038-\\u303c\\u3041-\\u3096\\u309d-\\u309f\\u30a1-\\u30fa\\u30fc-\\u30ff\\u3105-\\u312d\\u3131-\\u318e\\u31a0-\\u31ba\\u31f0-\\u31ff\\u3400-\\u4db5\\u4e00-\\u9fcc\\ua000-\\ua48c\\ua4d0-\\ua4fd\\ua500-\\ua60c\\ua610-\\ua61f\\ua62a\\ua62b\\ua640-\\ua66e\\ua67f-\\ua697\\ua6a0-\\ua6ef\\ua717-\\ua71f\\ua722-\\ua788\\ua78b-\\ua78e\\ua790-\\ua793\\ua7a0-\\ua7aa\\ua7f8-\\ua801\\ua803-\\ua805\\ua807-\\ua80a\\ua80c-\\ua822\\ua840-\\ua873\\ua882-\\ua8b3\\ua8f2-\\ua8f7\\ua8fb\\ua90a-\\ua925\\ua930-\\ua946\\ua960-\\ua97c\\ua984-\\ua9b2\\ua9cf\\uaa00-\\uaa28\\uaa40-\\uaa42\\uaa44-\\uaa4b\\uaa60-\\uaa76\\uaa7a\\uaa80-\\uaaaf\\uaab1\\uaab5\\uaab6\\uaab9-\\uaabd\\uaac0\\uaac2\\uaadb-\\uaadd\\uaae0-\\uaaea\\uaaf2-\\uaaf4\\uab01-\\uab06\\uab09-\\uab0e\\uab11-\\uab16\\uab20-\\uab26\\uab28-\\uab2e\\uabc0-\\uabe2\\uac00-\\ud7a3\\ud7b0-\\ud7c6\\ud7cb-\\ud7fb\\uf900-\\ufa6d\\ufa70-\\ufad9\\ufb00-\\ufb06\\ufb13-\\ufb17\\ufb1d\\ufb1f-\\ufb28\\ufb2a-\\ufb36\\ufb38-\\ufb3c\\ufb3e\\ufb40\\ufb41\\ufb43\\ufb44\\ufb46-\\ufbb1\\ufbd3-\\ufd3d\\ufd50-\\ufd8f\\ufd92-\\ufdc7\\ufdf0-\\ufdfb\\ufe70-\\ufe74\\ufe76-\\ufefc\\uff21-\\uff3a\\uff41-\\uff5a\\uff66-\\uffbe\\uffc2-\\uffc7\\uffca-\\uffcf\\uffd2-\\uffd7\\uffda-\\uffdc\\u0300-\\u036f\\u0483-\\u0487\\u0591-\\u05bd\\u05bf\\u05c1\\u05c2\\u05c4\\u05c5\\u05c7\\u0610-\\u061a\\u0620-\\u0649\\u0672-\\u06d3\\u06e7-\\u06e8\\u06fb-\\u06fc\\u0730-\\u074a\\u0800-\\u0814\\u081b-\\u0823\\u0825-\\u0827\\u0829-\\u082d\\u0840-\\u0857\\u08e4-\\u08fe\\u0900-\\u0903\\u093a-\\u093c\\u093e-\\u094f\\u0951-\\u0957\\u0962-\\u0963\\u0966-\\u096f\\u0981-\\u0983\\u09bc\\u09be-\\u09c4\\u09c7\\u09c8\\u09d7\\u09df-\\u09e0\\u0a01-\\u0a03\\u0a3c\\u0a3e-\\u0a42\\u0a47\\u0a48\\u0a4b-\\u0a4d\\u0a51\\u0a66-\\u0a71\\u0a75\\u0a81-\\u0a83\\u0abc\\u0abe-\\u0ac5\\u0ac7-\\u0ac9\\u0acb-\\u0acd\\u0ae2-\\u0ae3\\u0ae6-\\u0aef\\u0b01-\\u0b03\\u0b3c\\u0b3e-\\u0b44\\u0b47\\u0b48\\u0b4b-\\u0b4d\\u0b56\\u0b57\\u0b5f-\\u0b60\\u0b66-\\u0b6f\\u0b82\\u0bbe-\\u0bc2\\u0bc6-\\u0bc8\\u0bca
-\\u0bcd\\u0bd7\\u0be6-\\u0bef\\u0c01-\\u0c03\\u0c46-\\u0c48\\u0c4a-\\u0c4d\\u0c55\\u0c56\\u0c62-\\u0c63\\u0c66-\\u0c6f\\u0c82\\u0c83\\u0cbc\\u0cbe-\\u0cc4\\u0cc6-\\u0cc8\\u0cca-\\u0ccd\\u0cd5\\u0cd6\\u0ce2-\\u0ce3\\u0ce6-\\u0cef\\u0d02\\u0d03\\u0d46-\\u0d48\\u0d57\\u0d62-\\u0d63\\u0d66-\\u0d6f\\u0d82\\u0d83\\u0dca\\u0dcf-\\u0dd4\\u0dd6\\u0dd8-\\u0ddf\\u0df2\\u0df3\\u0e34-\\u0e3a\\u0e40-\\u0e45\\u0e50-\\u0e59\\u0eb4-\\u0eb9\\u0ec8-\\u0ecd\\u0ed0-\\u0ed9\\u0f18\\u0f19\\u0f20-\\u0f29\\u0f35\\u0f37\\u0f39\\u0f41-\\u0f47\\u0f71-\\u0f84\\u0f86-\\u0f87\\u0f8d-\\u0f97\\u0f99-\\u0fbc\\u0fc6\\u1000-\\u1029\\u1040-\\u1049\\u1067-\\u106d\\u1071-\\u1074\\u1082-\\u108d\\u108f-\\u109d\\u135d-\\u135f\\u170e-\\u1710\\u1720-\\u1730\\u1740-\\u1750\\u1772\\u1773\\u1780-\\u17b2\\u17dd\\u17e0-\\u17e9\\u180b-\\u180d\\u1810-\\u1819\\u1920-\\u192b\\u1930-\\u193b\\u1951-\\u196d\\u19b0-\\u19c0\\u19c8-\\u19c9\\u19d0-\\u19d9\\u1a00-\\u1a15\\u1a20-\\u1a53\\u1a60-\\u1a7c\\u1a7f-\\u1a89\\u1a90-\\u1a99\\u1b46-\\u1b4b\\u1b50-\\u1b59\\u1b6b-\\u1b73\\u1bb0-\\u1bb9\\u1be6-\\u1bf3\\u1c00-\\u1c22\\u1c40-\\u1c49\\u1c5b-\\u1c7d\\u1cd0-\\u1cd2\\u1d00-\\u1dbe\\u1e01-\\u1f15\\u200c\\u200d\\u203f\\u2040\\u2054\\u20d0-\\u20dc\\u20e1\\u20e5-\\u20f0\\u2d81-\\u2d96\\u2de0-\\u2dff\\u3021-\\u3028\\u3099\\u309a\\ua640-\\ua66d\\ua674-\\ua67d\\ua69f\\ua6f0-\\ua6f1\\ua7f8-\\ua800\\ua806\\ua80b\\ua823-\\ua827\\ua880-\\ua881\\ua8b4-\\ua8c4\\ua8d0-\\ua8d9\\ua8f3-\\ua8f7\\ua900-\\ua909\\ua926-\\ua92d\\ua930-\\ua945\\ua980-\\ua983\\ua9b3-\\ua9c0\\uaa00-\\uaa27\\uaa40-\\uaa41\\uaa4c-\\uaa4d\\uaa50-\\uaa59\\uaa7b\\uaae0-\\uaae9\\uaaf2-\\uaaf3\\uabc0-\\uabe1\\uabec\\uabed\\uabf0-\\uabf9\\ufb20-\\ufb28\\ufe00-\\ufe0f\\ufe20-\\ufe26\\ufe33\\ufe34\\ufe4d-\\ufe4f\\uff10-\\uff19\\uff3f])*","g"),e.identifierStart=new RegExp(n),e.identifierMatch=new 
RegExp("(?:\\\\u[0-9a-fA-F]{4}|[\\x24\\x30-\\x39\\x41-\\x5a\\x5f\\x61-\\x7a"+i+"\\u0300-\\u036f\\u0483-\\u0487\\u0591-\\u05bd\\u05bf\\u05c1\\u05c2\\u05c4\\u05c5\\u05c7\\u0610-\\u061a\\u0620-\\u0649\\u0672-\\u06d3\\u06e7-\\u06e8\\u06fb-\\u06fc\\u0730-\\u074a\\u0800-\\u0814\\u081b-\\u0823\\u0825-\\u0827\\u0829-\\u082d\\u0840-\\u0857\\u08e4-\\u08fe\\u0900-\\u0903\\u093a-\\u093c\\u093e-\\u094f\\u0951-\\u0957\\u0962-\\u0963\\u0966-\\u096f\\u0981-\\u0983\\u09bc\\u09be-\\u09c4\\u09c7\\u09c8\\u09d7\\u09df-\\u09e0\\u0a01-\\u0a03\\u0a3c\\u0a3e-\\u0a42\\u0a47\\u0a48\\u0a4b-\\u0a4d\\u0a51\\u0a66-\\u0a71\\u0a75\\u0a81-\\u0a83\\u0abc\\u0abe-\\u0ac5\\u0ac7-\\u0ac9\\u0acb-\\u0acd\\u0ae2-\\u0ae3\\u0ae6-\\u0aef\\u0b01-\\u0b03\\u0b3c\\u0b3e-\\u0b44\\u0b47\\u0b48\\u0b4b-\\u0b4d\\u0b56\\u0b57\\u0b5f-\\u0b60\\u0b66-\\u0b6f\\u0b82\\u0bbe-\\u0bc2\\u0bc6-\\u0bc8\\u0bca-\\u0bcd\\u0bd7\\u0be6-\\u0bef\\u0c01-\\u0c03\\u0c46-\\u0c48\\u0c4a-\\u0c4d\\u0c55\\u0c56\\u0c62-\\u0c63\\u0c66-\\u0c6f\\u0c82\\u0c83\\u0cbc\\u0cbe-\\u0cc4\\u0cc6-\\u0cc8\\u0cca-\\u0ccd\\u0cd5\\u0cd6\\u0ce2-\\u0ce3\\u0ce6-\\u0cef\\u0d02\\u0d03\\u0d46-\\u0d48\\u0d57\\u0d62-\\u0d63\\u0d66-\\u0d6f\\u0d82\\u0d83\\u0dca\\u0dcf-\\u0dd4\\u0dd6\\u0dd8-\\u0ddf\\u0df2\\u0df3\\u0e34-\\u0e3a\\u0e40-\\u0e45\\u0e50-\\u0e59\\u0eb4-\\u0eb9\\u0ec8-\\u0ecd\\u0ed0-\\u0ed9\\u0f18\\u0f19\\u0f20-\\u0f29\\u0f35\\u0f37\\u0f39\\u0f41-\\u0f47\\u0f71-\\u0f84\\u0f86-\\u0f87\\u0f8d-\\u0f97\\u0f99-\\u0fbc\\u0fc6\\u1000-\\u1029\\u1040-\\u1049\\u1067-\\u106d\\u1071-\\u1074\\u1082-\\u108d\\u108f-\\u109d\\u135d-\\u135f\\u170e-\\u1710\\u1720-\\u1730\\u1740-\\u1750\\u1772\\u1773\\u1780-\\u17b2\\u17dd\\u17e0-\\u17e9\\u180b-\\u180d\\u1810-\\u1819\\u1920-\\u192b\\u1930-\\u193b\\u1951-\\u196d\\u19b0-\\u19c0\\u19c8-\\u19c9\\u19d0-\\u19d9\\u1a00-\\u1a15\\u1a20-\\u1a53\\u1a60-\\u1a7c\\u1a7f-\\u1a89\\u1a90-\\u1a99\\u1b46-\\u1b4b\\u1b50-\\u1b59\\u1b6b-\\u1b73\\u1bb0-\\u1bb9\\u1be6-\\u1bf3\\u1c00-\\u1c22\\u1c40-\\u1c49\\u1c5b-\\u1c7d\\u1cd0-\\u1cd2\\u1d00-\\u1dbe\\u1e01-\\u1f15\\u200c\\u200d\\u203f\\u2040\\u2054\\u20d0-\\u20dc\\u20e1\\u20e5-\\u20f0\\u2d81-\\u2d96\\u2de0-\\u2dff\\u3021-\\u3028\\u3099\\u309a\\ua640-\\ua66d\\ua674-\\ua67d\\ua69f\\ua6f0-\\ua6f1\\ua7f8-\\ua800\\ua806\\ua80b\\ua823-\\ua827\\ua880-\\ua881\\ua8b4-\\ua8c4\\ua8d0-\\ua8d9\\ua8f3-\\ua8f7\\ua900-\\ua909\\ua926-\\ua92d\\ua930-\\ua945\\ua980-\\ua983\\ua9b3-\\ua9c0\\uaa00-\\uaa27\\uaa40-\\uaa41\\uaa4c-\\uaa4d\\uaa50-\\uaa59\\uaa7b\\uaae0-\\uaae9\\uaaf2-\\uaaf3\\uabc0-\\uabe1\\uabec\\uabed\\uabf0-\\uabf9\\ufb20-\\ufb28\\ufe00-\\ufe0f\\ufe20-\\ufe26\\ufe33\\ufe34\\ufe4d-\\ufe4f\\uff10-\\uff19\\uff3f])+"),e.newline=/[\n\r\u2028\u2029]/,e.lineBreak=new RegExp("\r\n|"+e.newline.source),e.allLineBreaks=new RegExp(e.lineBreak.source,"g")},282:function(t,e,i){var n=i(558).r,_=i(650).W,s=i(273),a=i(962).E,u=i(76).d2,r=i(76).O9,o=i(76).L2,h=i(76).o3;function p(t,e){return-1!==e.indexOf(t)}function l(t,e){return t&&t.type===h.RESERVED&&t.text===e}function c(t,e){return t&&t.type===h.RESERVED&&p(t.text,e)}var f=["case","return","do","if","throw","else","await","break","continue","async"],d=function(t){for(var e={},i=0;i<t.length;i++)e[t[i].replace(/-/g,"_")]=t[i];return e}(["before-newline","after-newline","preserve-newline"]),g=[d.before_newline,d.preserve_newline],b="BlockStatement",m="Statement",k="ObjectLiteral",y="ArrayLiteral",w="ForInitializer",x="Conditional",v="Expression";function E(t,e){e.multiline_frame||e.mode===w||e.mode===x||t.remove_indent(e.start_line_index)}function T(t){return t===y}function O(t){return 
p(t,[v,w,x])}function R(t,e){e=e||{},this._source_text=t||"",this._output=null,this._tokens=null,this._last_last_text=null,this._flags=null,this._previous_flags=null,this._flag_store=null,this._options=new a(e)}R.prototype.create_flags=function(t,e){var i=0;return t&&(i=t.indentation_level,!this._output.just_added_newline()&&t.line_indent_level>i&&(i=t.line_indent_level)),{mode:e,parent:t,last_token:t?t.last_token:new _(h.START_BLOCK,""),last_word:t?t.last_word:"",declaration_statement:!1,declaration_assignment:!1,multiline_frame:!1,inline_frame:!1,if_block:!1,else_block:!1,do_block:!1,do_while:!1,import_block:!1,in_case_statement:!1,in_case:!1,case_body:!1,indentation_level:i,alignment:0,line_indent_level:t?t.line_indent_level:i,start_line_index:this._output.get_line_number(),ternary_depth:0}},R.prototype._reset=function(t){var e=t.match(/^[\t ]*/)[0];this._last_last_text="",this._output=new n(this._options,e),this._output.raw=this._options.test_output_raw,this._flag_store=[],this.set_mode(b);var i=new u(t,this._options);return this._tokens=i.tokenize(),t},R.prototype.beautify=function(){if(this._options.disabled)return this._source_text;var t=this._reset(this._source_text),e=this._options.eol;"auto"===this._options.eol&&(e="\n",t&&s.lineBreak.test(t||"")&&(e=t.match(s.lineBreak)[0]));for(var i=this._tokens.next();i;)this.handle_token(i),this._last_last_text=this._flags.last_token.text,this._flags.last_token=i,i=this._tokens.next();return this._output.get_code(e)},R.prototype.handle_token=function(t,e){t.type===h.START_EXPR?this.handle_start_expr(t):t.type===h.END_EXPR?this.handle_end_expr(t):t.type===h.START_BLOCK?this.handle_start_block(t):t.type===h.END_BLOCK?this.handle_end_block(t):t.type===h.WORD||t.type===h.RESERVED?this.handle_word(t):t.type===h.SEMICOLON?this.handle_semicolon(t):t.type===h.STRING?this.handle_string(t):t.type===h.EQUALS?this.handle_equals(t):t.type===h.OPERATOR?this.handle_operator(t):t.type===h.COMMA?this.handle_comma(t):t.type===h.BLOCK_COMMENT?this.handle_block_comment(t,e):t.type===h.COMMENT?this.handle_comment(t,e):t.type===h.DOT?this.handle_dot(t):t.type===h.EOF?this.handle_eof(t):(t.type,h.UNKNOWN,this.handle_unknown(t,e))},R.prototype.handle_whitespace_and_comments=function(t,e){var i=t.newlines,n=this._options.keep_array_indentation&&T(this._flags.mode);if(t.comments_before)for(var _=t.comments_before.next();_;)this.handle_whitespace_and_comments(_,e),this.handle_token(_,e),_=t.comments_before.next();if(n)for(var s=0;s<i;s+=1)this.print_newline(s>0,e);else if(this._options.max_preserve_newlines&&i>this._options.max_preserve_newlines&&(i=this._options.max_preserve_newlines),this._options.preserve_newlines&&i>1){this.print_newline(!1,e);for(var a=1;a<i;a+=1)this.print_newline(!0,e)}};var A=["async","break","continue","return","throw","yield"];R.prototype.allow_wrap_or_preserved_newline=function(t,e){if(e=void 0!==e&&e,!this._output.just_added_newline()){var i=this._options.preserve_newlines&&t.newlines||e;if(p(this._flags.last_token.text,o)||p(t.text,o)){var n=p(this._flags.last_token.text,o)&&p(this._options.operator_position,g)||p(t.text,o);i=i&&n}if(i)this.print_newline(!1,!0);else 
if(this._options.wrap_line_length){if(c(this._flags.last_token,A))return;this._output.set_wrap_point()}}},R.prototype.print_newline=function(t,e){if(!e&&";"!==this._flags.last_token.text&&","!==this._flags.last_token.text&&"="!==this._flags.last_token.text&&(this._flags.last_token.type!==h.OPERATOR||"--"===this._flags.last_token.text||"++"===this._flags.last_token.text))for(var i=this._tokens.peek();!(this._flags.mode!==m||this._flags.if_block&&l(i,"else")||this._flags.do_block);)this.restore_mode();this._output.add_new_line(t)&&(this._flags.multiline_frame=!0)},R.prototype.print_token_line_indentation=function(t){this._output.just_added_newline()&&(this._options.keep_array_indentation&&t.newlines&&("["===t.text||T(this._flags.mode))?(this._output.current_line.set_indent(-1),this._output.current_line.push(t.whitespace_before),this._output.space_before_token=!1):this._output.set_indent(this._flags.indentation_level,this._flags.alignment)&&(this._flags.line_indent_level=this._flags.indentation_level))},R.prototype.print_token=function(t){if(this._output.raw)this._output.add_raw_token(t);else{if(this._options.comma_first&&t.previous&&t.previous.type===h.COMMA&&this._output.just_added_newline()&&","===this._output.previous_line.last()){var e=this._output.previous_line.pop();this._output.previous_line.is_empty()&&(this._output.previous_line.push(e),this._output.trim(!0),this._output.current_line.pop(),this._output.trim()),this.print_token_line_indentation(t),this._output.add_token(","),this._output.space_before_token=!0}this.print_token_line_indentation(t),this._output.non_breaking_space=!0,this._output.add_token(t.text),this._output.previous_token_wrapped&&(this._flags.multiline_frame=!0)}},R.prototype.indent=function(){this._flags.indentation_level+=1,this._output.set_indent(this._flags.indentation_level,this._flags.alignment)},R.prototype.deindent=function(){this._flags.indentation_level>0&&(!this._flags.parent||this._flags.indentation_level>this._flags.parent.indentation_level)&&(this._flags.indentation_level-=1,this._output.set_indent(this._flags.indentation_level,this._flags.alignment))},R.prototype.set_mode=function(t){this._flags?(this._flag_store.push(this._flags),this._previous_flags=this._flags):this._previous_flags=this.create_flags(null,t),this._flags=this.create_flags(this._previous_flags,t),this._output.set_indent(this._flags.indentation_level,this._flags.alignment)},R.prototype.restore_mode=function(){this._flag_store.length>0&&(this._previous_flags=this._flags,this._flags=this._flag_store.pop(),this._previous_flags.mode===m&&E(this._output,this._previous_flags),this._output.set_indent(this._flags.indentation_level,this._flags.alignment))},R.prototype.start_of_object_property=function(){return this._flags.parent.mode===k&&this._flags.mode===m&&(":"===this._flags.last_token.text&&0===this._flags.ternary_depth||c(this._flags.last_token,["get","set"]))},R.prototype.start_of_statement=function(t){var 
e=!1;return!!(e=(e=(e=(e=(e=(e=(e=e||c(this._flags.last_token,["var","let","const"])&&t.type===h.WORD)||l(this._flags.last_token,"do"))||!(this._flags.parent.mode===k&&this._flags.mode===m)&&c(this._flags.last_token,A)&&!t.newlines)||l(this._flags.last_token,"else")&&!(l(t,"if")&&!t.comments_before))||this._flags.last_token.type===h.END_EXPR&&(this._previous_flags.mode===w||this._previous_flags.mode===x))||this._flags.last_token.type===h.WORD&&this._flags.mode===b&&!this._flags.in_case&&!("--"===t.text||"++"===t.text)&&"function"!==this._last_last_text&&t.type!==h.WORD&&t.type!==h.RESERVED)||this._flags.mode===k&&(":"===this._flags.last_token.text&&0===this._flags.ternary_depth||c(this._flags.last_token,["get","set"])))&&(this.set_mode(m),this.indent(),this.handle_whitespace_and_comments(t,!0),this.start_of_object_property()||this.allow_wrap_or_preserved_newline(t,c(t,["do","for","if","while"])),!0)},R.prototype.handle_start_expr=function(t){this.start_of_statement(t)||this.handle_whitespace_and_comments(t);var e=v;if("["===t.text){if(this._flags.last_token.type===h.WORD||")"===this._flags.last_token.text)return c(this._flags.last_token,r)&&(this._output.space_before_token=!0),this.print_token(t),this.set_mode(e),this.indent(),void(this._options.space_in_paren&&(this._output.space_before_token=!0));e=y,T(this._flags.mode)&&("["!==this._flags.last_token.text&&(","!==this._flags.last_token.text||"]"!==this._last_last_text&&"}"!==this._last_last_text)||this._options.keep_array_indentation||this.print_newline()),p(this._flags.last_token.type,[h.START_EXPR,h.END_EXPR,h.WORD,h.OPERATOR,h.DOT])||(this._output.space_before_token=!0)}else{if(this._flags.last_token.type===h.RESERVED)"for"===this._flags.last_token.text?(this._output.space_before_token=this._options.space_before_conditional,e=w):p(this._flags.last_token.text,["if","while","switch"])?(this._output.space_before_token=this._options.space_before_conditional,e=x):p(this._flags.last_word,["await","async"])?this._output.space_before_token=!0:"import"===this._flags.last_token.text&&""===t.whitespace_before?this._output.space_before_token=!1:(p(this._flags.last_token.text,r)||"catch"===this._flags.last_token.text)&&(this._output.space_before_token=!0);else if(this._flags.last_token.type===h.EQUALS||this._flags.last_token.type===h.OPERATOR)this.start_of_object_property()||this.allow_wrap_or_preserved_newline(t);else if(this._flags.last_token.type===h.WORD){this._output.space_before_token=!1;var i=this._tokens.peek(-3);if(this._options.space_after_named_function&&i){var n=this._tokens.peek(-4);c(i,["async","function"])||"*"===i.text&&c(n,["async","function"])?this._output.space_before_token=!0:this._flags.mode===k&&("{"!==i.text&&","!==i.text&&("*"!==i.text||"{"!==n.text&&","!==n.text)||(this._output.space_before_token=!0))}}else 
this.allow_wrap_or_preserved_newline(t);(this._flags.last_token.type===h.RESERVED&&("function"===this._flags.last_word||"typeof"===this._flags.last_word)||"*"===this._flags.last_token.text&&(p(this._last_last_text,["function","yield"])||this._flags.mode===k&&p(this._last_last_text,["{",","])))&&(this._output.space_before_token=this._options.space_after_anon_function)}";"===this._flags.last_token.text||this._flags.last_token.type===h.START_BLOCK?this.print_newline():this._flags.last_token.type!==h.END_EXPR&&this._flags.last_token.type!==h.START_EXPR&&this._flags.last_token.type!==h.END_BLOCK&&"."!==this._flags.last_token.text&&this._flags.last_token.type!==h.COMMA||this.allow_wrap_or_preserved_newline(t,t.newlines),this.print_token(t),this.set_mode(e),this._options.space_in_paren&&(this._output.space_before_token=!0),this.indent()},R.prototype.handle_end_expr=function(t){for(;this._flags.mode===m;)this.restore_mode();this.handle_whitespace_and_comments(t),this._flags.multiline_frame&&this.allow_wrap_or_preserved_newline(t,"]"===t.text&&T(this._flags.mode)&&!this._options.keep_array_indentation),this._options.space_in_paren&&(this._flags.last_token.type!==h.START_EXPR||this._options.space_in_empty_paren?this._output.space_before_token=!0:(this._output.trim(),this._output.space_before_token=!1)),this.deindent(),this.print_token(t),this.restore_mode(),E(this._output,this._previous_flags),this._flags.do_while&&this._previous_flags.mode===x&&(this._previous_flags.mode=v,this._flags.do_block=!1,this._flags.do_while=!1)},R.prototype.handle_start_block=function(t){this.handle_whitespace_and_comments(t);var e=this._tokens.peek(),i=this._tokens.peek(1);"switch"===this._flags.last_word&&this._flags.last_token.type===h.END_EXPR?(this.set_mode(b),this._flags.in_case_statement=!0):this._flags.case_body?this.set_mode(b):i&&(p(i.text,[":",","])&&p(e.type,[h.STRING,h.WORD,h.RESERVED])||p(e.text,["get","set","..."])&&p(i.type,[h.WORD,h.RESERVED]))?p(this._last_last_text,["class","interface"])?this.set_mode(b):this.set_mode(k):this._flags.last_token.type===h.OPERATOR&&"=>"===this._flags.last_token.text?this.set_mode(b):p(this._flags.last_token.type,[h.EQUALS,h.START_EXPR,h.COMMA,h.OPERATOR])||c(this._flags.last_token,["return","throw","import","default"])?this.set_mode(k):this.set_mode(b);var n=!e.comments_before&&"}"===e.text,_=n&&"function"===this._flags.last_word&&this._flags.last_token.type===h.END_EXPR;if(this._options.brace_preserve_inline){var 
s=0,a=null;this._flags.inline_frame=!0;do{if(s+=1,(a=this._tokens.peek(s-1)).newlines){this._flags.inline_frame=!1;break}}while(a.type!==h.EOF&&(a.type!==h.END_BLOCK||a.opened!==t))}("expand"===this._options.brace_style||"none"===this._options.brace_style&&t.newlines)&&!this._flags.inline_frame?this._flags.last_token.type!==h.OPERATOR&&(_||this._flags.last_token.type===h.EQUALS||c(this._flags.last_token,f)&&"else"!==this._flags.last_token.text)?this._output.space_before_token=!0:this.print_newline(!1,!0):(!T(this._previous_flags.mode)||this._flags.last_token.type!==h.START_EXPR&&this._flags.last_token.type!==h.COMMA||((this._flags.last_token.type===h.COMMA||this._options.space_in_paren)&&(this._output.space_before_token=!0),(this._flags.last_token.type===h.COMMA||this._flags.last_token.type===h.START_EXPR&&this._flags.inline_frame)&&(this.allow_wrap_or_preserved_newline(t),this._previous_flags.multiline_frame=this._previous_flags.multiline_frame||this._flags.multiline_frame,this._flags.multiline_frame=!1)),this._flags.last_token.type!==h.OPERATOR&&this._flags.last_token.type!==h.START_EXPR&&(this._flags.last_token.type!==h.START_BLOCK||this._flags.inline_frame?this._output.space_before_token=!0:this.print_newline())),this.print_token(t),this.indent(),n||this._options.brace_preserve_inline&&this._flags.inline_frame||this.print_newline()},R.prototype.handle_end_block=function(t){for(this.handle_whitespace_and_comments(t);this._flags.mode===m;)this.restore_mode();var e=this._flags.last_token.type===h.START_BLOCK;this._flags.inline_frame&&!e?this._output.space_before_token=!0:"expand"===this._options.brace_style?e||this.print_newline():e||(T(this._flags.mode)&&this._options.keep_array_indentation?(this._options.keep_array_indentation=!1,this.print_newline(),this._options.keep_array_indentation=!0):this.print_newline()),this.restore_mode(),this.print_token(t)},R.prototype.handle_word=function(t){if(t.type===h.RESERVED&&(p(t.text,["set","get"])&&this._flags.mode!==k||"import"===t.text&&"("===this._tokens.peek().text||p(t.text,["as","from"])&&!this._flags.import_block||this._flags.mode===k&&":"===this._tokens.peek().text)&&(t.type=h.WORD),this.start_of_statement(t)?c(this._flags.last_token,["var","let","const"])&&t.type===h.WORD&&(this._flags.declaration_statement=!0):!t.newlines||O(this._flags.mode)||this._flags.last_token.type===h.OPERATOR&&"--"!==this._flags.last_token.text&&"++"!==this._flags.last_token.text||this._flags.last_token.type===h.EQUALS||!this._options.preserve_newlines&&c(this._flags.last_token,["var","let","const","set","get"])?this.handle_whitespace_and_comments(t):(this.handle_whitespace_and_comments(t),this.print_newline()),this._flags.do_block&&!this._flags.do_while){if(l(t,"while"))return this._output.space_before_token=!0,this.print_token(t),this._output.space_before_token=!0,void(this._flags.do_while=!0);this.print_newline(),this._flags.do_block=!1}if(this._flags.if_block)if(!this._flags.else_block&&l(t,"else"))this._flags.else_block=!0;else{for(;this._flags.mode===m;)this.restore_mode();this._flags.if_block=!1,this._flags.else_block=!1}if(this._flags.in_case_statement&&c(t,["case","default"]))return 
this.print_newline(),this._flags.last_token.type!==h.END_BLOCK&&(this._flags.case_body||this._options.jslint_happy)&&this.deindent(),this._flags.case_body=!1,this.print_token(t),void(this._flags.in_case=!0);if(this._flags.last_token.type!==h.COMMA&&this._flags.last_token.type!==h.START_EXPR&&this._flags.last_token.type!==h.EQUALS&&this._flags.last_token.type!==h.OPERATOR||this.start_of_object_property()||this.allow_wrap_or_preserved_newline(t),l(t,"function"))return(p(this._flags.last_token.text,["}",";"])||this._output.just_added_newline()&&!p(this._flags.last_token.text,["(","[","{",":","=",","])&&this._flags.last_token.type!==h.OPERATOR)&&(this._output.just_added_blankline()||t.comments_before||(this.print_newline(),this.print_newline(!0))),this._flags.last_token.type===h.RESERVED||this._flags.last_token.type===h.WORD?c(this._flags.last_token,["get","set","new","export"])||c(this._flags.last_token,A)||l(this._flags.last_token,"default")&&"export"===this._last_last_text||"declare"===this._flags.last_token.text?this._output.space_before_token=!0:this.print_newline():this._flags.last_token.type===h.OPERATOR||"="===this._flags.last_token.text?this._output.space_before_token=!0:(this._flags.multiline_frame||!O(this._flags.mode)&&!T(this._flags.mode))&&this.print_newline(),this.print_token(t),void(this._flags.last_word=t.text);var e="NONE";this._flags.last_token.type===h.END_BLOCK?this._previous_flags.inline_frame?e="SPACE":c(t,["else","catch","finally","from"])?"expand"===this._options.brace_style||"end-expand"===this._options.brace_style||"none"===this._options.brace_style&&t.newlines?e="NEWLINE":(e="SPACE",this._output.space_before_token=!0):e="NEWLINE":this._flags.last_token.type===h.SEMICOLON&&this._flags.mode===b?e="NEWLINE":this._flags.last_token.type===h.SEMICOLON&&O(this._flags.mode)?e="SPACE":this._flags.last_token.type===h.STRING?e="NEWLINE":this._flags.last_token.type===h.RESERVED||this._flags.last_token.type===h.WORD||"*"===this._flags.last_token.text&&(p(this._last_last_text,["function","yield"])||this._flags.mode===k&&p(this._last_last_text,["{",","]))?e="SPACE":this._flags.last_token.type===h.START_BLOCK?e=this._flags.inline_frame?"SPACE":"NEWLINE":this._flags.last_token.type===h.END_EXPR&&(this._output.space_before_token=!0,e="NEWLINE"),c(t,r)&&")"!==this._flags.last_token.text&&(e=this._flags.inline_frame||"else"===this._flags.last_token.text||"export"===this._flags.last_token.text?"SPACE":"NEWLINE"),c(t,["else","catch","finally"])?(this._flags.last_token.type!==h.END_BLOCK||this._previous_flags.mode!==b||"expand"===this._options.brace_style||"end-expand"===this._options.brace_style||"none"===this._options.brace_style&&t.newlines)&&!this._flags.inline_frame?this.print_newline():(this._output.trim(!0),"}"!==this._output.current_line.last()&&this.print_newline(),this._output.space_before_token=!0):"NEWLINE"===e?c(this._flags.last_token,f)||"declare"===this._flags.last_token.text&&c(t,["var","let","const"])?this._output.space_before_token=!0:this._flags.last_token.type!==h.END_EXPR?this._flags.last_token.type===h.START_EXPR&&c(t,["var","let","const"])||":"===this._flags.last_token.text||(l(t,"if")&&l(t.previous,"else")?this._output.space_before_token=!0:this.print_newline()):c(t,r)&&")"!==this._flags.last_token.text&&this.print_newline():this._flags.multiline_frame&&T(this._flags.mode)&&","===this._flags.last_token.text&&"}"===this._last_last_text?this.print_newline():"SPACE"===e&&(this._output.space_before_token=!0),!t.previous||t.previous.type!==h.WORD&&t.previous.type!==h.RES
ERVED||(this._output.space_before_token=!0),this.print_token(t),this._flags.last_word=t.text,t.type===h.RESERVED&&("do"===t.text?this._flags.do_block=!0:"if"===t.text?this._flags.if_block=!0:"import"===t.text?this._flags.import_block=!0:this._flags.import_block&&l(t,"from")&&(this._flags.import_block=!1))},R.prototype.handle_semicolon=function(t){this.start_of_statement(t)?this._output.space_before_token=!1:this.handle_whitespace_and_comments(t);for(var e=this._tokens.peek();!(this._flags.mode!==m||this._flags.if_block&&l(e,"else")||this._flags.do_block);)this.restore_mode();this._flags.import_block&&(this._flags.import_block=!1),this.print_token(t)},R.prototype.handle_string=function(t){(!t.text.startsWith("`")||0!==t.newlines||""!==t.whitespace_before||")"!==t.previous.text&&this._flags.last_token.type!==h.WORD)&&(this.start_of_statement(t)?this._output.space_before_token=!0:(this.handle_whitespace_and_comments(t),this._flags.last_token.type===h.RESERVED||this._flags.last_token.type===h.WORD||this._flags.inline_frame?this._output.space_before_token=!0:this._flags.last_token.type===h.COMMA||this._flags.last_token.type===h.START_EXPR||this._flags.last_token.type===h.EQUALS||this._flags.last_token.type===h.OPERATOR?this.start_of_object_property()||this.allow_wrap_or_preserved_newline(t):!t.text.startsWith("`")||this._flags.last_token.type!==h.END_EXPR||"]"!==t.previous.text&&")"!==t.previous.text||0!==t.newlines?this.print_newline():this._output.space_before_token=!0)),this.print_token(t)},R.prototype.handle_equals=function(t){this.start_of_statement(t)||this.handle_whitespace_and_comments(t),this._flags.declaration_statement&&(this._flags.declaration_assignment=!0),this._output.space_before_token=!0,this.print_token(t),this._output.space_before_token=!0},R.prototype.handle_comma=function(t){this.handle_whitespace_and_comments(t,!0),this.print_token(t),this._output.space_before_token=!0,this._flags.declaration_statement?(O(this._flags.parent.mode)&&(this._flags.declaration_assignment=!1),this._flags.declaration_assignment?(this._flags.declaration_assignment=!1,this.print_newline(!1,!0)):this._options.comma_first&&this.allow_wrap_or_preserved_newline(t)):this._flags.mode===k||this._flags.mode===m&&this._flags.parent.mode===k?(this._flags.mode===m&&this.restore_mode(),this._flags.inline_frame||this.print_newline()):this._options.comma_first&&this.allow_wrap_or_preserved_newline(t)},R.prototype.handle_operator=function(t){var e="*"===t.text&&(c(this._flags.last_token,["function","yield"])||p(this._flags.last_token.type,[h.START_BLOCK,h.COMMA,h.END_BLOCK,h.SEMICOLON])),i=p(t.text,["-","+"])&&(p(this._flags.last_token.type,[h.START_BLOCK,h.START_EXPR,h.EQUALS,h.OPERATOR])||p(this._flags.last_token.text,r)||","===this._flags.last_token.text);if(this.start_of_statement(t));else{var n=!e;this.handle_whitespace_and_comments(t,n)}if(c(this._flags.last_token,f))return this._output.space_before_token=!0,void this.print_token(t);if("*"!==t.text||this._flags.last_token.type!==h.DOT)if("::"!==t.text){if(this._flags.last_token.type===h.OPERATOR&&p(this._options.operator_position,g)&&this.allow_wrap_or_preserved_newline(t),":"===t.text&&this._flags.in_case)return this.print_token(t),this._flags.in_case=!1,this._flags.case_body=!0,void(this._tokens.peek().type!==h.START_BLOCK?(this.indent(),this.print_newline()):this._output.space_before_token=!0);var 
_=!0,s=!0,a=!1;if(":"===t.text?0===this._flags.ternary_depth?_=!1:(this._flags.ternary_depth-=1,a=!0):"?"===t.text&&(this._flags.ternary_depth+=1),!i&&!e&&this._options.preserve_newlines&&p(t.text,o)){var u=":"===t.text,l=u&&a,k=u&&!a;switch(this._options.operator_position){case d.before_newline:return this._output.space_before_token=!k,this.print_token(t),u&&!l||this.allow_wrap_or_preserved_newline(t),void(this._output.space_before_token=!0);case d.after_newline:return this._output.space_before_token=!0,!u||l?this._tokens.peek().newlines?this.print_newline(!1,!0):this.allow_wrap_or_preserved_newline(t):this._output.space_before_token=!1,this.print_token(t),void(this._output.space_before_token=!0);case d.preserve_newline:return k||this.allow_wrap_or_preserved_newline(t),_=!(this._output.just_added_newline()||k),this._output.space_before_token=_,this.print_token(t),void(this._output.space_before_token=!0)}}if(e){this.allow_wrap_or_preserved_newline(t),_=!1;var y=this._tokens.peek();s=y&&p(y.type,[h.WORD,h.RESERVED])}else"..."===t.text?(this.allow_wrap_or_preserved_newline(t),_=this._flags.last_token.type===h.START_BLOCK,s=!1):(p(t.text,["--","++","!","~"])||i)&&(this._flags.last_token.type!==h.COMMA&&this._flags.last_token.type!==h.START_EXPR||this.allow_wrap_or_preserved_newline(t),_=!1,s=!1,!t.newlines||"--"!==t.text&&"++"!==t.text||this.print_newline(!1,!0),";"===this._flags.last_token.text&&O(this._flags.mode)&&(_=!0),this._flags.last_token.type===h.RESERVED?_=!0:this._flags.last_token.type===h.END_EXPR?_=!("]"===this._flags.last_token.text&&("--"===t.text||"++"===t.text)):this._flags.last_token.type===h.OPERATOR&&(_=p(t.text,["--","-","++","+"])&&p(this._flags.last_token.text,["--","-","++","+"]),p(t.text,["+","-"])&&p(this._flags.last_token.text,["--","++"])&&(s=!0)),(this._flags.mode!==b||this._flags.inline_frame)&&this._flags.mode!==m||"{"!==this._flags.last_token.text&&";"!==this._flags.last_token.text||this.print_newline());this._output.space_before_token=this._output.space_before_token||_,this.print_token(t),this._output.space_before_token=s}else this.print_token(t);else this.print_token(t)},R.prototype.handle_block_comment=function(t,e){return this._output.raw?(this._output.add_raw_token(t),void(t.directives&&"end"===t.directives.preserve&&(this._output.raw=this._options.test_output_raw))):t.directives?(this.print_newline(!1,e),this.print_token(t),"start"===t.directives.preserve&&(this._output.raw=!0),void this.print_newline(!1,!0)):s.newline.test(t.text)||t.newlines?void this.print_block_commment(t,e):(this._output.space_before_token=!0,this.print_token(t),void(this._output.space_before_token=!0))},R.prototype.print_block_commment=function(t,e){var i,n=function(t){for(var e=[],i=(t=t.replace(s.allLineBreaks,"\n")).indexOf("\n");-1!==i;)e.push(t.substring(0,i)),i=(t=t.substring(i+1)).indexOf("\n");return t.length&&e.push(t),e}(t.text),_=!1,a=!1,u=t.whitespace_before,r=u.length;if(this.print_newline(!1,e),this.print_token_line_indentation(t),this._output.add_token(n[0]),this.print_newline(!1,e),n.length>1){for(_=function(t,e){for(var i=0;i<t.length;i++)if("*"!==t[i].trim().charAt(0))return!1;return!0}(n=n.slice(1)),a=function(t,e){for(var 
i,n=0,_=t.length;n<_;n++)if((i=t[n])&&0!==i.indexOf(e))return!1;return!0}(n,u),_&&(this._flags.alignment=1),i=0;i<n.length;i++)_?(this.print_token_line_indentation(t),this._output.add_token(n[i].replace(/^\s+/g,""))):a&&n[i]?(this.print_token_line_indentation(t),this._output.add_token(n[i].substring(r))):(this._output.current_line.set_indent(-1),this._output.add_token(n[i])),this.print_newline(!1,e);this._flags.alignment=0}},R.prototype.handle_comment=function(t,e){t.newlines?this.print_newline(!1,e):this._output.trim(!0),this._output.space_before_token=!0,this.print_token(t),this.print_newline(!1,e)},R.prototype.handle_dot=function(t){this.start_of_statement(t)||this.handle_whitespace_and_comments(t,!0),c(this._flags.last_token,f)?this._output.space_before_token=!1:this.allow_wrap_or_preserved_newline(t,")"===this._flags.last_token.text&&this._options.break_chained_methods),this._options.unindent_chained_methods&&this._output.just_added_newline()&&this.deindent(),this.print_token(t)},R.prototype.handle_unknown=function(t,e){this.print_token(t),"\n"===t.text[t.text.length-1]&&this.print_newline(!1,e)},R.prototype.handle_eof=function(t){for(;this._flags.mode===m;)this.restore_mode();this.handle_whitespace_and_comments(t)},t.exports.K=R},82:function(t,e,i){var n=i(282).K,_=i(962).E;t.exports=function(t,e){return new n(t,e).beautify()},t.exports.defaultOptions=function(){return new _}},962:function(t,e,i){var n=i(915).Ei,_=["before-newline","after-newline","preserve-newline"];function s(t){n.call(this,t,"js");var e=this.raw_options.brace_style||null;"expand-strict"===e?this.raw_options.brace_style="expand":"collapse-preserve-inline"===e?this.raw_options.brace_style="collapse,preserve-inline":void 0!==this.raw_options.braces_on_own_line&&(this.raw_options.brace_style=this.raw_options.braces_on_own_line?"expand":"collapse");var i=this._get_selection_list("brace_style",["collapse","expand","end-expand","none","preserve-inline"]);this.brace_preserve_inline=!1,this.brace_style="collapse";for(var s=0;s<i.length;s++)"preserve-inline"===i[s]?this.brace_preserve_inline=!0:this.brace_style=i[s];this.unindent_chained_methods=this._get_boolean("unindent_chained_methods"),this.break_chained_methods=this._get_boolean("break_chained_methods"),this.space_in_paren=this._get_boolean("space_in_paren"),this.space_in_empty_paren=this._get_boolean("space_in_empty_paren"),this.jslint_happy=this._get_boolean("jslint_happy"),this.space_after_anon_function=this._get_boolean("space_after_anon_function"),this.space_after_named_function=this._get_boolean("space_after_named_function"),this.keep_array_indentation=this._get_boolean("keep_array_indentation"),this.space_before_conditional=this._get_boolean("space_before_conditional",!0),this.unescape_strings=this._get_boolean("unescape_strings"),this.e4x=this._get_boolean("e4x"),this.comma_first=this._get_boolean("comma_first"),this.operator_position=this._get_selection("operator_position",_),this.test_output_raw=this._get_boolean("test_output_raw"),this.jslint_happy&&(this.space_after_anon_function=!0)}s.prototype=new n,t.exports.E=s},76:function(t,e,i){var n=i(418).g,_=i(147).d,s=i(147).o,a=i(7).t,u=i(273),r=i(348).c,o=i(800).A;function h(t,e){return-1!==e.indexOf(t)}var 
p={START_EXPR:"TK_START_EXPR",END_EXPR:"TK_END_EXPR",START_BLOCK:"TK_START_BLOCK",END_BLOCK:"TK_END_BLOCK",WORD:"TK_WORD",RESERVED:"TK_RESERVED",SEMICOLON:"TK_SEMICOLON",STRING:"TK_STRING",EQUALS:"TK_EQUALS",OPERATOR:"TK_OPERATOR",COMMA:"TK_COMMA",BLOCK_COMMENT:"TK_BLOCK_COMMENT",COMMENT:"TK_COMMENT",DOT:"TK_DOT",UNKNOWN:"TK_UNKNOWN",START:s.START,RAW:s.RAW,EOF:s.EOF},l=new a(/\/\*/,/\*\//),c=/0[xX][0123456789abcdefABCDEF]*|0[oO][01234567]*|0[bB][01]*|\d+n|(?:\.\d+|\d+\.?\d*)(?:[eE][+-]?\d+)?/,f=/[0-9]/,d=/[^\d\.]/,g=">>> === !== << && >= ** != == <= >> || ?? |> < / - + > : & % ? ^ | *".split(" "),b=">>>= ... >>= <<= === >>> !== **= => ^= :: /= << <= == && -= >= >> != -- += ** || ?? ++ %= &= *= |= |> = ! ? > < : / ^ - + * & % ~ |";b=(b="\\?\\.(?!\\d) "+(b=b.replace(/[-[\]{}()*+?.,\\^$|#]/g,"\\$&"))).replace(/ /g,"|");var m,k=new RegExp(b),y="continue,try,throw,return,var,let,const,if,switch,case,default,for,while,break,function,import,export".split(","),w=y.concat(["do","in","of","else","get","set","new","catch","finally","typeof","yield","async","await","from","as"]),x=new RegExp("^(?:"+w.join("|")+")$"),v=function(t,e){_.call(this,t,e),this._patterns.whitespace=this._patterns.whitespace.matching(/\u00A0\u1680\u180e\u2000-\u200a\u202f\u205f\u3000\ufeff/.source,/\u2028\u2029/.source);var i=new r(this._input),n=new o(this._input).read_options(this._options);this.__patterns={template:n,identifier:n.starting_with(u.identifier).matching(u.identifierMatch),number:i.matching(c),punct:i.matching(k),comment:i.starting_with(/\/\//).until(/[\n\r\u2028\u2029]/),block_comment:i.starting_with(/\/\*/).until_after(/\*\//),html_comment_start:i.matching(/<!--/),html_comment_end:i.matching(/-->/),include:i.starting_with(/#include/).until_after(u.lineBreak),shebang:i.starting_with(/#!/).until_after(u.lineBreak),xml:i.matching(/[\s\S]*?<(\/?)([-a-zA-Z:0-9_.]+|{[\s\S]+?}|!\[CDATA\[[\s\S]*?\]\]|)(\s+{[\s\S]+?}|\s+[-a-zA-Z:0-9_.]+|\s+[-a-zA-Z:0-9_.]+\s*=\s*('[^']*'|"[^"]*"|{[\s\S]+?}))*\s*(\/?)\s*>/),single_quote:n.until(/['\\\n\r\u2028\u2029]/),double_quote:n.until(/["\\\n\r\u2028\u2029]/),template_text:n.until(/[`\\$]/),template_expression:n.until(/[`}\\]/)}};(v.prototype=new _)._is_comment=function(t){return t.type===p.COMMENT||t.type===p.BLOCK_COMMENT||t.type===p.UNKNOWN},v.prototype._is_opening=function(t){return t.type===p.START_BLOCK||t.type===p.START_EXPR},v.prototype._is_closing=function(t,e){return(t.type===p.END_BLOCK||t.type===p.END_EXPR)&&e&&("]"===t.text&&"["===e.text||")"===t.text&&"("===e.text||"}"===t.text&&"{"===e.text)},v.prototype._reset=function(){m=!1},v.prototype._get_next_token=function(t,e){var i=null;this._readWhitespace();var n=this._input.peek();return null===n?this._create_token(p.EOF,""):i=(i=(i=(i=(i=(i=(i=(i=(i=i||this._read_non_javascript(n))||this._read_string(n))||this._read_word(t))||this._read_singles(n))||this._read_comment(n))||this._read_regexp(n,t))||this._read_xml(n,t))||this._read_punctuation())||this._create_token(p.UNKNOWN,this._input.next())},v.prototype._read_word=function(t){var e;return""!==(e=this.__patterns.identifier.read())?(e=e.replace(u.allLineBreaks,"\n"),t.type!==p.DOT&&(t.type!==p.RESERVED||"set"!==t.text&&"get"!==t.text)&&x.test(e)?"in"===e||"of"===e?this._create_token(p.OPERATOR,e):this._create_token(p.RESERVED,e):this._create_token(p.WORD,e)):""!==(e=this.__patterns.number.read())?this._create_token(p.WORD,e):void 0},v.prototype._read_singles=function(t){var 
e=null;return"("===t||"["===t?e=this._create_token(p.START_EXPR,t):")"===t||"]"===t?e=this._create_token(p.END_EXPR,t):"{"===t?e=this._create_token(p.START_BLOCK,t):"}"===t?e=this._create_token(p.END_BLOCK,t):";"===t?e=this._create_token(p.SEMICOLON,t):"."===t&&d.test(this._input.peek(1))?e=this._create_token(p.DOT,t):","===t&&(e=this._create_token(p.COMMA,t)),e&&this._input.next(),e},v.prototype._read_punctuation=function(){var t=this.__patterns.punct.read();if(""!==t)return"="===t?this._create_token(p.EQUALS,t):"?."===t?this._create_token(p.DOT,t):this._create_token(p.OPERATOR,t)},v.prototype._read_non_javascript=function(t){var e="";if("#"===t){if(this._is_first_token()&&(e=this.__patterns.shebang.read()))return this._create_token(p.UNKNOWN,e.trim()+"\n");if(e=this.__patterns.include.read())return this._create_token(p.UNKNOWN,e.trim()+"\n");t=this._input.next();var i="#";if(this._input.hasNext()&&this._input.testChar(f)){do{i+=t=this._input.next()}while(this._input.hasNext()&&"#"!==t&&"="!==t);return"#"===t||("["===this._input.peek()&&"]"===this._input.peek(1)?(i+="[]",this._input.next(),this._input.next()):"{"===this._input.peek()&&"}"===this._input.peek(1)&&(i+="{}",this._input.next(),this._input.next())),this._create_token(p.WORD,i)}this._input.back()}else if("<"===t&&this._is_first_token()){if(e=this.__patterns.html_comment_start.read()){for(;this._input.hasNext()&&!this._input.testChar(u.newline);)e+=this._input.next();return m=!0,this._create_token(p.COMMENT,e)}}else if(m&&"-"===t&&(e=this.__patterns.html_comment_end.read()))return m=!1,this._create_token(p.COMMENT,e);return null},v.prototype._read_comment=function(t){var e=null;if("/"===t){var i="";if("*"===this._input.peek(1)){i=this.__patterns.block_comment.read();var n=l.get_directives(i);n&&"start"===n.ignore&&(i+=l.readIgnored(this._input)),i=i.replace(u.allLineBreaks,"\n"),(e=this._create_token(p.BLOCK_COMMENT,i)).directives=n}else"/"===this._input.peek(1)&&(i=this.__patterns.comment.read(),e=this._create_token(p.COMMENT,i))}return e},v.prototype._read_string=function(t){if("`"===t||"'"===t||'"'===t){var e=this._input.next();return this.has_char_escapes=!1,e+="`"===t?this._read_string_recursive("`",!0,"${"):this._read_string_recursive(t),this.has_char_escapes&&this._options.unescape_strings&&(e=function(t){for(var e="",i=0,_=new n(t),s=null;_.hasNext();)if((s=_.match(/([\s]|[^\\]|\\\\)+/g))&&(e+=s[0]),"\\"===_.peek()){if(_.next(),"x"===_.peek())s=_.match(/x([0-9A-Fa-f]{2})/g);else{if("u"!==_.peek()){e+="\\",_.hasNext()&&(e+=_.next());continue}s=_.match(/u([0-9A-Fa-f]{4})/g)}if(!s)return t;if((i=parseInt(s[1],16))>126&&i<=255&&0===s[0].indexOf("x"))return t;if(i>=0&&i<32){e+="\\"+s[0];continue}e+=34===i||39===i||92===i?"\\"+String.fromCharCode(i):String.fromCharCode(i)}return e}(e)),this._input.peek()===t&&(e+=this._input.next()),e=e.replace(u.allLineBreaks,"\n"),this._create_token(p.STRING,e)}return null},v.prototype._allow_regexp_or_xml=function(t){return t.type===p.RESERVED&&h(t.text,["return","case","throw","else","do","typeof","yield"])||t.type===p.END_EXPR&&")"===t.text&&t.opened.previous.type===p.RESERVED&&h(t.opened.previous.text,["if","while","for"])||h(t.type,[p.COMMENT,p.START_EXPR,p.START_BLOCK,p.START,p.END_BLOCK,p.OPERATOR,p.EQUALS,p.EOF,p.SEMICOLON,p.COMMA])},v.prototype._read_regexp=function(t,e){if("/"===t&&this._allow_regexp_or_xml(e)){for(var 
i=this._input.next(),n=!1,_=!1;this._input.hasNext()&&(n||_||this._input.peek()!==t)&&!this._input.testChar(u.newline);)i+=this._input.peek(),n?n=!1:(n="\\"===this._input.peek(),"["===this._input.peek()?_=!0:"]"===this._input.peek()&&(_=!1)),this._input.next();return this._input.peek()===t&&(i+=this._input.next(),i+=this._input.read(u.identifier)),this._create_token(p.STRING,i)}return null},v.prototype._read_xml=function(t,e){if(this._options.e4x&&"<"===t&&this._allow_regexp_or_xml(e)){var i="",n=this.__patterns.xml.read_match();if(n){for(var _=n[2].replace(/^{\s+/,"{").replace(/\s+}$/,"}"),s=0===_.indexOf("{"),a=0;n;){var r=!!n[1],o=n[2];if(!(n[n.length-1]||"![CDATA["===o.slice(0,8))&&(o===_||s&&o.replace(/^{\s+/,"{").replace(/\s+}$/,"}"))&&(r?--a:++a),i+=n[0],a<=0)break;n=this.__patterns.xml.read_match()}return n||(i+=this._input.match(/[\s\S]*/g)[0]),i=i.replace(u.allLineBreaks,"\n"),this._create_token(p.STRING,i)}}return null},v.prototype._read_string_recursive=function(t,e,i){var n,_;"'"===t?_=this.__patterns.single_quote:'"'===t?_=this.__patterns.double_quote:"`"===t?_=this.__patterns.template_text:"}"===t&&(_=this.__patterns.template_expression);for(var s=_.read(),a="";this._input.hasNext();){if((a=this._input.next())===t||!e&&u.newline.test(a)){this._input.back();break}"\\"===a&&this._input.hasNext()?("x"===(n=this._input.peek())||"u"===n?this.has_char_escapes=!0:"\r"===n&&"\n"===this._input.peek(1)&&this._input.next(),a+=this._input.next()):i&&("${"===i&&"$"===a&&"{"===this._input.peek()&&(a+=this._input.next()),i===a&&(a+="`"===t?this._read_string_recursive("}",e,"`"):this._read_string_recursive("`",e,"${"),this._input.hasNext()&&(a+=this._input.next()))),s+=a+=_.read()}return s},t.exports.d2=v,t.exports.o3=p,t.exports.L2=g.slice(),t.exports.O9=y.slice()}},e={};return function i(n){var _=e[n];if(void 0!==_)return _.exports;var s=e[n]={exports:{}};return t[n](s,s.exports,i),s.exports}(772)}()})); | //# sourceMappingURL=beautifier.min.js.map |
|
observer.js | describe('UNIT: Observer', function () {
var Observer = require('vue/src/observer'),
Emitter = require('emitter'),
DepsOb = require('vue/src/deps-parser').observer
describe('Observing Object', function () {
it('should not watch a ViewModel instance', function () {
var obj = new Vue(), ob = new Emitter()
Observer.observe(obj, 'test', ob)
assert.notOk(obj.__observer__)
})
it('should attach hidden observer and values to the object', function () {
var obj = {}, ob = new Emitter()
Observer.observe(obj, 'test', ob)
assert.ok(obj.__observer__ instanceof Emitter)
assert.ok(obj.__observer__.values)
})
it('should emit set events with correct path', setTestFactory({
obj: { a: 1, b: { c: 2 } },
expects: [
{ key: 'test.a', val: 1 },
{ key: 'test.b.c', val: 3 }
],
path: 'test'
}))
it('should emit multiple events when a nested object is set', setTestFactory({
obj: { a: 1, b: { c: 2 } },
expects: [
{ key: 'test.b', val: { c: 3 } },
{ key: 'test.b.c', val: 3, skip: true }
],
path: 'test'
}))
it('should emit get events on tip values', function () {
DepsOb.active = true
getTestFactory({
obj: { a: 1, b: { c: 2 } },
expects: [
'test.a',
'test.b.c'
],
path: 'test'
})()
DepsOb.active = false
})
it('should emit set when first observing', function () {
var obj = { a: 1, b: { c: 2} },
ob = new Emitter(), i = 0
var expects = [
{ key: 'test.a', val: obj.a },
{ key: 'test.b', val: obj.b },
{ key: 'test.b.c', val: obj.b.c }
]
ob.on('set', function (key, val) {
var exp = expects[i]
assert.strictEqual(key, exp.key)
assert.strictEqual(val, exp.val)
i++
})
Observer.observe(obj, 'test', ob)
assert.strictEqual(i, expects.length)
})
it('should emit set when watching an already observed object', function () {
var obj = { a: 1, b: { c: 2} },
ob1 = new Emitter(),
ob2 = new Emitter(),
i = 0
Observer.observe(obj, 'test', ob1) // watch first time
var expects = [
{ key: 'test.a', val: obj.a },
{ key: 'test.b', val: obj.b },
{ key: 'test.b.c', val: obj.b.c }
]
ob2.on('set', function (key, val) {
var exp = expects[i]
assert.strictEqual(key, exp.key)
assert.strictEqual(val, exp.val)
i++
})
Observer.observe(obj, 'test', ob2) // watch again
assert.strictEqual(i, expects.length)
})
})
describe('Observing Array', function () {
var arr = [],
ob = new Emitter()
Observer.observe(arr, 'test', ob)
it('should attach the hidden observer', function () {
assert.ok(arr.__observer__ instanceof Emitter)
})
it('should overwrite the native array mutator methods', function () {
['push', 'pop', 'shift', 'unshift', 'splice', 'sort', 'reverse'].forEach(function (method) {
assert.notStrictEqual(arr[method], Array.prototype[method])
})
})
it('should emit set for .length when it mutates', function () {
var emitted = false
ob.once('set', function (key, val) {
assert.strictEqual(key, 'test.length')
assert.strictEqual(val, 1)
emitted = true
})
arr.push(1)
assert.ok(emitted)
})
describe('Mutator Methods', function () {
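        // Each overridden mutator emits a 'mutate' event whose payload carries
        // the method name, the arguments it was called with, and its return value.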
it('push', function () {
var arg1 = 123,
arg2 = 234,
emitted = false
ob.once('mutate', function (key, array, mutation) {
assert.strictEqual(key, 'test')
assert.strictEqual(array, arr)
assert.strictEqual(array.length, 3)
assert.strictEqual(mutation.method, 'push')
assert.strictEqual(mutation.args.length, 2)
assert.strictEqual(mutation.args[0], arg1)
assert.strictEqual(mutation.args[1], arg2)
assert.strictEqual(mutation.result, arr.length)
emitted = true
})
var r = arr.push(arg1, arg2)
assert.ok(emitted)
assert.strictEqual(r, arr.length)
})
it('pop', function () {
var emitted = false,
expected = arr[arr.length - 1]
ob.once('mutate', function (key, array, mutation) {
assert.strictEqual(key, 'test')
assert.strictEqual(array, arr)
assert.strictEqual(array.length, 2)
assert.strictEqual(mutation.method, 'pop')
assert.strictEqual(mutation.args.length, 0)
assert.strictEqual(mutation.result, expected)
emitted = true
})
var r = arr.pop()
assert.ok(emitted)
assert.strictEqual(r, expected)
})
it('shift', function () {
var emitted = false,
expected = arr[0]
ob.once('mutate', function (key, array, mutation) {
assert.strictEqual(key, 'test')
assert.strictEqual(array, arr)
assert.strictEqual(array.length, 1)
assert.strictEqual(mutation.method, 'shift')
assert.strictEqual(mutation.args.length, 0)
assert.strictEqual(mutation.result, expected)
emitted = true
})
var r = arr.shift()
assert.ok(emitted)
assert.strictEqual(r, expected)
})
it('unshift', function () {
var emitted = false,
arg1 = 456,
arg2 = 678
ob.once('mutate', function (key, array, mutation) {
assert.strictEqual(key, 'test')
assert.strictEqual(array, arr)
assert.strictEqual(array.length, 3)
assert.strictEqual(mutation.method, 'unshift')
assert.strictEqual(mutation.args.length, 2)
assert.strictEqual(mutation.args[0], arg1)
assert.strictEqual(mutation.args[1], arg2)
assert.strictEqual(mutation.result, arr.length)
emitted = true
})
var r = arr.unshift(arg1, arg2)
assert.ok(emitted)
assert.strictEqual(r, arr.length)
})
it('splice', function () {
var emitted = false,
arg1 = 789,
arg2 = 910,
expected = arr[1]
ob.once('mutate', function (key, array, mutation) {
assert.strictEqual(key, 'test')
assert.strictEqual(array, arr)
assert.strictEqual(array.length, 4)
assert.strictEqual(mutation.method, 'splice')
assert.strictEqual(mutation.args.length, 4)
assert.strictEqual(mutation.args[0], 1)
assert.strictEqual(mutation.args[1], 1)
assert.strictEqual(mutation.args[2], arg1)
assert.strictEqual(mutation.args[3], arg2)
assert.strictEqual(mutation.result.length, 1)
assert.strictEqual(mutation.result[0], expected)
emitted = true
})
var r = arr.splice(1, 1, arg1, arg2)
assert.ok(emitted)
assert.strictEqual(r.length, 1)
assert.strictEqual(r[0], expected)
})
it('sort', function () {
var emitted = false,
sorter = function (a, b) {
return a > b ? -1 : 1
},
copy = arr.slice().sort(sorter)
ob.once('mutate', function (key, array, mutation) {
assert.strictEqual(key, 'test')
assert.strictEqual(array, arr)
assert.strictEqual(mutation.method, 'sort')
assert.strictEqual(mutation.args.length, 1)
assert.strictEqual(mutation.result, arr)
for (var i = 0; i < copy.length; i++) {
assert.strictEqual(array[i], copy[i])
}
emitted = true
})
var r = arr.sort(sorter)
assert.ok(emitted)
assert.strictEqual(r, arr)
})
it('reverse', function () {
var emitted = false,
copy = arr.slice().reverse()
ob.once('mutate', function (key, array, mutation) {
assert.strictEqual(key, 'test')
assert.strictEqual(array, arr)
assert.strictEqual(mutation.method, 'reverse')
assert.strictEqual(mutation.args.length, 0)
assert.strictEqual(mutation.result, arr)
for (var i = 0; i < copy.length; i++) {
assert.strictEqual(array[i], copy[i])
}
emitted = true
})
var r = arr.reverse()
assert.ok(emitted)
assert.strictEqual(r, arr)
})
})
describe('Augmentations', function () {
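        // The observed array is augmented with non-native `remove` and `replace`
        // helpers; both delegate to `splice` under the hood, which is why the
        // mutation events below report `method: 'splice'`.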
it('remove (index)', function () {
var emitted = false,
index = ~~(Math.random() * arr.length),
expected = arr[index] = { a: 1 }
ob.once('mutate', function (key, array, mutation) {
emitted = true
assert.strictEqual(mutation.method, 'splice')
assert.strictEqual(mutation.args.length, 2)
assert.strictEqual(mutation.args[0], index)
})
var r = arr.remove(index)
assert.ok(emitted)
assert.strictEqual(r, expected)
})
it('remove (object)', function () {
var emitted = false,
index = ~~(Math.random() * arr.length),
expected = arr[index] = { a: 1 }
ob.once('mutate', function (key, array, mutation) {
emitted = true
assert.strictEqual(mutation.method, 'splice')
assert.strictEqual(mutation.args.length, 2)
assert.strictEqual(mutation.args[0], index)
})
var r = arr.remove(expected)
assert.ok(emitted)
assert.strictEqual(r, expected)
})
it('remove (function)', function () {
var expected = [1001, 1002]
arr.push.apply(arr, expected)
var filter = function (e) {
return e > 1000
},
copy = arr.filter(function (e) {
return e <= 1000
})
var removed = arr.remove(filter)
assert.deepEqual(arr, copy)
assert.deepEqual(expected, removed)
})
it('replace (index)', function () {
var emitted = false,
index = ~~(Math.random() * arr.length),
expected = arr[index] = { a: 1 },
arg = 34567
ob.once('mutate', function (key, array, mutation) {
emitted = true
assert.strictEqual(mutation.method, 'splice')
assert.strictEqual(mutation.args.length, 3)
assert.strictEqual(mutation.args[0], index)
})
var r = arr.replace(index, arg)
assert.ok(emitted)
assert.strictEqual(r, expected)
assert.strictEqual(arr[index], arg)
})
it('replace (object)', function () {
var emitted = false,
index = ~~(Math.random() * arr.length),
expected = arr[index] = { a: 1 },
arg = 45678
ob.once('mutate', function (key, array, mutation) {
emitted = true
assert.strictEqual(mutation.method, 'splice')
assert.strictEqual(mutation.args.length, 3)
assert.strictEqual(mutation.args[0], index)
})
var r = arr.replace(expected, arg)
assert.ok(emitted)
assert.strictEqual(r, expected)
assert.strictEqual(arr[index], arg)
})
it('replace (function)', function () {
arr[0] = 1
arr[1] = 2
arr[2] = 3
var expected = [2, 3, 3],
expectRet = [1, 2]
var replaced = arr.replace(function (e) {
if (e < 3) return e + 1
})
assert.deepEqual(arr, expected)
assert.deepEqual(replaced, expectRet)
})
})
})
describe('Multiple observers', function () {
var ob1 = new Emitter(),
ob2 = new Emitter(),
obj = {a:1}
Observer.observe(obj, 'test', ob1)
Observer.observe(obj, 'test', ob2)
var ob1Called = false,
ob2Called = false
ob1.on('set', function () {
ob1Called = true
})
ob2.on('set', function () {
ob2Called = true
})
it('should trigger events for multiple observers observing the same object', function () {
obj.a = 2
assert.ok(ob1Called)
assert.ok(ob2Called)
})
})
describe('.unobserve()', function () {
var ob1 = new Emitter(),
ob2 = new Emitter(),
obj = {a:1}
Observer.observe(obj, 'test', ob1)
Observer.observe(obj, 'test', ob2)
Observer.unobserve(obj, 'test', ob1)
var ob1Called = false
ob1.on('set', function () {
ob1Called = true
})
var ob2Called = false
ob2.on('set', function () {
ob2Called = true
})
it('should set observer proxies path to null', function () {
assert.strictEqual(ob1.proxies['test.'], null)
})
it('should turn off corresponding event listeners', function () {
var callbacks = obj.__observer__._callbacks
for (var e in callbacks) {
assert.strictEqual(callbacks[e].length, 1)
}
})
it('should no longer emit events', function () {
obj.a = 2
assert.notOk(ob1Called)
assert.ok(ob2Called)
})
})
describe('.ensurePath()', function () {
it('should ensure a path can be accessed without error', function () {
var obj = {},
path = 'a.b.c'
Observer.ensurePath(obj, path)
assert.strictEqual(obj.a.b.c, undefined)
})
})
// describe('.copyPaths()', function () {
// it('should ensure path for all paths that start with the given key', function () {
// var key = 'a',
// obj = {},
// paths = {
// 'a.b.c': 1,
// 'a.d': 2,
// 'e.f': 3,
// 'g': 4
// }
// Observer.ensurePaths(key, obj, paths)
// assert.strictEqual(obj.b.c, undefined)
// assert.strictEqual(obj.d, undefined)
// assert.notOk('f' in obj)
// assert.strictEqual(Object.keys(obj).length, 2)
// })
// })
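    // Helper: returns a test case that observes `opts.obj` at `opts.path`,
    // mutates every expected path, and asserts that a 'set' event fires for
    // each one with the matching key and value.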
function setTestFactory (opts) {
return function () {
var ob = new Emitter(),
i = 0,
obj = opts.obj,
expects = opts.expects
Observer.observe(obj, opts.path, ob)
ob.on('set', function (key, val) {
var expect = expects[i]
assert.strictEqual(key, expect.key)
assert.strictEqual(val, expect.val)
i++
})
expects.forEach(function (expect) {
if (expect.skip) return
var path = expect.key.split('.'),
j = 1,
data = obj
while (j < path.length - 1) {
data = data[path[j]]
j++
}
data[path[j]] = expect.val
})
assert.strictEqual(i, expects.length)
}
}
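    // Helper: returns a test case that observes `opts.obj`, reads every
    // expected path, and asserts that a 'get' event fires for each tip value
    // (only emitted while the deps-parser observer is active).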
function getTestFactory (opts) {
return function () {
var ob = new Emitter(),
i = 0,
obj = opts.obj,
expects = opts.expects | ob.on('get', function (key) {
var expected = expects[i]
assert.strictEqual(key, expected)
i++
})
expects.forEach(function (key) {
var path = key.split('.'),
j = 1,
data = obj
while (j < path.length) {
data = data[path[j]]
j++
}
})
assert.strictEqual(i, expects.length)
}
}
}) | Observer.observe(obj, opts.path, ob) |
word_test.go | package word
import (
"testing"
)
func | (t *testing.T) {
test := "gaap é o melhor software em GoLang!"
want := []string{"gaap", "é", "o", "melhor", "software", "em", "GoLang!"}
result := TextToWord(test)
if len(result) != len(want) {
t.Errorf("Length bytes results '%d', '%d' received", len(result), len(want))
}
for idx, text := range want {
if text != result[idx] {
t.Errorf("Position '%d' resturns ('%s'), expected ('%s')", idx, result[idx], text)
}
}
}
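// BenchmarkTextToWord measures how quickly TextToWord splits the sample sentence.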
func BenchmarkTextToWord(b *testing.B) {
test := "gaap é o melhor software em GoLang!"
b.ResetTimer()
for i := 0; i < b.N; i++ {
TextToWord(test)
}
}
| TestTextToWord |
test_adapter_port_channel_settings_ref.py | # coding: utf-8
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. # noqa: E501
The version of the OpenAPI document: 1.0.9-1295
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import intersight
from intersight.models.adapter_port_channel_settings_ref import AdapterPortChannelSettingsRef # noqa: E501
from intersight.rest import ApiException
class TestAdapterPortChannelSettingsRef(unittest.TestCase):
"""AdapterPortChannelSettingsRef unit test stubs"""
def setUp(self): | pass
def testAdapterPortChannelSettingsRef(self):
"""Test AdapterPortChannelSettingsRef"""
# FIXME: construct object with mandatory attributes with example values
# model = intersight.models.adapter_port_channel_settings_ref.AdapterPortChannelSettingsRef() # noqa: E501
pass
if __name__ == '__main__':
unittest.main() | pass
def tearDown(self): |
tabs.tsx | /*
* Copyright (c) A11yWatch, LLC. and its affiliates.
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
**/
import { useCallback, useState } from 'react'
import { Tabs, Tab } from '@material-ui/core'
import { makeStyles } from '@material-ui/core/styles'
type TabPanelProps = {
children: any
index: any
value: any
className: any
}
const useStyles = makeStyles(() => ({
container: {
height: '100%',
width: '100%',
'& > section': {
height: '100% !important',
width: '100% !important',
},
},
wrapper: {
height: '100%',
width: '100%',
},
}))
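// Renders its children only while its tab is selected; its element ids pair
// with the id/aria-controls attributes applied to each Tab via a11yProps.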
function TabPanel(props: TabPanelProps) {
const { children, value, index, ...other } = props
return (
<div
role='tabpanel'
hidden={value !== index}
id={`a11y-tabpanel-${index}`}
aria-labelledby={`a11y-tab-${index}`}
{...other}
>
{value === index && children}
</div>
)
}
| id: `a11y-tab-${index}`,
'aria-controls': `a11y-tabpanel-${index}`,
}
}
export function WebsiteTabs({ issues, html, screenshot, playground }: any) {
const [value, setValue] = useState<number>(0)
const classes = useStyles()
const handleChange = useCallback((_: any, newValue: number) => {
setValue(newValue)
}, [])
return (
<div className={classes.wrapper}>
<Tabs
value={value}
onChange={handleChange}
aria-label='tabs to compare problems and fixes'
variant={'fullWidth'}
>
<Tab label='Insights' {...a11yProps(0)} />
<Tab label='HTML' {...a11yProps(1)} />
{screenshot ? <Tab label='Screenshots' {...a11yProps(2)} /> : null}
{playground ? <Tab label='Playground' {...a11yProps(3)} /> : null}
</Tabs>
<TabPanel value={value} index={0} className={classes.container}>
{issues}
</TabPanel>
<TabPanel value={value} index={1} className={classes.container}>
{html}
</TabPanel>
{screenshot ? (
<TabPanel value={value} index={2} className={classes.container}>
{screenshot}
</TabPanel>
) : null}
{playground ? (
<TabPanel value={value} index={3} className={classes.container}>
{playground}
</TabPanel>
) : null}
</div>
)
} | function a11yProps(index: number) {
return { |
report-assets.js | /**
* @license Copyright 2018 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
'use strict';
const fs = require('fs');
const flowReportAssets = require('./flow-report-assets.js');
const REPORT_TEMPLATE = fs.readFileSync(__dirname + '/../assets/standalone-template.html',
'utf8'); | const REPORT_CSS = fs.readFileSync(__dirname + '/../assets/styles.css', 'utf8');
// Changes to this export interface should be reflected in build/build-dt-report-resources.js
// and clients/devtools-report-assets.js
module.exports = {
REPORT_TEMPLATE,
REPORT_JAVASCRIPT,
REPORT_CSS,
// Flow report assets are not needed for every bundle.
// Ignoring flow-report-assets.js (e.g. `browserify.ignore`) will remove the flow assets from the bundle.
...flowReportAssets,
}; | const REPORT_JAVASCRIPT = fs.readFileSync(__dirname + '/../../dist/report/standalone.js', 'utf8'); |
description.js | var _ = require('../util').lodash,
marked = require('marked'),
sanitizeHtml = require('sanitize-html'),
escapeHtml = require('escape-html'),
| renderer: new marked.Renderer(),
gfm: true,
tables: true,
breaks: false,
pedantic: false,
sanitize: false,
smartLists: true,
smartypants: false
},
HTML_DEFAULT_OPTIONS = {
allowedTags: ['h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'blockquote', 'p', 'a', 'ul', 'ol', 'nl', 'li', 'b', 'i',
'strong', 'em', 'strike', 'code', 'hr', 'br', 'div', 'table', 'thead', 'caption', 'tbody', 'tr', 'th', 'td',
'pre', 'img', 'abbr', 'address', 'section', 'article', 'aside', 'dd', 'dl', 'dt', 'tfoot'],
allowedAttributes: {
a: ['href'],
img: ['src', 'width', 'height', 'alt'],
td: ['align'],
th: ['align']
}
},
Description;
// Set the default markdown options
marked.setOptions(MARKDOWN_DEFAULT_OPTIONS);
/**
* @typedef Description~definition
* @property {String} content
* @property {String} type
*/
/**
 * This is one of the properties that are (if provided) processed by all other properties. Any property can have an
 * instance of `Description` assigned to it under the key name `description`, and it should be treated as
 * something that "describes" the property to which it belongs. Usually this property is used to generate
 * documentation and other contextual information for a property in a Collection.
*
* @constructor
*
* @param {Description~definition|String} [definition] The content of the description can be passed as a string when it
* is in `text/plain` format or otherwise be sent as part of an object adhering to the {@link Description~definition}
* structure having `content` and `type`.
*
* @example <caption>Add a description to an instance of Collection</caption>
* var SDK = require('postman-collection'),
* Collection = SDK.Collection,
* Description = SDK.Description,
* mycollection;
*
* // create a blank collection
* myCollection = new Collection();
* myCollection.description = new Description({
* content: '<h1>Hello World</h1><p>I am a Collection</p>',
* type: 'text/html'
* });
*
* // alternatively, you could also use the `.describe` method of any property to set or update the description of the
* // property.
* myCollection.describe('Hey! This is a cool collection.');
*/
Description = function PostmanPropertyDescription (definition) {
    // if the definition is a string, treat it as plain-text content
_.isString(definition) && (definition = {
content: definition,
type: DEFAULT_MIMETYPE
});
// populate the description
definition && this.update(definition);
};
_.assign(Description.prototype, /** @lends Description.prototype */ {
/**
* Updates the content of this description property.
*
* @param {String|Description~definition} content
* @param {String=} [type]
* @todo parse version of description
*/
update: function (content, type) {
_.isObject(content) && ((type = content.type), (content = content.content));
_.assign(this, /** @lends Description.prototype */ {
/**
* The raw content of the description
*
* @type {String}
*/
content: content,
/**
* The mime-type of the description.
*
* @type {String}
*/
type: type || DEFAULT_MIMETYPE
});
},
/**
* Processes the Description with the appropriate formatter as defined by {@link Description.type}
*
* @returns {String}
*/
toString: function () {
var formatter = Description.format[_.isString(this.type) && this.type.toLowerCase()];
return (formatter ? formatter : escapeHtml)(this.content || E);
},
/**
* Creates a JSON representation of the Description (as a plain Javascript object).
*
     * @returns {{content: *, type: *}}
*/
toJSON: function () {
return {
content: this.content,
type: this.type
};
}
});
_.assign(Description, /** @lends Description */ {
/**
* Defines the name of this property for internal use.
* @private
* @readOnly
* @type {String}
*/
_postman_propertyName: 'Description',
/**
* The default and supported description format handlers.
* @readOnly
* @enum {Function}
*/
format: {
/**
* Escapes HTML characters in the description content, and returns the result.
*
* @param {String} content
* @returns {String}
*/
'text/plain': function (content) {
return escapeHtml(content); // do not allow HTML
},
/**
* Returns HTML string generated after rendering raw markdown.
*
* @param {String} content
* @returns {String}
*/
'text/markdown': function (content) {
return sanitizeHtml(marked(content));
},
/**
* Removes blacklisted HTML tags from the Description.
*
* @param {String} content
* @returns {String}
*/
'text/html': function (content) {
return sanitizeHtml(content, HTML_DEFAULT_OPTIONS);
}
},
/**
* Checks whether a property is an instance of Description object.
*
* @param {*} obj
* @returns {Boolean}
*/
isDescription: function (obj) {
return Boolean(obj) && ((obj instanceof Description) ||
_.inSuperChain(obj.constructor, '_postman_propertyName', Description._postman_propertyName));
}
});
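// Usage sketch (exact markup depends on the marked/sanitize-html defaults in use):
// var d = new Description({ content: 'Hello **world**', type: 'text/markdown' });
// d.toString(); // e.g. '<p>Hello <strong>world</strong></p>\n'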
module.exports = {
Description: Description
}; | E = '',
DEFAULT_MIMETYPE = 'text/plain',
MARKDOWN_DEFAULT_OPTIONS = { |
lib.rs | #![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
#![feature(in_band_lifetimes)]
#![feature(nll)]
#![recursion_limit = "256"]
#[macro_use]
extern crate syntax;
use rustc::bug;
use rustc::hir::def::{DefKind, Res};
use rustc::hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE};
use rustc::hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc::hir::itemlikevisit::DeepVisitor;
use rustc::hir::{self, AssocItemKind, Node, PatKind};
use rustc::lint;
use rustc::middle::privacy::{AccessLevel, AccessLevels};
use rustc::ty::fold::TypeVisitor;
use rustc::ty::query::Providers;
use rustc::ty::subst::InternalSubsts;
use rustc::ty::{self, GenericParamDefKind, TraitRef, Ty, TyCtxt, TypeFoldable};
use rustc::util::nodemap::HirIdSet;
use rustc_data_structures::fx::FxHashSet;
use syntax::ast::Ident;
use syntax::attr;
use syntax::symbol::{kw, sym};
use syntax_pos::hygiene::Transparency;
use syntax_pos::Span;
use std::marker::PhantomData;
use std::{cmp, fmt, mem};
use rustc_error_codes::*;
////////////////////////////////////////////////////////////////////////////////
/// Generic infrastructure used to implement specific visitors below.
////////////////////////////////////////////////////////////////////////////////
/// Implemented to visit all `DefId`s in a type.
/// Visiting `DefId`s is useful because visibilities and reachabilities are attached to them.
/// The idea is to visit "all components of a type", as documented in
/// https://github.com/rust-lang/rfcs/blob/master/text/2145-type-privacy.md#how-to-determine-visibility-of-a-type.
/// The default type visitor (`TypeVisitor`) does most of the job, but it has some shortcomings.
/// First, it doesn't have overridable `fn visit_trait_ref`, so we have to catch trait `DefId`s
/// manually. Second, it doesn't visit some type components like signatures of fn types, or traits
/// in `impl Trait`, see individual comments in `DefIdVisitorSkeleton::visit_ty`.
trait DefIdVisitor<'tcx> {
fn tcx(&self) -> TyCtxt<'tcx>;
fn shallow(&self) -> bool {
false
}
fn skip_assoc_tys(&self) -> bool {
false
}
fn visit_def_id(&mut self, def_id: DefId, kind: &str, descr: &dyn fmt::Display) -> bool;
/// Not overridden, but used to actually visit types and traits.
fn skeleton(&mut self) -> DefIdVisitorSkeleton<'_, 'tcx, Self> {
DefIdVisitorSkeleton {
def_id_visitor: self,
visited_opaque_tys: Default::default(),
dummy: Default::default(),
}
}
fn visit(&mut self, ty_fragment: impl TypeFoldable<'tcx>) -> bool {
ty_fragment.visit_with(&mut self.skeleton())
}
fn visit_trait(&mut self, trait_ref: TraitRef<'tcx>) -> bool {
self.skeleton().visit_trait(trait_ref)
}
fn visit_predicates(&mut self, predicates: ty::GenericPredicates<'tcx>) -> bool {
self.skeleton().visit_predicates(predicates)
}
}
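// Example: for a type such as `fn(Priv) -> Pub {f}`, the skeleton below also
// walks the fn signature in `visit_ty`, so both `Priv` and `Pub` reach
// `visit_def_id`.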
struct DefIdVisitorSkeleton<'v, 'tcx, V>
where
V: DefIdVisitor<'tcx> + ?Sized,
{
def_id_visitor: &'v mut V,
visited_opaque_tys: FxHashSet<DefId>,
dummy: PhantomData<TyCtxt<'tcx>>,
}
impl<'tcx, V> DefIdVisitorSkeleton<'_, 'tcx, V>
where
V: DefIdVisitor<'tcx> + ?Sized,
{
fn visit_trait(&mut self, trait_ref: TraitRef<'tcx>) -> bool {
let TraitRef { def_id, substs } = trait_ref;
self.def_id_visitor.visit_def_id(def_id, "trait", &trait_ref.print_only_trait_path())
|| (!self.def_id_visitor.shallow() && substs.visit_with(self))
}
fn visit_predicates(&mut self, predicates: ty::GenericPredicates<'tcx>) -> bool {
let ty::GenericPredicates { parent: _, predicates } = predicates;
for (predicate, _span) in predicates {
match predicate {
ty::Predicate::Trait(poly_predicate) => {
let ty::TraitPredicate { trait_ref } = *poly_predicate.skip_binder();
if self.visit_trait(trait_ref) {
return true;
}
}
ty::Predicate::Projection(poly_predicate) => {
let ty::ProjectionPredicate { projection_ty, ty } =
*poly_predicate.skip_binder();
if ty.visit_with(self) {
return true;
}
if self.visit_trait(projection_ty.trait_ref(self.def_id_visitor.tcx())) {
return true;
}
}
ty::Predicate::TypeOutlives(poly_predicate) => {
let ty::OutlivesPredicate(ty, _region) = *poly_predicate.skip_binder();
if ty.visit_with(self) {
return true;
}
}
ty::Predicate::RegionOutlives(..) => {}
_ => bug!("unexpected predicate: {:?}", predicate),
}
}
false
}
}
impl<'tcx, V> TypeVisitor<'tcx> for DefIdVisitorSkeleton<'_, 'tcx, V>
where
V: DefIdVisitor<'tcx> + ?Sized,
{
fn visit_ty(&mut self, ty: Ty<'tcx>) -> bool {
let tcx = self.def_id_visitor.tcx();
// InternalSubsts are not visited here because they are visited below in `super_visit_with`.
match ty.kind {
ty::Adt(&ty::AdtDef { did: def_id, .. }, ..)
| ty::Foreign(def_id)
| ty::FnDef(def_id, ..)
| ty::Closure(def_id, ..)
| ty::Generator(def_id, ..) => {
if self.def_id_visitor.visit_def_id(def_id, "type", &ty) {
return true;
}
if self.def_id_visitor.shallow() {
return false;
}
// Default type visitor doesn't visit signatures of fn types.
// Something like `fn() -> Priv {my_func}` is considered a private type even if
// `my_func` is public, so we need to visit signatures.
if let ty::FnDef(..) = ty.kind {
if tcx.fn_sig(def_id).visit_with(self) {
return true;
}
}
// Inherent static methods don't have self type in substs.
// Something like `fn() {my_method}` type of the method
// `impl Pub<Priv> { pub fn my_method() {} }` is considered a private type,
// so we need to visit the self type additionally.
if let Some(assoc_item) = tcx.opt_associated_item(def_id) {
if let ty::ImplContainer(impl_def_id) = assoc_item.container {
if tcx.type_of(impl_def_id).visit_with(self) {
return true;
}
}
}
}
ty::Projection(proj) | ty::UnnormalizedProjection(proj) => {
if self.def_id_visitor.skip_assoc_tys() {
// Visitors searching for minimal visibility/reachability want to
// conservatively approximate associated types like `<Type as Trait>::Alias`
// as visible/reachable even if both `Type` and `Trait` are private.
// Ideally, associated types should be substituted in the same way as
// free type aliases, but this isn't done yet.
return false;
}
// This will also visit substs if necessary, so we don't need to recurse.
return self.visit_trait(proj.trait_ref(tcx));
}
ty::Dynamic(predicates, ..) => {
// All traits in the list are considered the "primary" part of the type
// and are visited by shallow visitors.
for predicate in *predicates.skip_binder() {
let trait_ref = match *predicate {
ty::ExistentialPredicate::Trait(trait_ref) => trait_ref,
ty::ExistentialPredicate::Projection(proj) => proj.trait_ref(tcx),
ty::ExistentialPredicate::AutoTrait(def_id) => {
ty::ExistentialTraitRef { def_id, substs: InternalSubsts::empty() }
}
};
let ty::ExistentialTraitRef { def_id, substs: _ } = trait_ref;
if self.def_id_visitor.visit_def_id(def_id, "trait", &trait_ref) {
return true;
}
}
}
ty::Opaque(def_id, ..) => {
// Skip repeated `Opaque`s to avoid infinite recursion.
if self.visited_opaque_tys.insert(def_id) {
// The intent is to treat `impl Trait1 + Trait2` identically to
// `dyn Trait1 + Trait2`. Therefore we ignore def-id of the opaque type itself
// (it either has no visibility, or its visibility is insignificant, like
// visibilities of type aliases) and recurse into predicates instead to go
// through the trait list (default type visitor doesn't visit those traits).
// All traits in the list are considered the "primary" part of the type
// and are visited by shallow visitors.
if self.visit_predicates(tcx.predicates_of(def_id)) {
return true;
}
}
}
// These types don't have their own def-ids (but may have subcomponents
// with def-ids that should be visited recursively).
ty::Bool
| ty::Char
| ty::Int(..)
| ty::Uint(..)
| ty::Float(..)
| ty::Str
| ty::Never
| ty::Array(..)
| ty::Slice(..)
| ty::Tuple(..)
| ty::RawPtr(..)
| ty::Ref(..)
| ty::FnPtr(..)
| ty::Param(..)
| ty::Error
| ty::GeneratorWitness(..) => {}
ty::Bound(..) | ty::Placeholder(..) | ty::Infer(..) => {
bug!("unexpected type: {:?}", ty)
}
}
!self.def_id_visitor.shallow() && ty.super_visit_with(self)
}
}
fn def_id_visibility<'tcx>(
tcx: TyCtxt<'tcx>,
def_id: DefId,
) -> (ty::Visibility, Span, &'static str) {
match tcx.hir().as_local_hir_id(def_id) {
Some(hir_id) => {
let vis = match tcx.hir().get(hir_id) {
Node::Item(item) => &item.vis,
Node::ForeignItem(foreign_item) => &foreign_item.vis,
Node::MacroDef(macro_def) => {
if attr::contains_name(¯o_def.attrs, sym::macro_export) {
return (ty::Visibility::Public, macro_def.span, "public");
} else {
¯o_def.vis
}
}
Node::TraitItem(..) | Node::Variant(..) => {
return def_id_visibility(tcx, tcx.hir().get_parent_did(hir_id));
}
Node::ImplItem(impl_item) => {
match tcx.hir().get(tcx.hir().get_parent_item(hir_id)) {
Node::Item(item) => match &item.kind {
hir::ItemKind::Impl(.., None, _, _) => &impl_item.vis,
hir::ItemKind::Impl(.., Some(trait_ref), _, _) => {
return def_id_visibility(tcx, trait_ref.path.res.def_id());
}
kind => bug!("unexpected item kind: {:?}", kind),
},
node => bug!("unexpected node kind: {:?}", node),
}
}
Node::Ctor(vdata) => {
let parent_hir_id = tcx.hir().get_parent_node(hir_id);
match tcx.hir().get(parent_hir_id) {
Node::Variant(..) => {
let parent_did = tcx.hir().local_def_id(parent_hir_id);
let (mut ctor_vis, mut span, mut descr) =
def_id_visibility(tcx, parent_did);
let adt_def = tcx.adt_def(tcx.hir().get_parent_did(hir_id));
let ctor_did = tcx.hir().local_def_id(vdata.ctor_hir_id().unwrap());
let variant = adt_def.variant_with_ctor_id(ctor_did);
if variant.is_field_list_non_exhaustive()
&& ctor_vis == ty::Visibility::Public
{
ctor_vis =
ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX));
let attrs = tcx.get_attrs(variant.def_id);
span =
attr::find_by_name(&attrs, sym::non_exhaustive).unwrap().span;
descr = "crate-visible";
}
return (ctor_vis, span, descr);
}
Node::Item(..) => {
let item = match tcx.hir().get(parent_hir_id) {
Node::Item(item) => item,
node => bug!("unexpected node kind: {:?}", node),
};
let (mut ctor_vis, mut span, mut descr) = (
ty::Visibility::from_hir(&item.vis, parent_hir_id, tcx),
item.vis.span,
item.vis.node.descr(),
);
for field in vdata.fields() {
let field_vis = ty::Visibility::from_hir(&field.vis, hir_id, tcx);
if ctor_vis.is_at_least(field_vis, tcx) {
ctor_vis = field_vis;
span = field.vis.span;
descr = field.vis.node.descr();
}
}
// If the structure is marked as non_exhaustive then lower the
// visibility to within the crate.
if ctor_vis == ty::Visibility::Public {
let adt_def = tcx.adt_def(tcx.hir().get_parent_did(hir_id));
if adt_def.non_enum_variant().is_field_list_non_exhaustive() {
ctor_vis =
ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX));
span = attr::find_by_name(&item.attrs, sym::non_exhaustive)
.unwrap()
.span;
descr = "crate-visible";
}
}
return (ctor_vis, span, descr);
}
node => bug!("unexpected node kind: {:?}", node),
}
}
Node::Expr(expr) => {
return (
ty::Visibility::Restricted(tcx.hir().get_module_parent(expr.hir_id)),
expr.span,
"private",
);
}
node => bug!("unexpected node kind: {:?}", node),
};
(ty::Visibility::from_hir(vis, hir_id, tcx), vis.span, vis.node.descr())
}
None => {
let vis = tcx.visibility(def_id);
let descr = if vis == ty::Visibility::Public { "public" } else { "private" };
(vis, tcx.def_span(def_id), descr)
}
}
}
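// Worked example (hypothetical items) for the `#[non_exhaustive]` ctor
// demotion handled above:
//
//     pub struct Open(pub u8);       // ctor visibility: `pub`
//     #[non_exhaustive]
//     pub struct Sealed(pub u8);     // ctor demoted to crate visibility
//
// For `Sealed`'s constructor this function returns
// `ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX))` with the
// description "crate-visible", so `dep::Sealed(0)` fails to compile in
// other crates while remaining usable inside the defining crate.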
// Set the correct `TypeckTables` for the given `item_id` (or an empty table if
// there is no `TypeckTables` for the item).
fn item_tables<'a, 'tcx>(
tcx: TyCtxt<'tcx>,
hir_id: hir::HirId,
empty_tables: &'a ty::TypeckTables<'tcx>,
) -> &'a ty::TypeckTables<'tcx> {
let def_id = tcx.hir().local_def_id(hir_id);
if tcx.has_typeck_tables(def_id) { tcx.typeck_tables_of(def_id) } else { empty_tables }
}
fn min(vis1: ty::Visibility, vis2: ty::Visibility, tcx: TyCtxt<'_>) -> ty::Visibility {
if vis1.is_at_least(vis2, tcx) { vis2 } else { vis1 }
}
////////////////////////////////////////////////////////////////////////////////
/// Visitor used to determine if pub(restricted) is used anywhere in the crate.
///
/// This is done so that `private_in_public` warnings can be turned into hard errors
/// in crates that have been updated to use pub(restricted).
////////////////////////////////////////////////////////////////////////////////
struct PubRestrictedVisitor<'tcx> {
tcx: TyCtxt<'tcx>,
has_pub_restricted: bool,
}
impl Visitor<'tcx> for PubRestrictedVisitor<'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::All(&self.tcx.hir())
}
fn visit_vis(&mut self, vis: &'tcx hir::Visibility) {
self.has_pub_restricted = self.has_pub_restricted || vis.node.is_pub_restricted();
}
}
////////////////////////////////////////////////////////////////////////////////
/// Visitor used to determine impl visibility and reachability.
////////////////////////////////////////////////////////////////////////////////
struct FindMin<'a, 'tcx, VL: VisibilityLike> {
tcx: TyCtxt<'tcx>,
access_levels: &'a AccessLevels,
min: VL,
}
impl<'a, 'tcx, VL: VisibilityLike> DefIdVisitor<'tcx> for FindMin<'a, 'tcx, VL> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
fn shallow(&self) -> bool {
VL::SHALLOW
}
fn skip_assoc_tys(&self) -> bool {
true
}
fn visit_def_id(&mut self, def_id: DefId, _kind: &str, _descr: &dyn fmt::Display) -> bool {
self.min = VL::new_min(self, def_id);
false
}
}
trait VisibilityLike: Sized {
const MAX: Self;
const SHALLOW: bool = false;
fn new_min(find: &FindMin<'_, '_, Self>, def_id: DefId) -> Self;
// Returns an over-approximation (`skip_assoc_tys` = true) of visibility due to
// associated types for which we can't determine visibility precisely.
fn of_impl(hir_id: hir::HirId, tcx: TyCtxt<'_>, access_levels: &AccessLevels) -> Self {
let mut find = FindMin { tcx, access_levels, min: Self::MAX };
let def_id = tcx.hir().local_def_id(hir_id);
find.visit(tcx.type_of(def_id));
if let Some(trait_ref) = tcx.impl_trait_ref(def_id) {
find.visit_trait(trait_ref);
}
find.min
}
}
impl VisibilityLike for ty::Visibility {
const MAX: Self = ty::Visibility::Public;
fn new_min(find: &FindMin<'_, '_, Self>, def_id: DefId) -> Self {
min(def_id_visibility(find.tcx, def_id).0, find.min, find.tcx)
}
}
impl VisibilityLike for Option<AccessLevel> {
const MAX: Self = Some(AccessLevel::Public);
// Type inference is very smart sometimes.
    // It can make an impl reachable even if some components of its type or trait are unreachable.
// E.g. methods of `impl ReachableTrait<UnreachableTy> for ReachableTy<UnreachableTy> { ... }`
// can be usable from other crates (#57264). So we skip substs when calculating reachability
// and consider an impl reachable if its "shallow" type and trait are reachable.
//
// The assumption we make here is that type-inference won't let you use an impl without knowing
// both "shallow" version of its self type and "shallow" version of its trait if it exists
// (which require reaching the `DefId`s in them).
const SHALLOW: bool = true;
fn new_min(find: &FindMin<'_, '_, Self>, def_id: DefId) -> Self {
cmp::min(
if let Some(hir_id) = find.tcx.hir().as_local_hir_id(def_id) {
find.access_levels.map.get(&hir_id).cloned()
} else {
Self::MAX
},
find.min,
)
}
}
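// Illustrative contrast between the two impls above, for a hypothetical
// `impl PubTrait<PrivTy> for PubTy { ... }`:
// `ty::Visibility::of_impl` also descends into substs (it is not SHALLOW),
// so `PrivTy` drags the computed visibility down. `Option<AccessLevel>::of_impl`
// is SHALLOW and consults only `PubTrait` and `PubTy`, keeping the impl
// (and therefore its methods) reachable, which is exactly the #57264
// scenario described above.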
////////////////////////////////////////////////////////////////////////////////
/// The embargo visitor, used to determine the exports of the AST.
////////////////////////////////////////////////////////////////////////////////
struct EmbargoVisitor<'tcx> {
tcx: TyCtxt<'tcx>,
/// Accessibility levels for reachable nodes.
access_levels: AccessLevels,
/// A set of pairs corresponding to modules, where the first module is
/// reachable via a macro that's defined in the second module. This cannot
/// be represented as reachable because it can't handle the following case:
///
/// pub mod n { // Should be `Public`
/// pub(crate) mod p { // Should *not* be accessible
/// pub fn f() -> i32 { 12 } // Must be `Reachable`
/// }
/// }
/// pub macro m() {
/// n::p::f()
/// }
macro_reachable: FxHashSet<(hir::HirId, DefId)>,
/// Previous accessibility level; `None` means unreachable.
prev_level: Option<AccessLevel>,
/// Has something changed in the level map?
changed: bool,
}
struct ReachEverythingInTheInterfaceVisitor<'a, 'tcx> {
access_level: Option<AccessLevel>,
item_def_id: DefId,
ev: &'a mut EmbargoVisitor<'tcx>,
}
impl EmbargoVisitor<'tcx> {
fn get(&self, id: hir::HirId) -> Option<AccessLevel> {
self.access_levels.map.get(&id).cloned()
}
/// Updates node level and returns the updated level.
fn update(&mut self, id: hir::HirId, level: Option<AccessLevel>) -> Option<AccessLevel> {
let old_level = self.get(id);
// Accessibility levels can only grow.
if level > old_level {
self.access_levels.map.insert(id, level.unwrap());
self.changed = true;
level
} else {
old_level
}
}
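    // Note: `update` only ever raises a level (`level > old_level`), and
    // there are finitely many nodes and finitely many levels. This
    // monotonicity is what guarantees that the fixed-point loop in
    // `privacy_access_levels` below terminates: every pass either raises at
    // least one level (setting `changed`) or leaves the map unchanged.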
fn reach(
&mut self,
item_id: hir::HirId,
access_level: Option<AccessLevel>,
) -> ReachEverythingInTheInterfaceVisitor<'_, 'tcx> {
ReachEverythingInTheInterfaceVisitor {
access_level: cmp::min(access_level, Some(AccessLevel::Reachable)),
item_def_id: self.tcx.hir().local_def_id(item_id),
ev: self,
}
}
/// Updates the item as being reachable through a macro defined in the given
/// module. Returns `true` if the level has changed.
fn update_macro_reachable(&mut self, reachable_mod: hir::HirId, defining_mod: DefId) -> bool {
if self.macro_reachable.insert((reachable_mod, defining_mod)) {
self.update_macro_reachable_mod(reachable_mod, defining_mod);
true
} else {
false
}
}
fn update_macro_reachable_mod(&mut self, reachable_mod: hir::HirId, defining_mod: DefId) {
let module_def_id = self.tcx.hir().local_def_id(reachable_mod);
let module = self.tcx.hir().get_module(module_def_id).0;
for item_id in module.item_ids {
let hir_id = item_id.id;
let item_def_id = self.tcx.hir().local_def_id(hir_id);
if let Some(def_kind) = self.tcx.def_kind(item_def_id) {
let item = self.tcx.hir().expect_item(hir_id);
let vis = ty::Visibility::from_hir(&item.vis, hir_id, self.tcx);
self.update_macro_reachable_def(hir_id, def_kind, vis, defining_mod);
}
}
if let Some(exports) = self.tcx.module_exports(module_def_id) {
for export in exports {
if export.vis.is_accessible_from(defining_mod, self.tcx) {
if let Res::Def(def_kind, def_id) = export.res {
let vis = def_id_visibility(self.tcx, def_id).0;
if let Some(hir_id) = self.tcx.hir().as_local_hir_id(def_id) {
self.update_macro_reachable_def(hir_id, def_kind, vis, defining_mod);
}
}
}
}
}
}
fn update_macro_reachable_def(
&mut self,
hir_id: hir::HirId,
def_kind: DefKind,
vis: ty::Visibility,
module: DefId,
) {
let level = Some(AccessLevel::Reachable);
if let ty::Visibility::Public = vis {
self.update(hir_id, level);
}
match def_kind {
// No type privacy, so can be directly marked as reachable.
DefKind::Const
| DefKind::Macro(_)
| DefKind::Static
| DefKind::TraitAlias
| DefKind::TyAlias => {
if vis.is_accessible_from(module, self.tcx) {
self.update(hir_id, level);
}
}
// We can't use a module name as the final segment of a path, except
// in use statements. Since re-export checking doesn't consider
            // hygiene, these don't need to be marked reachable. The contents of
            // the module, however, may be reachable.
DefKind::Mod => {
if vis.is_accessible_from(module, self.tcx) {
self.update_macro_reachable(hir_id, module);
}
}
DefKind::Struct | DefKind::Union => {
// While structs and unions have type privacy, their fields do
// not.
if let ty::Visibility::Public = vis {
let item = self.tcx.hir().expect_item(hir_id);
if let hir::ItemKind::Struct(ref struct_def, _)
| hir::ItemKind::Union(ref struct_def, _) = item.kind
{
for field in struct_def.fields() {
let field_vis =
ty::Visibility::from_hir(&field.vis, field.hir_id, self.tcx);
if field_vis.is_accessible_from(module, self.tcx) {
self.reach(field.hir_id, level).ty();
}
}
} else {
bug!("item {:?} with DefKind {:?}", item, def_kind);
}
}
}
// These have type privacy, so are not reachable unless they're
// public
DefKind::AssocConst
| DefKind::AssocTy
| DefKind::AssocOpaqueTy
| DefKind::ConstParam
| DefKind::Ctor(_, _)
| DefKind::Enum
| DefKind::ForeignTy
| DefKind::Fn
| DefKind::OpaqueTy
| DefKind::Method
| DefKind::Trait
| DefKind::TyParam
| DefKind::Variant => (),
}
}
    /// Given the path segments of an `ItemKind::Use`, we need
    /// to update the visibility of the intermediate use so that it isn't linted
/// by `unreachable_pub`.
///
/// This isn't trivial as `path.res` has the `DefId` of the eventual target
/// of the use statement not of the next intermediate use statement.
///
/// To do this, consider the last two segments of the path to our intermediate
/// use statement. We expect the penultimate segment to be a module and the
/// last segment to be the name of the item we are exporting. We can then
/// look at the items contained in the module for the use statement with that
/// name and update that item's visibility.
///
/// FIXME: This solution won't work with glob imports and doesn't respect
/// namespaces. See <https://github.com/rust-lang/rust/pull/57922#discussion_r251234202>.
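    // Hypothetical illustration: for `pub use a::b::item;` where module `b`
    // itself contains `use self::inner::item;`, the segment pair considered
    // is `[b, item]`. `b` resolves to a module, so the `use` item named
    // `item` inside `b` is bumped to `AccessLevel::Exported` and is no
    // longer flagged by `unreachable_pub`.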
fn update_visibility_of_intermediate_use_statements(&mut self, segments: &[hir::PathSegment]) {
if let Some([module, segment]) = segments.rchunks_exact(2).next() {
if let Some(item) = module
.res
.and_then(|res| res.mod_def_id())
.and_then(|def_id| self.tcx.hir().as_local_hir_id(def_id))
.map(|module_hir_id| self.tcx.hir().expect_item(module_hir_id))
{
if let hir::ItemKind::Mod(m) = &item.kind {
for item_id in m.item_ids.as_ref() {
let item = self.tcx.hir().expect_item(item_id.id);
let def_id = self.tcx.hir().local_def_id(item_id.id);
if !self.tcx.hygienic_eq(segment.ident, item.ident, def_id) {
continue;
}
if let hir::ItemKind::Use(..) = item.kind {
self.update(item.hir_id, Some(AccessLevel::Exported));
}
}
}
}
}
}
}
impl Visitor<'tcx> for EmbargoVisitor<'tcx> {
/// We want to visit items in the context of their containing
/// module and so forth, so supply a crate for doing a deep walk.
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::All(&self.tcx.hir())
}
fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) {
let inherited_item_level = match item.kind {
hir::ItemKind::Impl(..) => {
Option::<AccessLevel>::of_impl(item.hir_id, self.tcx, &self.access_levels)
}
// Foreign modules inherit level from parents.
hir::ItemKind::ForeignMod(..) => self.prev_level,
// Other `pub` items inherit levels from parents.
hir::ItemKind::Const(..)
| hir::ItemKind::Enum(..)
| hir::ItemKind::ExternCrate(..)
| hir::ItemKind::GlobalAsm(..)
| hir::ItemKind::Fn(..)
| hir::ItemKind::Mod(..)
| hir::ItemKind::Static(..)
| hir::ItemKind::Struct(..)
| hir::ItemKind::Trait(..)
| hir::ItemKind::TraitAlias(..)
| hir::ItemKind::OpaqueTy(..)
| hir::ItemKind::TyAlias(..)
| hir::ItemKind::Union(..)
| hir::ItemKind::Use(..) => {
if item.vis.node.is_pub() {
self.prev_level
} else {
None
}
}
};
// Update level of the item itself.
let item_level = self.update(item.hir_id, inherited_item_level);
// Update levels of nested things.
match item.kind {
hir::ItemKind::Enum(ref def, _) => {
for variant in def.variants {
let variant_level = self.update(variant.id, item_level);
if let Some(ctor_hir_id) = variant.data.ctor_hir_id() {
self.update(ctor_hir_id, item_level);
}
for field in variant.data.fields() {
self.update(field.hir_id, variant_level);
}
}
}
hir::ItemKind::Impl(.., ref trait_ref, _, impl_item_refs) => {
for impl_item_ref in impl_item_refs {
if trait_ref.is_some() || impl_item_ref.vis.node.is_pub() {
self.update(impl_item_ref.id.hir_id, item_level);
}
}
}
hir::ItemKind::Trait(.., trait_item_refs) => {
for trait_item_ref in trait_item_refs {
self.update(trait_item_ref.id.hir_id, item_level);
}
}
hir::ItemKind::Struct(ref def, _) | hir::ItemKind::Union(ref def, _) => {
if let Some(ctor_hir_id) = def.ctor_hir_id() {
self.update(ctor_hir_id, item_level);
}
for field in def.fields() {
if field.vis.node.is_pub() {
self.update(field.hir_id, item_level);
}
}
}
hir::ItemKind::ForeignMod(ref foreign_mod) => {
for foreign_item in foreign_mod.items {
if foreign_item.vis.node.is_pub() {
self.update(foreign_item.hir_id, item_level);
}
}
}
hir::ItemKind::OpaqueTy(..)
| hir::ItemKind::Use(..)
| hir::ItemKind::Static(..)
| hir::ItemKind::Const(..)
| hir::ItemKind::GlobalAsm(..)
| hir::ItemKind::TyAlias(..)
| hir::ItemKind::Mod(..)
| hir::ItemKind::TraitAlias(..)
| hir::ItemKind::Fn(..)
| hir::ItemKind::ExternCrate(..) => {}
}
// Mark all items in interfaces of reachable items as reachable.
match item.kind {
// The interface is empty.
hir::ItemKind::ExternCrate(..) => {}
// All nested items are checked by `visit_item`.
hir::ItemKind::Mod(..) => {}
// Re-exports are handled in `visit_mod`. However, in order to avoid looping over
// all of the items of a mod in `visit_mod` looking for use statements, we handle
// making sure that intermediate use statements have their visibilities updated here.
hir::ItemKind::Use(ref path, _) => {
if item_level.is_some() {
self.update_visibility_of_intermediate_use_statements(path.segments.as_ref());
}
}
// The interface is empty.
hir::ItemKind::GlobalAsm(..) => {}
hir::ItemKind::OpaqueTy(..) => {
// FIXME: This is some serious pessimization intended to workaround deficiencies
// in the reachability pass (`middle/reachable.rs`). Types are marked as link-time
// reachable if they are returned via `impl Trait`, even from private functions.
let exist_level = cmp::max(item_level, Some(AccessLevel::ReachableFromImplTrait));
self.reach(item.hir_id, exist_level).generics().predicates().ty();
}
// Visit everything.
hir::ItemKind::Const(..)
| hir::ItemKind::Static(..)
| hir::ItemKind::Fn(..)
| hir::ItemKind::TyAlias(..) => {
if item_level.is_some() {
self.reach(item.hir_id, item_level).generics().predicates().ty();
}
}
hir::ItemKind::Trait(.., trait_item_refs) => {
if item_level.is_some() {
self.reach(item.hir_id, item_level).generics().predicates();
for trait_item_ref in trait_item_refs {
let mut reach = self.reach(trait_item_ref.id.hir_id, item_level);
reach.generics().predicates();
if trait_item_ref.kind == AssocItemKind::Type
&& !trait_item_ref.defaultness.has_value()
{
// No type to visit.
} else {
reach.ty();
}
}
}
}
hir::ItemKind::TraitAlias(..) => {
if item_level.is_some() {
self.reach(item.hir_id, item_level).generics().predicates();
}
}
// Visit everything except for private impl items.
hir::ItemKind::Impl(.., impl_item_refs) => {
if item_level.is_some() {
self.reach(item.hir_id, item_level).generics().predicates().ty().trait_ref();
for impl_item_ref in impl_item_refs {
let impl_item_level = self.get(impl_item_ref.id.hir_id);
if impl_item_level.is_some() {
self.reach(impl_item_ref.id.hir_id, impl_item_level)
.generics()
.predicates()
.ty();
}
}
}
}
// Visit everything, but enum variants have their own levels.
hir::ItemKind::Enum(ref def, _) => {
if item_level.is_some() {
self.reach(item.hir_id, item_level).generics().predicates();
}
for variant in def.variants {
let variant_level = self.get(variant.id);
if variant_level.is_some() {
for field in variant.data.fields() {
self.reach(field.hir_id, variant_level).ty();
}
// Corner case: if the variant is reachable, but its
// enum is not, make the enum reachable as well.
self.update(item.hir_id, variant_level);
}
}
}
// Visit everything, but foreign items have their own levels.
hir::ItemKind::ForeignMod(ref foreign_mod) => {
for foreign_item in foreign_mod.items {
let foreign_item_level = self.get(foreign_item.hir_id);
if foreign_item_level.is_some() {
self.reach(foreign_item.hir_id, foreign_item_level)
.generics()
.predicates()
.ty();
}
}
}
// Visit everything except for private fields.
hir::ItemKind::Struct(ref struct_def, _) | hir::ItemKind::Union(ref struct_def, _) => {
if item_level.is_some() {
self.reach(item.hir_id, item_level).generics().predicates();
for field in struct_def.fields() {
let field_level = self.get(field.hir_id);
if field_level.is_some() {
self.reach(field.hir_id, field_level).ty();
}
}
}
}
}
let orig_level = mem::replace(&mut self.prev_level, item_level);
intravisit::walk_item(self, item);
self.prev_level = orig_level;
}
fn visit_block(&mut self, b: &'tcx hir::Block) {
// Blocks can have public items, for example impls, but they always
// start as completely private regardless of publicity of a function,
// constant, type, field, etc., in which this block resides.
let orig_level = mem::replace(&mut self.prev_level, None);
intravisit::walk_block(self, b);
self.prev_level = orig_level;
}
fn visit_mod(&mut self, m: &'tcx hir::Mod<'tcx>, _sp: Span, id: hir::HirId) {
// This code is here instead of in visit_item so that the
// crate module gets processed as well.
if self.prev_level.is_some() {
let def_id = self.tcx.hir().local_def_id(id);
if let Some(exports) = self.tcx.module_exports(def_id) {
for export in exports.iter() {
if export.vis == ty::Visibility::Public {
if let Some(def_id) = export.res.opt_def_id() {
if let Some(hir_id) = self.tcx.hir().as_local_hir_id(def_id) {
self.update(hir_id, Some(AccessLevel::Exported));
}
}
}
}
}
}
intravisit::walk_mod(self, m, id);
}
fn visit_macro_def(&mut self, md: &'tcx hir::MacroDef<'tcx>) {
if attr::find_transparency(&md.attrs, md.legacy).0 != Transparency::Opaque {
self.update(md.hir_id, Some(AccessLevel::Public));
return;
}
let macro_module_def_id =
ty::DefIdTree::parent(self.tcx, self.tcx.hir().local_def_id(md.hir_id)).unwrap();
let mut module_id = match self.tcx.hir().as_local_hir_id(macro_module_def_id) {
Some(module_id) if self.tcx.hir().is_hir_id_module(module_id) => module_id,
// `module_id` doesn't correspond to a `mod`, return early (#63164, #65252).
_ => return,
};
let level = if md.vis.node.is_pub() { self.get(module_id) } else { None };
let new_level = self.update(md.hir_id, level);
if new_level.is_none() {
return;
}
loop {
let changed_reachability = self.update_macro_reachable(module_id, macro_module_def_id);
if changed_reachability || module_id == hir::CRATE_HIR_ID {
break;
}
module_id = self.tcx.hir().get_parent_node(module_id);
}
}
}
impl ReachEverythingInTheInterfaceVisitor<'_, 'tcx> {
fn generics(&mut self) -> &mut Self {
for param in &self.ev.tcx.generics_of(self.item_def_id).params {
match param.kind {
GenericParamDefKind::Lifetime => {}
GenericParamDefKind::Type { has_default, .. } => {
if has_default {
self.visit(self.ev.tcx.type_of(param.def_id));
}
}
GenericParamDefKind::Const => {
self.visit(self.ev.tcx.type_of(param.def_id));
}
}
}
self
}
fn predicates(&mut self) -> &mut Self {
self.visit_predicates(self.ev.tcx.predicates_of(self.item_def_id));
self
}
fn ty(&mut self) -> &mut Self {
self.visit(self.ev.tcx.type_of(self.item_def_id));
self
}
fn trait_ref(&mut self) -> &mut Self {
if let Some(trait_ref) = self.ev.tcx.impl_trait_ref(self.item_def_id) {
self.visit_trait(trait_ref);
}
self
}
}
impl DefIdVisitor<'tcx> for ReachEverythingInTheInterfaceVisitor<'_, 'tcx> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.ev.tcx
}
fn visit_def_id(&mut self, def_id: DefId, _kind: &str, _descr: &dyn fmt::Display) -> bool {
if let Some(hir_id) = self.ev.tcx.hir().as_local_hir_id(def_id) {
if let ((ty::Visibility::Public, ..), _)
| (_, Some(AccessLevel::ReachableFromImplTrait)) =
(def_id_visibility(self.tcx(), def_id), self.access_level)
{
self.ev.update(hir_id, self.access_level);
}
}
false
}
}
//////////////////////////////////////////////////////////////////////////////////////
/// Name privacy visitor, checks privacy and reports violations.
/// Most of name privacy checks are performed during the main resolution phase,
/// or later in type checking when field accesses and associated items are resolved.
/// This pass performs remaining checks for fields in struct expressions and patterns.
//////////////////////////////////////////////////////////////////////////////////////
struct NamePrivacyVisitor<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
tables: &'a ty::TypeckTables<'tcx>,
current_item: hir::HirId,
empty_tables: &'a ty::TypeckTables<'tcx>,
}
impl<'a, 'tcx> NamePrivacyVisitor<'a, 'tcx> {
// Checks that a field in a struct constructor (expression or pattern) is accessible.
fn check_field(
&mut self,
use_ctxt: Span, // syntax context of the field name at the use site
span: Span, // span of the field pattern, e.g., `x: 0`
def: &'tcx ty::AdtDef, // definition of the struct or enum
field: &'tcx ty::FieldDef,
) {
// definition of the field
let ident = Ident::new(kw::Invalid, use_ctxt);
let current_hir = self.current_item;
let def_id = self.tcx.adjust_ident_and_get_scope(ident, def.did, current_hir).1;
if !def.is_enum() && !field.vis.is_accessible_from(def_id, self.tcx) {
struct_span_err!(
self.tcx.sess,
span,
E0451,
"field `{}` of {} `{}` is private",
field.ident,
def.variant_descr(),
self.tcx.def_path_str(def.did)
)
.span_label(span, format!("field `{}` is private", field.ident))
.emit();
}
}
}
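// A minimal, hypothetical program rejected by `check_field`:
//
//     mod m {
//         pub struct S { x: i32 } // field `x` has inherited (private) visibility
//     }
//     fn main() {
//         let _ = m::S { x: 0 }; // error[E0451]: field `x` of struct `S` is private
//     }
//
// Struct patterns take the same path: `let m::S { x } = s;` outside `m`
// produces the same error via `visit_pat` below.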
impl<'a, 'tcx> Visitor<'tcx> for NamePrivacyVisitor<'a, 'tcx> {
/// We want to visit items in the context of their containing
/// module and so forth, so supply a crate for doing a deep walk.
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::All(&self.tcx.hir())
}
fn visit_mod(&mut self, _m: &'tcx hir::Mod<'tcx>, _s: Span, _n: hir::HirId) {
// Don't visit nested modules, since we run a separate visitor walk
// for each module in `privacy_access_levels`
}
fn visit_nested_body(&mut self, body: hir::BodyId) {
let orig_tables = mem::replace(&mut self.tables, self.tcx.body_tables(body));
let body = self.tcx.hir().body(body);
self.visit_body(body);
self.tables = orig_tables;
}
fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) {
let orig_current_item = mem::replace(&mut self.current_item, item.hir_id);
let orig_tables =
mem::replace(&mut self.tables, item_tables(self.tcx, item.hir_id, self.empty_tables));
intravisit::walk_item(self, item);
self.current_item = orig_current_item;
self.tables = orig_tables;
}
fn visit_trait_item(&mut self, ti: &'tcx hir::TraitItem<'tcx>) {
let orig_tables =
mem::replace(&mut self.tables, item_tables(self.tcx, ti.hir_id, self.empty_tables));
intravisit::walk_trait_item(self, ti);
self.tables = orig_tables;
}
fn visit_impl_item(&mut self, ii: &'tcx hir::ImplItem<'tcx>) {
let orig_tables =
mem::replace(&mut self.tables, item_tables(self.tcx, ii.hir_id, self.empty_tables));
intravisit::walk_impl_item(self, ii);
self.tables = orig_tables;
}
fn visit_expr(&mut self, expr: &'tcx hir::Expr) {
match expr.kind {
hir::ExprKind::Struct(ref qpath, ref fields, ref base) => {
let res = self.tables.qpath_res(qpath, expr.hir_id);
let adt = self.tables.expr_ty(expr).ty_adt_def().unwrap();
let variant = adt.variant_of_res(res);
if let Some(ref base) = *base {
                // If the expression uses FRU (functional record update,
                // `..base`), we need to make sure all the unmentioned fields
                // are checked for privacy (RFC 736). Rather than computing the set of
// unmentioned fields, just check them all.
for (vf_index, variant_field) in variant.fields.iter().enumerate() {
let field = fields
.iter()
.find(|f| self.tcx.field_index(f.hir_id, self.tables) == vf_index);
let (use_ctxt, span) = match field {
Some(field) => (field.ident.span, field.span),
None => (base.span, base.span),
};
self.check_field(use_ctxt, span, adt, variant_field);
}
} else {
for field in fields {
let use_ctxt = field.ident.span;
let index = self.tcx.field_index(field.hir_id, self.tables);
self.check_field(use_ctxt, field.span, adt, &variant.fields[index]);
}
}
}
_ => {}
}
intravisit::walk_expr(self, expr);
}
fn visit_pat(&mut self, pat: &'tcx hir::Pat) {
match pat.kind {
PatKind::Struct(ref qpath, ref fields, _) => {
let res = self.tables.qpath_res(qpath, pat.hir_id);
let adt = self.tables.pat_ty(pat).ty_adt_def().unwrap();
let variant = adt.variant_of_res(res);
for field in fields {
let use_ctxt = field.ident.span;
let index = self.tcx.field_index(field.hir_id, self.tables);
self.check_field(use_ctxt, field.span, adt, &variant.fields[index]);
}
}
_ => {}
}
intravisit::walk_pat(self, pat);
}
}
////////////////////////////////////////////////////////////////////////////////////////////
/// Type privacy visitor, checks types for privacy and reports violations.
/// Both explicitly written types and inferred types of expressions and patterns are checked.
/// Checks are performed on "semantic" types regardless of names and their hygiene.
////////////////////////////////////////////////////////////////////////////////////////////
struct TypePrivacyVisitor<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
tables: &'a ty::TypeckTables<'tcx>,
current_item: DefId,
in_body: bool,
span: Span,
empty_tables: &'a ty::TypeckTables<'tcx>,
}
impl<'a, 'tcx> TypePrivacyVisitor<'a, 'tcx> {
fn item_is_accessible(&self, did: DefId) -> bool {
def_id_visibility(self.tcx, did).0.is_accessible_from(self.current_item, self.tcx)
}
// Take node-id of an expression or pattern and check its type for privacy.
fn check_expr_pat_type(&mut self, id: hir::HirId, span: Span) -> bool {
self.span = span;
if self.visit(self.tables.node_type(id)) || self.visit(self.tables.node_substs(id)) {
return true;
}
if let Some(adjustments) = self.tables.adjustments().get(id) {
for adjustment in adjustments {
if self.visit(adjustment.target) {
return true;
}
}
}
false
}
fn check_def_id(&mut self, def_id: DefId, kind: &str, descr: &dyn fmt::Display) -> bool {
let is_error = !self.item_is_accessible(def_id);
if is_error {
self.tcx.sess.span_err(self.span, &format!("{} `{}` is private", kind, descr));
}
is_error
}
}
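// Hypothetical example of an error this visitor reports that name privacy
// cannot catch: the check runs on *semantic* types, after alias expansion,
// so a public alias doesn't launder a private type.
//
//     mod m {
//         struct Priv;
//         pub type Alias = Priv; // the alias itself is public
//     }
//     fn main() {
//         m::Alias {}; // error: type `m::Priv` is private
//     }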
impl<'a, 'tcx> Visitor<'tcx> for TypePrivacyVisitor<'a, 'tcx> {
/// We want to visit items in the context of their containing
/// module and so forth, so supply a crate for doing a deep walk.
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::All(&self.tcx.hir())
}
fn visit_mod(&mut self, _m: &'tcx hir::Mod<'tcx>, _s: Span, _n: hir::HirId) {
// Don't visit nested modules, since we run a separate visitor walk
// for each module in `privacy_access_levels`
}
fn visit_nested_body(&mut self, body: hir::BodyId) {
let orig_tables = mem::replace(&mut self.tables, self.tcx.body_tables(body));
let orig_in_body = mem::replace(&mut self.in_body, true);
let body = self.tcx.hir().body(body);
self.visit_body(body);
self.tables = orig_tables;
self.in_body = orig_in_body;
}
fn visit_ty(&mut self, hir_ty: &'tcx hir::Ty) {
self.span = hir_ty.span;
if self.in_body {
// Types in bodies.
if self.visit(self.tables.node_type(hir_ty.hir_id)) {
return;
}
} else {
// Types in signatures.
// FIXME: This is very ineffective. Ideally each HIR type should be converted
// into a semantic type only once and the result should be cached somehow.
if self.visit(rustc_typeck::hir_ty_to_ty(self.tcx, hir_ty)) {
return;
}
}
intravisit::walk_ty(self, hir_ty);
}
fn visit_trait_ref(&mut self, trait_ref: &'tcx hir::TraitRef) {
self.span = trait_ref.path.span;
if !self.in_body {
// Avoid calling `hir_trait_to_predicates` in bodies, it will ICE.
// The traits' privacy in bodies is already checked as a part of trait object types.
let bounds = rustc_typeck::hir_trait_to_predicates(self.tcx, trait_ref);
for (trait_predicate, _) in bounds.trait_bounds {
if self.visit_trait(*trait_predicate.skip_binder()) {
return;
}
}
for (poly_predicate, _) in bounds.projection_bounds {
let tcx = self.tcx;
if self.visit(poly_predicate.skip_binder().ty)
|| self.visit_trait(poly_predicate.skip_binder().projection_ty.trait_ref(tcx))
{
return;
}
}
}
intravisit::walk_trait_ref(self, trait_ref);
}
// Check types of expressions
fn visit_expr(&mut self, expr: &'tcx hir::Expr) {
if self.check_expr_pat_type(expr.hir_id, expr.span) {
// Do not check nested expressions if the error already happened.
return;
}
match expr.kind {
hir::ExprKind::Assign(.., ref rhs) | hir::ExprKind::Match(ref rhs, ..) => {
// Do not report duplicate errors for `x = y` and `match x { ... }`.
if self.check_expr_pat_type(rhs.hir_id, rhs.span) {
return;
}
}
hir::ExprKind::MethodCall(_, span, _) => {
// Method calls have to be checked specially.
self.span = span;
if let Some(def_id) = self.tables.type_dependent_def_id(expr.hir_id) {
if self.visit(self.tcx.type_of(def_id)) {
return;
}
} else {
self.tcx
.sess
.delay_span_bug(expr.span, "no type-dependent def for method call");
}
}
_ => {}
}
intravisit::walk_expr(self, expr);
}
// Prohibit access to associated items with insufficient nominal visibility.
//
// Additionally, until better reachability analysis for macros 2.0 is available,
    // we prohibit access to private statics from other crates; this allows giving
// more code internal visibility at link time. (Access to private functions
// is already prohibited by type privacy for function types.)
fn visit_qpath(&mut self, qpath: &'tcx hir::QPath, id: hir::HirId, span: Span) {
let def = match self.tables.qpath_res(qpath, id) {
Res::Def(kind, def_id) => Some((kind, def_id)),
_ => None,
};
let def = def.filter(|(kind, _)| match kind {
DefKind::Method
| DefKind::AssocConst
| DefKind::AssocTy
| DefKind::AssocOpaqueTy
| DefKind::Static => true,
_ => false,
});
if let Some((kind, def_id)) = def {
let is_local_static =
if let DefKind::Static = kind { def_id.is_local() } else { false };
if !self.item_is_accessible(def_id) && !is_local_static {
let name = match *qpath {
hir::QPath::Resolved(_, ref path) => path.to_string(),
hir::QPath::TypeRelative(_, ref segment) => segment.ident.to_string(),
};
let msg = format!("{} `{}` is private", kind.descr(def_id), name);
self.tcx.sess.span_err(span, &msg);
return;
}
}
intravisit::walk_qpath(self, qpath, id, span);
}
// Check types of patterns.
fn visit_pat(&mut self, pattern: &'tcx hir::Pat) {
if self.check_expr_pat_type(pattern.hir_id, pattern.span) {
// Do not check nested patterns if the error already happened.
return;
}
intravisit::walk_pat(self, pattern);
}
fn visit_local(&mut self, local: &'tcx hir::Local) {
if let Some(ref init) = local.init {
if self.check_expr_pat_type(init.hir_id, init.span) {
// Do not report duplicate errors for `let x = y`.
return;
}
}
intravisit::walk_local(self, local);
}
// Check types in item interfaces.
fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) {
let orig_current_item =
mem::replace(&mut self.current_item, self.tcx.hir().local_def_id(item.hir_id));
let orig_in_body = mem::replace(&mut self.in_body, false);
let orig_tables =
mem::replace(&mut self.tables, item_tables(self.tcx, item.hir_id, self.empty_tables));
intravisit::walk_item(self, item);
self.tables = orig_tables;
self.in_body = orig_in_body;
self.current_item = orig_current_item;
}
fn visit_trait_item(&mut self, ti: &'tcx hir::TraitItem<'tcx>) {
let orig_tables =
mem::replace(&mut self.tables, item_tables(self.tcx, ti.hir_id, self.empty_tables));
intravisit::walk_trait_item(self, ti);
self.tables = orig_tables;
}
fn visit_impl_item(&mut self, ii: &'tcx hir::ImplItem<'tcx>) {
let orig_tables =
mem::replace(&mut self.tables, item_tables(self.tcx, ii.hir_id, self.empty_tables));
intravisit::walk_impl_item(self, ii);
self.tables = orig_tables;
}
}
impl DefIdVisitor<'tcx> for TypePrivacyVisitor<'a, 'tcx> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
fn visit_def_id(&mut self, def_id: DefId, kind: &str, descr: &dyn fmt::Display) -> bool {
self.check_def_id(def_id, kind, descr)
}
}
///////////////////////////////////////////////////////////////////////////////
/// Obsolete visitors for checking for private items in public interfaces.
/// These visitors are supposed to be kept in frozen state and produce an
/// "old error node set". For backward compatibility the new visitor reports
/// warnings instead of hard errors when the erroneous node is not in this old set.
///////////////////////////////////////////////////////////////////////////////
struct ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
access_levels: &'a AccessLevels,
in_variant: bool,
// Set of errors produced by this obsolete visitor.
old_error_set: HirIdSet,
}
struct ObsoleteCheckTypeForPrivatenessVisitor<'a, 'b, 'tcx> {
inner: &'a ObsoleteVisiblePrivateTypesVisitor<'b, 'tcx>,
/// Whether the type refers to private types.
contains_private: bool,
/// Whether we've recurred at all (i.e., if we're pointing at the
/// first type on which `visit_ty` was called).
at_outer_type: bool,
/// Whether that first type is a public path.
outer_type_is_public_path: bool,
}
impl<'a, 'tcx> ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> {
fn path_is_private_type(&self, path: &hir::Path) -> bool {
let did = match path.res {
Res::PrimTy(..) | Res::SelfTy(..) | Res::Err => return false,
res => res.def_id(),
};
// A path can only be private if:
// it's in this crate...
if let Some(hir_id) = self.tcx.hir().as_local_hir_id(did) {
// .. and it corresponds to a private type in the AST (this returns
// `None` for type parameters).
match self.tcx.hir().find(hir_id) {
Some(Node::Item(ref item)) => !item.vis.node.is_pub(),
Some(_) | None => false,
}
} else {
return false;
}
}
fn trait_is_public(&self, trait_id: hir::HirId) -> bool {
// FIXME: this would preferably be using `exported_items`, but all
// traits are exported currently (see `EmbargoVisitor.exported_trait`).
self.access_levels.is_public(trait_id)
}
fn check_generic_bound(&mut self, bound: &hir::GenericBound) {
if let hir::GenericBound::Trait(ref trait_ref, _) = *bound {
if self.path_is_private_type(&trait_ref.trait_ref.path) {
self.old_error_set.insert(trait_ref.trait_ref.hir_ref_id);
}
}
}
fn item_is_public(&self, id: &hir::HirId, vis: &hir::Visibility) -> bool {
self.access_levels.is_reachable(*id) || vis.node.is_pub()
}
}
impl<'a, 'b, 'tcx, 'v> Visitor<'v> for ObsoleteCheckTypeForPrivatenessVisitor<'a, 'b, 'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'v> {
NestedVisitorMap::None
}
fn visit_ty(&mut self, ty: &hir::Ty) {
if let hir::TyKind::Path(hir::QPath::Resolved(_, ref path)) = ty.kind {
if self.inner.path_is_private_type(path) {
self.contains_private = true;
// Found what we're looking for, so let's stop working.
return;
}
}
if let hir::TyKind::Path(_) = ty.kind {
if self.at_outer_type {
self.outer_type_is_public_path = true;
}
}
self.at_outer_type = false;
intravisit::walk_ty(self, ty)
}
// Don't want to recurse into `[, .. expr]`.
fn visit_expr(&mut self, _: &hir::Expr) {}
}
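// Illustration of what this helper visitor computes, for hypothetical local
// types `Pub` and `Priv` and a trait `Tr`:
//
//     impl Priv { ... }           // `contains_private` = true: never visible
//     impl Pub<Priv> { ... }      // outer type is a public path, but the walk
//                                 // still finds `Priv` inside
//     impl Tr for (Pub, Pub) {}   // outer type is not a path, so
//                                 // `outer_type_is_public_path` stays false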
impl<'a, 'tcx> Visitor<'tcx> for ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> {
/// We want to visit items in the context of their containing
/// module and so forth, so supply a crate for doing a deep walk.
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::All(&self.tcx.hir())
}
fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) {
match item.kind {
// Contents of a private mod can be re-exported, so we need
// to check internals.
hir::ItemKind::Mod(_) => {}
// An `extern {}` doesn't introduce a new privacy
// namespace (the contents have their own privacies).
hir::ItemKind::ForeignMod(_) => {}
hir::ItemKind::Trait(.., ref bounds, _) => {
if !self.trait_is_public(item.hir_id) {
return;
}
for bound in bounds.iter() {
self.check_generic_bound(bound)
}
}
// Impls need some special handling to try to offer useful
// error messages without (too many) false positives
// (i.e., we could just return here to not check them at
// all, or some worse estimation of whether an impl is
// publicly visible).
hir::ItemKind::Impl(.., ref g, ref trait_ref, ref self_, impl_item_refs) => {
// `impl [... for] Private` is never visible.
let self_contains_private;
// `impl [... for] Public<...>`, but not `impl [... for]
// Vec<Public>` or `(Public,)`, etc.
let self_is_public_path;
// Check the properties of the `Self` type:
{
let mut visitor = ObsoleteCheckTypeForPrivatenessVisitor {
inner: self,
contains_private: false,
at_outer_type: true,
outer_type_is_public_path: false,
};
visitor.visit_ty(&self_);
self_contains_private = visitor.contains_private;
self_is_public_path = visitor.outer_type_is_public_path;
}
// Miscellaneous info about the impl:
// `true` iff this is `impl Private for ...`.
let not_private_trait = trait_ref.as_ref().map_or(
true, // no trait counts as public trait
|tr| {
let did = tr.path.res.def_id();
if let Some(hir_id) = self.tcx.hir().as_local_hir_id(did) {
self.trait_is_public(hir_id)
} else {
true // external traits must be public
}
},
);
// `true` iff this is a trait impl or at least one method is public.
//
// `impl Public { $( fn ...() {} )* }` is not visible.
//
// This is required over just using the methods' privacy
// directly because we might have `impl<T: Foo<Private>> ...`,
// and we shouldn't warn about the generics if all the methods
// are private (because `T` won't be visible externally).
let trait_or_some_public_method = trait_ref.is_some()
|| impl_item_refs.iter().any(|impl_item_ref| {
let impl_item = self.tcx.hir().impl_item(impl_item_ref.id);
match impl_item.kind {
hir::ImplItemKind::Const(..) | hir::ImplItemKind::Method(..) => {
self.access_levels.is_reachable(impl_item_ref.id.hir_id)
}
hir::ImplItemKind::OpaqueTy(..) | hir::ImplItemKind::TyAlias(_) => {
false
}
}
});
if !self_contains_private && not_private_trait && trait_or_some_public_method {
intravisit::walk_generics(self, g);
match *trait_ref {
None => {
for impl_item_ref in impl_item_refs {
// This is where we choose whether to walk down
// further into the impl to check its items. We
// should only walk into public items so that we
// don't erroneously report errors for private
// types in private items.
let impl_item = self.tcx.hir().impl_item(impl_item_ref.id);
match impl_item.kind {
hir::ImplItemKind::Const(..)
| hir::ImplItemKind::Method(..)
if self
.item_is_public(&impl_item.hir_id, &impl_item.vis) =>
{
intravisit::walk_impl_item(self, impl_item)
}
hir::ImplItemKind::TyAlias(..) => {
intravisit::walk_impl_item(self, impl_item)
}
_ => {}
}
}
}
Some(ref tr) => {
// Any private types in a trait impl fall into three
// categories.
// 1. mentioned in the trait definition
// 2. mentioned in the type params/generics
// 3. mentioned in the associated types of the impl
//
// Those in 1. can only occur if the trait is in
// this crate and will've been warned about on the
// trait definition (there's no need to warn twice
// so we don't check the methods).
//
// Those in 2. are warned via walk_generics and this
// call here.
intravisit::walk_path(self, &tr.path);
// Those in 3. are warned with this call.
for impl_item_ref in impl_item_refs {
let impl_item = self.tcx.hir().impl_item(impl_item_ref.id);
if let hir::ImplItemKind::TyAlias(ref ty) = impl_item.kind {
self.visit_ty(ty);
}
}
}
}
} else if trait_ref.is_none() && self_is_public_path {
// `impl Public<Private> { ... }`. Any public static
// methods will be visible as `Public::foo`.
let mut found_pub_static = false;
for impl_item_ref in impl_item_refs {
if self.item_is_public(&impl_item_ref.id.hir_id, &impl_item_ref.vis) {
let impl_item = self.tcx.hir().impl_item(impl_item_ref.id);
match impl_item_ref.kind {
AssocItemKind::Const => {
found_pub_static = true;
intravisit::walk_impl_item(self, impl_item);
}
AssocItemKind::Method { has_self: false } => {
found_pub_static = true;
intravisit::walk_impl_item(self, impl_item);
}
_ => {}
}
}
}
if found_pub_static {
intravisit::walk_generics(self, g)
}
}
return;
}
// `type ... = ...;` can contain private types, because
// we're introducing a new name.
hir::ItemKind::TyAlias(..) => return,
// Not at all public, so we don't care.
_ if !self.item_is_public(&item.hir_id, &item.vis) => {
return;
}
_ => {}
}
// We've carefully constructed it so that if we're here, then
// any `visit_ty`'s will be called on things that are in
// public signatures, i.e., things that we're interested in for
// this visitor.
intravisit::walk_item(self, item);
}
fn visit_generics(&mut self, generics: &'tcx hir::Generics) {
for param in &generics.params {
for bound in ¶m.bounds {
self.check_generic_bound(bound);
}
}
for predicate in &generics.where_clause.predicates {
match predicate {
hir::WherePredicate::BoundPredicate(bound_pred) => {
for bound in bound_pred.bounds.iter() {
self.check_generic_bound(bound)
}
}
hir::WherePredicate::RegionPredicate(_) => {}
hir::WherePredicate::EqPredicate(eq_pred) => {
self.visit_ty(&eq_pred.rhs_ty);
}
}
}
}
fn visit_foreign_item(&mut self, item: &'tcx hir::ForeignItem<'tcx>) {
if self.access_levels.is_reachable(item.hir_id) {
intravisit::walk_foreign_item(self, item)
}
}
fn visit_ty(&mut self, t: &'tcx hir::Ty) {
if let hir::TyKind::Path(hir::QPath::Resolved(_, ref path)) = t.kind {
if self.path_is_private_type(path) {
self.old_error_set.insert(t.hir_id);
}
}
intravisit::walk_ty(self, t)
}
fn visit_variant(
&mut self,
v: &'tcx hir::Variant<'tcx>,
g: &'tcx hir::Generics,
item_id: hir::HirId,
) {
if self.access_levels.is_reachable(v.id) {
self.in_variant = true;
intravisit::walk_variant(self, v, g, item_id);
self.in_variant = false;
}
}
fn visit_struct_field(&mut self, s: &'tcx hir::StructField<'tcx>) {
if s.vis.node.is_pub() || self.in_variant {
intravisit::walk_struct_field(self, s);
}
}
// We don't need to introspect into these at all: an
// expression/block context can't possibly contain exported things.
// (Making them no-ops stops us from traversing the whole AST without
// having to be super careful about our `walk_...` calls above.)
fn visit_block(&mut self, _: &'tcx hir::Block) {}
fn visit_expr(&mut self, _: &'tcx hir::Expr) {}
}
///////////////////////////////////////////////////////////////////////////////
/// SearchInterfaceForPrivateItemsVisitor traverses an item's interface and
/// finds any private components in it.
/// PrivateItemsInPublicInterfacesVisitor ensures there are no private types
/// and traits in public interfaces.
///////////////////////////////////////////////////////////////////////////////
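// Minimal, hypothetical shapes of what this pair of visitors reports:
//
//     struct Priv;
//     pub fn f(_: Priv) {}             // private type in public interface
//     pub trait Tr { fn g() -> Priv; } // likewise for trait interfaces
//
// Whether this surfaces as a hard error (E0445/E0446) or only as the
// `private_in_public` lint depends on `has_pub_restricted`,
// `has_old_errors`, and `in_assoc_ty`; see `check_def_id` below.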
struct SearchInterfaceForPrivateItemsVisitor<'tcx> {
tcx: TyCtxt<'tcx>,
item_id: hir::HirId,
item_def_id: DefId,
span: Span,
/// The visitor checks that each component type is at least this visible.
required_visibility: ty::Visibility,
has_pub_restricted: bool,
has_old_errors: bool,
in_assoc_ty: bool,
}
impl SearchInterfaceForPrivateItemsVisitor<'tcx> {
fn generics(&mut self) -> &mut Self {
for param in &self.tcx.generics_of(self.item_def_id).params {
match param.kind {
GenericParamDefKind::Lifetime => {}
GenericParamDefKind::Type { has_default, .. } => {
if has_default {
self.visit(self.tcx.type_of(param.def_id));
}
}
GenericParamDefKind::Const => {
self.visit(self.tcx.type_of(param.def_id));
}
}
}
self
}
fn predicates(&mut self) -> &mut Self {
// N.B., we use `explicit_predicates_of` and not `predicates_of`
// because we don't want to report privacy errors due to where
// clauses that the compiler inferred. We only want to
// consider the ones that the user wrote. This is important
// for the inferred outlives rules; see
// `src/test/ui/rfc-2093-infer-outlives/privacy.rs`.
self.visit_predicates(self.tcx.explicit_predicates_of(self.item_def_id));
self
}
fn ty(&mut self) -> &mut Self {
self.visit(self.tcx.type_of(self.item_def_id));
self
}
fn check_def_id(&mut self, def_id: DefId, kind: &str, descr: &dyn fmt::Display) -> bool {
if self.leaks_private_dep(def_id) {
self.tcx.lint_hir(
lint::builtin::EXPORTED_PRIVATE_DEPENDENCIES,
self.item_id,
self.span,
&format!(
"{} `{}` from private dependency '{}' in public \
interface",
kind,
descr,
self.tcx.crate_name(def_id.krate)
),
);
}
let hir_id = match self.tcx.hir().as_local_hir_id(def_id) {
Some(hir_id) => hir_id,
None => return false,
};
let (vis, vis_span, vis_descr) = def_id_visibility(self.tcx, def_id);
if !vis.is_at_least(self.required_visibility, self.tcx) {
let msg = format!("{} {} `{}` in public interface", vis_descr, kind, descr);
if self.has_pub_restricted || self.has_old_errors || self.in_assoc_ty {
let mut err = if kind == "trait" {
struct_span_err!(self.tcx.sess, self.span, E0445, "{}", msg)
} else {
struct_span_err!(self.tcx.sess, self.span, E0446, "{}", msg)
};
err.span_label(self.span, format!("can't leak {} {}", vis_descr, kind));
err.span_label(vis_span, format!("`{}` declared as {}", descr, vis_descr));
err.emit();
} else {
let err_code = if kind == "trait" { "E0445" } else { "E0446" };
self.tcx.lint_hir(
lint::builtin::PRIVATE_IN_PUBLIC,
hir_id,
self.span,
&format!("{} (error {})", msg, err_code),
);
}
}
false
}
/// An item is 'leaked' from a private dependency if all
/// of the following are true:
/// 1. It's contained within a public type
/// 2. It comes from a private crate
fn leaks_private_dep(&self, item_id: DefId) -> bool {
let ret = self.required_visibility == ty::Visibility::Public
&& self.tcx.is_private_dep(item_id.krate);
log::debug!("leaks_private_dep(item_id={:?})={}", item_id, ret);
        ret
}
}
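// Hypothetical trigger for the `exported_private_dependencies` lint checked
// above: with crate `private_dep` declared as a non-public dependency
// (under the `public-dependency` cargo feature),
//
//     pub fn handle() -> private_dep::Handle { /* ... */ }
//
// places a `DefId` from the private crate inside a fully public interface,
// so `leaks_private_dep` returns `true` and the lint fires.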
impl DefIdVisitor<'tcx> for SearchInterfaceForPrivateItemsVisitor<'tcx> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
fn visit_def_id(&mut self, def_id: DefId, kind: &str, descr: &dyn fmt::Display) -> bool {
self.check_def_id(def_id, kind, descr)
}
}
struct PrivateItemsInPublicInterfacesVisitor<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
has_pub_restricted: bool,
old_error_set: &'a HirIdSet,
}
impl<'a, 'tcx> PrivateItemsInPublicInterfacesVisitor<'a, 'tcx> {
fn check(
&self,
item_id: hir::HirId,
required_visibility: ty::Visibility,
) -> SearchInterfaceForPrivateItemsVisitor<'tcx> {
let mut has_old_errors = false;
        // Slow path, taken only if there are any errors in the crate.
for &id in self.old_error_set {
// Walk up the nodes until we find `item_id` (or we hit a root).
let mut id = id;
loop {
if id == item_id {
has_old_errors = true;
break;
}
let parent = self.tcx.hir().get_parent_node(id);
if parent == id {
break;
}
id = parent;
}
if has_old_errors {
break;
}
}
SearchInterfaceForPrivateItemsVisitor {
tcx: self.tcx,
item_id,
item_def_id: self.tcx.hir().local_def_id(item_id),
span: self.tcx.hir().span(item_id),
required_visibility,
has_pub_restricted: self.has_pub_restricted,
has_old_errors,
in_assoc_ty: false,
}
}
fn check_assoc_item(
&self,
hir_id: hir::HirId,
assoc_item_kind: AssocItemKind,
defaultness: hir::Defaultness,
vis: ty::Visibility,
) {
let mut check = self.check(hir_id, vis);
let (check_ty, is_assoc_ty) = match assoc_item_kind {
AssocItemKind::Const | AssocItemKind::Method { .. } => (true, false),
AssocItemKind::Type => (defaultness.has_value(), true),
// `ty()` for opaque types is the underlying type,
            // it's not a part of the interface, so we skip it.
AssocItemKind::OpaqueTy => (false, true),
};
check.in_assoc_ty = is_assoc_ty;
check.generics().predicates();
if check_ty {
check.ty();
}
}
}
impl<'a, 'tcx> Visitor<'tcx> for PrivateItemsInPublicInterfacesVisitor<'a, 'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::OnlyBodies(&self.tcx.hir())
}
fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) {
let tcx = self.tcx;
let item_visibility = ty::Visibility::from_hir(&item.vis, item.hir_id, tcx);
match item.kind {
// Crates are always public.
hir::ItemKind::ExternCrate(..) => {}
// All nested items are checked by `visit_item`.
hir::ItemKind::Mod(..) => {}
// Checked in resolve.
hir::ItemKind::Use(..) => {}
// No subitems.
hir::ItemKind::GlobalAsm(..) => {}
// Subitems of these items have inherited publicity.
hir::ItemKind::Const(..)
| hir::ItemKind::Static(..)
| hir::ItemKind::Fn(..)
| hir::ItemKind::TyAlias(..) => {
self.check(item.hir_id, item_visibility).generics().predicates().ty();
}
hir::ItemKind::OpaqueTy(..) => {
// `ty()` for opaque types is the underlying type,
                // it's not a part of the interface, so we skip it.
self.check(item.hir_id, item_visibility).generics().predicates();
}
hir::ItemKind::Trait(.., trait_item_refs) => {
self.check(item.hir_id, item_visibility).generics().predicates();
for trait_item_ref in trait_item_refs {
self.check_assoc_item(
trait_item_ref.id.hir_id,
trait_item_ref.kind,
trait_item_ref.defaultness,
item_visibility,
);
}
}
hir::ItemKind::TraitAlias(..) => {
self.check(item.hir_id, item_visibility).generics().predicates();
}
hir::ItemKind::Enum(ref def, _) => {
self.check(item.hir_id, item_visibility).generics().predicates();
for variant in def.variants {
for field in variant.data.fields() {
self.check(field.hir_id, item_visibility).ty();
}
}
}
// Subitems of foreign modules have their own publicity.
hir::ItemKind::ForeignMod(ref foreign_mod) => {
for foreign_item in foreign_mod.items {
let vis = ty::Visibility::from_hir(&foreign_item.vis, item.hir_id, tcx);
self.check(foreign_item.hir_id, vis).generics().predicates().ty();
}
}
// Subitems of structs and unions have their own publicity.
hir::ItemKind::Struct(ref struct_def, _) | hir::ItemKind::Union(ref struct_def, _) => {
self.check(item.hir_id, item_visibility).generics().predicates();
for field in struct_def.fields() {
let field_visibility = ty::Visibility::from_hir(&field.vis, item.hir_id, tcx);
self.check(field.hir_id, min(item_visibility, field_visibility, tcx)).ty();
}
}
            // An inherent impl is public when its type is public.
            // Subitems of inherent impls have their own publicity.
            // A trait impl is public when both its type and its trait are public.
            // Subitems of trait impls have inherited publicity.
hir::ItemKind::Impl(.., ref trait_ref, _, impl_item_refs) => {
let impl_vis = ty::Visibility::of_impl(item.hir_id, tcx, &Default::default());
self.check(item.hir_id, impl_vis).generics().predicates();
for impl_item_ref in impl_item_refs {
let impl_item = tcx.hir().impl_item(impl_item_ref.id);
let impl_item_vis = if trait_ref.is_none() {
min(
ty::Visibility::from_hir(&impl_item.vis, item.hir_id, tcx),
impl_vis,
tcx,
)
} else {
impl_vis
};
self.check_assoc_item(
impl_item_ref.id.hir_id,
impl_item_ref.kind,
impl_item_ref.defaultness,
impl_item_vis,
);
}
}
}
}
}
pub fn provide(providers: &mut Providers<'_>) {
*providers = Providers {
privacy_access_levels,
check_private_in_public,
check_mod_privacy,
..*providers
};
}
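// `provide` plugs these functions into rustc's query system; callers go through
// the queries (e.g. `tcx.privacy_access_levels(LOCAL_CRATE)`, as
// `check_private_in_public` does below) rather than invoking them directly.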
fn | (tcx: TyCtxt<'_>, module_def_id: DefId) {
let empty_tables = ty::TypeckTables::empty(None);
// Check privacy of names not checked in previous compilation stages.
let mut visitor = NamePrivacyVisitor {
tcx,
tables: &empty_tables,
current_item: hir::DUMMY_HIR_ID,
empty_tables: &empty_tables,
};
let (module, span, hir_id) = tcx.hir().get_module(module_def_id);
intravisit::walk_mod(&mut visitor, module, hir_id);
// Check privacy of explicitly written types and traits as well as
// inferred types of expressions and patterns.
let mut visitor = TypePrivacyVisitor {
tcx,
tables: &empty_tables,
current_item: module_def_id,
in_body: false,
span,
empty_tables: &empty_tables,
};
intravisit::walk_mod(&mut visitor, module, hir_id);
}
fn privacy_access_levels(tcx: TyCtxt<'_>, krate: CrateNum) -> &AccessLevels {
assert_eq!(krate, LOCAL_CRATE);
// Build up a set of all exported items in the AST. This is a set of all
// items which are reachable from external crates based on visibility.
let mut visitor = EmbargoVisitor {
tcx,
access_levels: Default::default(),
macro_reachable: Default::default(),
prev_level: Some(AccessLevel::Public),
changed: false,
};
loop {
intravisit::walk_crate(&mut visitor, tcx.hir().krate());
if visitor.changed {
visitor.changed = false;
} else {
break;
}
}
visitor.update(hir::CRATE_HIR_ID, Some(AccessLevel::Public));
tcx.arena.alloc(visitor.access_levels)
}
fn check_private_in_public(tcx: TyCtxt<'_>, krate: CrateNum) {
assert_eq!(krate, LOCAL_CRATE);
let access_levels = tcx.privacy_access_levels(LOCAL_CRATE);
let krate = tcx.hir().krate();
let mut visitor = ObsoleteVisiblePrivateTypesVisitor {
tcx,
access_levels: &access_levels,
in_variant: false,
old_error_set: Default::default(),
};
intravisit::walk_crate(&mut visitor, krate);
let has_pub_restricted = {
let mut pub_restricted_visitor = PubRestrictedVisitor { tcx, has_pub_restricted: false };
intravisit::walk_crate(&mut pub_restricted_visitor, krate);
pub_restricted_visitor.has_pub_restricted
};
// Check for private types and traits in public interfaces.
let mut visitor = PrivateItemsInPublicInterfacesVisitor {
tcx,
has_pub_restricted,
old_error_set: &visitor.old_error_set,
};
krate.visit_all_item_likes(&mut DeepVisitor::new(&mut visitor));
}
| check_mod_privacy |
operations.rs | #![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
use snafu::{ResultExt, Snafu};
pub mod availability_sets {
use crate::models::*;
use snafu::{ResultExt, Snafu};
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
availability_set_name: &str,
subscription_id: &str,
) -> std::result::Result<AvailabilitySet, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/availabilitySets/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
availability_set_name
);
let mut url = url::Url::parse(url_str).context(get::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(get::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(get::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(get::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: AvailabilitySet =
serde_json::from_slice(rsp_body).context(get::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
get::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod get {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
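    // Usage sketch (hypothetical caller; the config setup, resource names, and
    // subscription id below are illustrative, not defined in this crate):
    //
    //     let set = availability_sets::get(&config, "my-rg", "my-avset", sub_id).await?;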
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
availability_set_name: &str,
parameters: &AvailabilitySet,
subscription_id: &str,
) -> std::result::Result<AvailabilitySet, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/availabilitySets/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
availability_set_name
);
let mut url = url::Url::parse(url_str).context(create_or_update::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(create_or_update::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        // Serialize the availability set as the JSON request body (the PUT payload).
        let req_body = bytes::Bytes::from(
            serde_json::to_vec(parameters)
                .map_err(|e| Box::new(e) as Box<dyn std::error::Error + Sync + Send>)
                .context(create_or_update::SerializeError)?,
        );
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(create_or_update::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(create_or_update::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: AvailabilitySet =
serde_json::from_slice(rsp_body).context(create_or_update::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
create_or_update::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod create_or_update {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
availability_set_name: &str,
subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/availabilitySets/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
availability_set_name
);
let mut url = url::Url::parse(url_str).context(delete::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(delete::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(delete::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(delete::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(delete::DeserializeError { body: rsp_body.clone() })?;
Ok(delete::Response::Ok200(rsp_value))
}
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
delete::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod delete {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
NoContent204,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
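    // Matching on `delete::Response` distinguishes a completed delete (`Ok200`
    // with an `OperationStatusResponse`) from an already-absent resource
    // (`NoContent204`). A sketch, with illustrative identifiers:
    //
    //     match availability_sets::delete(&config, "my-rg", "my-avset", sub_id).await? {
    //         delete::Response::Ok200(status) => println!("deleted: {:?}", status),
    //         delete::Response::NoContent204 => println!("nothing to delete"),
    //     }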
pub async fn list(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
) -> std::result::Result<AvailabilitySetListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/availabilitySets",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).context(list::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(list::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: AvailabilitySetListResult =
serde_json::from_slice(rsp_body).context(list::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
list::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod list {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_available_sizes(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
availability_set_name: &str,
subscription_id: &str,
) -> std::result::Result<VirtualMachineSizeListResult, list_available_sizes::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/availabilitySets/{}/vmSizes",
operation_config.base_path(),
subscription_id,
resource_group_name,
availability_set_name
);
let mut url = url::Url::parse(url_str).context(list_available_sizes::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_available_sizes::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list_available_sizes::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(list_available_sizes::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineSizeListResult =
serde_json::from_slice(rsp_body).context(list_available_sizes::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
list_available_sizes::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod list_available_sizes {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
pub mod virtual_machine_extension_images {
use crate::models::*;
use snafu::{ResultExt, Snafu};
pub async fn get(
operation_config: &crate::OperationConfig,
location: &str,
publisher_name: &str,
type_: &str,
version: &str,
subscription_id: &str,
) -> std::result::Result<VirtualMachineExtensionImage, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/publishers/{}/artifacttypes/vmextension/types/{}/versions/{}",
operation_config.base_path(),
subscription_id,
location,
publisher_name,
type_,
version
);
let mut url = url::Url::parse(url_str).context(get::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(get::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(get::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(get::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineExtensionImage =
serde_json::from_slice(rsp_body).context(get::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
get::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod get {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_types(
operation_config: &crate::OperationConfig,
location: &str,
publisher_name: &str,
subscription_id: &str,
) -> std::result::Result<Vec<VirtualMachineExtensionImage>, list_types::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/publishers/{}/artifacttypes/vmextension/types",
operation_config.base_path(),
subscription_id,
location,
publisher_name
);
let mut url = url::Url::parse(url_str).context(list_types::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_types::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list_types::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(list_types::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<VirtualMachineExtensionImage> =
serde_json::from_slice(rsp_body).context(list_types::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
list_types::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod list_types {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_versions(
operation_config: &crate::OperationConfig,
location: &str,
publisher_name: &str,
type_: &str,
filter: Option<&str>,
top: Option<i32>,
orderby: Option<&str>,
subscription_id: &str,
) -> std::result::Result<Vec<VirtualMachineExtensionImage>, list_versions::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/publishers/{}/artifacttypes/vmextension/types/{}/versions",
operation_config.base_path(),
subscription_id,
location,
publisher_name,
type_
);
let mut url = url::Url::parse(url_str).context(list_versions::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_versions::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(top) = top {
url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
}
if let Some(orderby) = orderby {
url.query_pairs_mut().append_pair("$orderby", orderby);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list_versions::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(list_versions::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<VirtualMachineExtensionImage> =
serde_json::from_slice(rsp_body).context(list_versions::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
list_versions::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod list_versions {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
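    // The optional arguments map directly onto OData query options: `filter` to
    // `$filter`, `top` to `$top`, and `orderby` to `$orderby`. A sketch that
    // fetches only the newest version (publisher/type values illustrative):
    //
    //     let latest = list_versions(&config, "westus2", "Some.Publisher",
    //         "SomeExtension", None, Some(1), Some("name desc"), sub_id).await?;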
}
pub mod virtual_machine_extensions {
use crate::models::*;
use snafu::{ResultExt, Snafu};
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
vm_extension_name: &str,
expand: Option<&str>,
subscription_id: &str,
) -> std::result::Result<VirtualMachineExtension, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/extensions/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name,
vm_extension_name
);
let mut url = url::Url::parse(url_str).context(get::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(get::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(get::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(get::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineExtension =
serde_json::from_slice(rsp_body).context(get::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
get::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod get {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
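    // The `expand` argument is forwarded as the `$expand` query option;
    // `Some("instanceView")` (shown here as an illustration) requests runtime
    // status along with the extension definition:
    //
    //     let ext = get(&config, "my-rg", "my-vm", "myExtension", Some("instanceView"), sub_id).await?;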
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
vm_extension_name: &str,
extension_parameters: &VirtualMachineExtension,
subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/extensions/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name,
vm_extension_name
);
let mut url = url::Url::parse(url_str).context(create_or_update::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(create_or_update::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        // Serialize the extension definition as the JSON request body (the PUT payload).
        let req_body = bytes::Bytes::from(
            serde_json::to_vec(extension_parameters)
                .map_err(|e| Box::new(e) as Box<dyn std::error::Error + Sync + Send>)
                .context(create_or_update::SerializeError)?,
        );
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(create_or_update::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(create_or_update::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineExtension =
serde_json::from_slice(rsp_body).context(create_or_update::DeserializeError { body: rsp_body.clone() })?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineExtension =
serde_json::from_slice(rsp_body).context(create_or_update::DeserializeError { body: rsp_body.clone() })?;
Ok(create_or_update::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
create_or_update::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod create_or_update {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(VirtualMachineExtension),
Created201(VirtualMachineExtension),
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
vm_extension_name: &str,
extension_parameters: &VirtualMachineExtensionUpdate,
subscription_id: &str,
) -> std::result::Result<VirtualMachineExtension, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/extensions/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name,
vm_extension_name
);
let mut url = url::Url::parse(url_str).context(update::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(update::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        // Serialize the extension update as the JSON request body (the PATCH payload).
        let req_body = bytes::Bytes::from(
            serde_json::to_vec(extension_parameters)
                .map_err(|e| Box::new(e) as Box<dyn std::error::Error + Sync + Send>)
                .context(update::SerializeError)?,
        );
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(update::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(update::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineExtension =
serde_json::from_slice(rsp_body).context(update::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
update::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod update {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
vm_extension_name: &str,
subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/extensions/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name,
vm_extension_name
);
let mut url = url::Url::parse(url_str).context(delete::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(delete::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(delete::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(delete::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(delete::DeserializeError { body: rsp_body.clone() })?;
Ok(delete::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
delete::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod delete {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
NoContent204,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
pub mod virtual_machine_images {
use crate::models::*;
use snafu::{ResultExt, Snafu};
pub async fn get(
operation_config: &crate::OperationConfig,
location: &str,
publisher_name: &str,
offer: &str,
skus: &str,
version: &str,
subscription_id: &str,
) -> std::result::Result<VirtualMachineImage, get::Error> {
let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/publishers/{}/artifacttypes/vmimage/offers/{}/skus/{}/versions/{}",
            operation_config.base_path(),
            subscription_id,
            location,
            publisher_name,
            offer,
            skus,
            version
        );
let mut url = url::Url::parse(url_str).context(get::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(get::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(get::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(get::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineImage =
serde_json::from_slice(rsp_body).context(get::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
get::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod get {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list(
operation_config: &crate::OperationConfig,
location: &str,
publisher_name: &str,
offer: &str,
skus: &str,
expand: Option<&str>,
top: Option<i32>,
orderby: Option<&str>,
subscription_id: &str,
) -> std::result::Result<Vec<VirtualMachineImageResource>, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/publishers/{}/artifacttypes/vmimage/offers/{}/skus/{}/versions",
operation_config.base_path(),
subscription_id,
location,
publisher_name,
offer,
skus
);
let mut url = url::Url::parse(url_str).context(list::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
if let Some(top) = top {
url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
}
if let Some(orderby) = orderby {
url.query_pairs_mut().append_pair("$orderby", orderby);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(list::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<VirtualMachineImageResource> =
serde_json::from_slice(rsp_body).context(list::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
list::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod list {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_offers(
operation_config: &crate::OperationConfig,
location: &str,
publisher_name: &str,
subscription_id: &str,
) -> std::result::Result<Vec<VirtualMachineImageResource>, list_offers::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/publishers/{}/artifacttypes/vmimage/offers",
operation_config.base_path(),
subscription_id,
location,
publisher_name
);
let mut url = url::Url::parse(url_str).context(list_offers::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_offers::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list_offers::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(list_offers::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<VirtualMachineImageResource> =
serde_json::from_slice(rsp_body).context(list_offers::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
list_offers::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod list_offers {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_publishers(
operation_config: &crate::OperationConfig,
location: &str,
subscription_id: &str,
) -> std::result::Result<Vec<VirtualMachineImageResource>, list_publishers::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/publishers",
operation_config.base_path(),
subscription_id,
location
);
let mut url = url::Url::parse(url_str).context(list_publishers::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_publishers::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list_publishers::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(list_publishers::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<VirtualMachineImageResource> =
serde_json::from_slice(rsp_body).context(list_publishers::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
list_publishers::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod list_publishers {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_skus(
operation_config: &crate::OperationConfig,
location: &str,
publisher_name: &str,
offer: &str,
subscription_id: &str,
) -> std::result::Result<Vec<VirtualMachineImageResource>, list_skus::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/publishers/{}/artifacttypes/vmimage/offers/{}/skus",
operation_config.base_path(),
subscription_id,
location,
publisher_name,
offer
);
let mut url = url::Url::parse(url_str).context(list_skus::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_skus::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list_skus::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(list_skus::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<VirtualMachineImageResource> =
serde_json::from_slice(rsp_body).context(list_skus::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
list_skus::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod list_skus {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
pub mod usage {
use crate::models::*;
use snafu::{ResultExt, Snafu};
pub async fn list(
operation_config: &crate::OperationConfig,
location: &str,
subscription_id: &str,
) -> std::result::Result<ListUsagesResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/usages",
operation_config.base_path(),
subscription_id,
location
);
let mut url = url::Url::parse(url_str).context(list::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(list::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ListUsagesResult =
serde_json::from_slice(rsp_body).context(list::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
list::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod list {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
pub mod virtual_machine_sizes {
use crate::models::*;
use snafu::{ResultExt, Snafu};
pub async fn list(
operation_config: &crate::OperationConfig,
location: &str,
subscription_id: &str,
) -> std::result::Result<VirtualMachineSizeListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/vmSizes",
operation_config.base_path(),
subscription_id,
location
);
let mut url = url::Url::parse(url_str).context(list::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(list::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineSizeListResult =
serde_json::from_slice(rsp_body).context(list::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
list::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod list {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
pub mod virtual_machines {
use crate::models::*;
use snafu::{ResultExt, Snafu};
pub async fn capture(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
parameters: &VirtualMachineCaptureParameters,
subscription_id: &str,
) -> std::result::Result<capture::Response, capture::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/capture",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).context(capture::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(capture::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        // Serialize the capture parameters as the JSON request body (the POST payload).
        let req_body = bytes::Bytes::from(
            serde_json::to_vec(parameters)
                .map_err(|e| Box::new(e) as Box<dyn std::error::Error + Sync + Send>)
                .context(capture::SerializeError)?,
        );
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(capture::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(capture::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineCaptureResult =
serde_json::from_slice(rsp_body).context(capture::DeserializeError { body: rsp_body.clone() })?;
Ok(capture::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(capture::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
capture::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod capture {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(VirtualMachineCaptureResult),
Accepted202,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
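    // `capture` is a long-running operation: `Accepted202` only acknowledges the
    // request, and the caller is expected to poll the operation status separately
    // (no polling helper is generated in this module). Only `Ok200` carries the
    // capture result. A sketch, with illustrative identifiers:
    //
    //     // `params: VirtualMachineCaptureParameters` is built by the caller
    //     match virtual_machines::capture(&config, "my-rg", "my-vm", &params, sub_id).await? {
    //         capture::Response::Ok200(result) => { /* inspect the capture output */ }
    //         capture::Response::Accepted202 => { /* poll for completion */ }
    //     }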
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
expand: Option<&str>,
subscription_id: &str,
) -> std::result::Result<VirtualMachine, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).context(get::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(get::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(get::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(get::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachine =
serde_json::from_slice(rsp_body).context(get::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
get::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod get {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
parameters: &VirtualMachine,
subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).context(create_or_update::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(create_or_update::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        // Serialize the virtual machine definition as the JSON request body (the PUT payload).
        let req_body = bytes::Bytes::from(
            serde_json::to_vec(parameters)
                .map_err(|e| Box::new(e) as Box<dyn std::error::Error + Sync + Send>)
                .context(create_or_update::SerializeError)?,
        );
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(create_or_update::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(create_or_update::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachine =
serde_json::from_slice(rsp_body).context(create_or_update::DeserializeError { body: rsp_body.clone() })?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachine =
serde_json::from_slice(rsp_body).context(create_or_update::DeserializeError { body: rsp_body.clone() })?;
Ok(create_or_update::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
create_or_update::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod create_or_update {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(VirtualMachine),
Created201(VirtualMachine),
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).context(delete::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(delete::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(delete::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(delete::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(delete::DeserializeError { body: rsp_body.clone() })?;
Ok(delete::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
delete::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod delete {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
NoContent204,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
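    /// Deallocates the virtual machine, releasing its compute resources.
    /// Issued as a POST with an empty body and an explicit `Content-Length: 0` header.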
pub async fn deallocate(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
subscription_id: &str,
) -> std::result::Result<deallocate::Response, deallocate::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/deallocate",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).context(deallocate::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(deallocate::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(deallocate::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(deallocate::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(deallocate::DeserializeError { body: rsp_body.clone() })?;
Ok(deallocate::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(deallocate::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
deallocate::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod deallocate {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
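    /// Marks the virtual machine as generalized so its disks can later be
    /// captured as an image. Only a 200 response is treated as success.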
pub async fn generalize(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
subscription_id: &str,
) -> std::result::Result<OperationStatusResponse, generalize::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/generalize",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).context(generalize::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(generalize::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(generalize::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(generalize::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(generalize::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
generalize::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod generalize {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
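    /// Lists all virtual machines in the given resource group.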
pub async fn list(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
) -> std::result::Result<VirtualMachineListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).context(list::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(list::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineListResult =
serde_json::from_slice(rsp_body).context(list::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
list::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod list {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_all(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<VirtualMachineListResult, list_all::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/virtualMachines",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).context(list_all::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_all::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list_all::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(list_all::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineListResult =
serde_json::from_slice(rsp_body).context(list_all::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
list_all::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod list_all {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
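    /// Lists the sizes the virtual machine can be resized to (the `vmSizes` endpoint).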
pub async fn list_available_sizes(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
subscription_id: &str,
) -> std::result::Result<VirtualMachineSizeListResult, list_available_sizes::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/vmSizes",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).context(list_available_sizes::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_available_sizes::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list_available_sizes::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(list_available_sizes::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineSizeListResult =
serde_json::from_slice(rsp_body).context(list_available_sizes::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
list_available_sizes::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod list_available_sizes {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
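    /// Powers off (stops) the virtual machine. Unlike `deallocate`, the
    /// machine keeps the resources provisioned to it.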
pub async fn power_off(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
subscription_id: &str,
) -> std::result::Result<power_off::Response, power_off::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/powerOff",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).context(power_off::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(power_off::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(power_off::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(power_off::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(power_off::DeserializeError { body: rsp_body.clone() })?;
Ok(power_off::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(power_off::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
power_off::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod power_off {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn restart(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
subscription_id: &str,
) -> std::result::Result<restart::Response, restart::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/restart",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).context(restart::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(restart::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(restart::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(restart::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(restart::DeserializeError { body: rsp_body.clone() })?;
Ok(restart::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(restart::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
restart::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod restart {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn start(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
subscription_id: &str,
) -> std::result::Result<start::Response, start::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/start",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).context(start::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(start::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(start::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(start::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(start::DeserializeError { body: rsp_body.clone() })?;
Ok(start::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(start::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
start::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod start {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn redeploy(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
subscription_id: &str,
) -> std::result::Result<redeploy::Response, redeploy::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/redeploy",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).context(redeploy::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(redeploy::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(redeploy::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(redeploy::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(redeploy::DeserializeError { body: rsp_body.clone() })?;
Ok(redeploy::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(redeploy::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
redeploy::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod redeploy {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
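// A minimal usage sketch for the virtual machine operations above. It assumes
// a caller-constructed `crate::OperationConfig` named `config`; the resource
// group, VM name, and subscription id below are hypothetical placeholders:
//
//     let vm = virtual_machines::get(&config, "example-rg", "example-vm", "subscription-id").await?;
//     match virtual_machines::restart(&config, "example-rg", "example-vm", "subscription-id").await? {
//         virtual_machines::restart::Response::Ok200(status) => println!("restarted: {:?}", status),
//         virtual_machines::restart::Response::Accepted202 => println!("restart accepted"),
//     }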
pub mod virtual_machine_scale_sets {
use crate::models::*;
use snafu::{ResultExt, Snafu};
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
subscription_id: &str,
) -> std::result::Result<VirtualMachineScaleSet, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name
);
let mut url = url::Url::parse(url_str).context(get::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(get::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(get::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(get::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineScaleSet =
serde_json::from_slice(rsp_body).context(get::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
get::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod get {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
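    /// Creates or updates a VM scale set.
    /// Sends a PUT with `parameters` serialized as the JSON request body.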
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
parameters: &VirtualMachineScaleSet,
subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name
);
let mut url = url::Url::parse(url_str).context(create_or_update::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(create_or_update::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = azure_core::to_json(parameters).context(create_or_update::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(create_or_update::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(create_or_update::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineScaleSet =
serde_json::from_slice(rsp_body).context(create_or_update::DeserializeError { body: rsp_body.clone() })?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineScaleSet =
serde_json::from_slice(rsp_body).context(create_or_update::DeserializeError { body: rsp_body.clone() })?;
Ok(create_or_update::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
create_or_update::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod create_or_update {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(VirtualMachineScaleSet),
Created201(VirtualMachineScaleSet),
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
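    /// Deletes a VM scale set.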
pub async fn delete(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
subscription_id: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            vm_scale_set_name
        );
        let mut url = url::Url::parse(url_str).context(delete::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .context(delete::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).context(delete::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.context(delete::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: OperationStatusResponse =
                    serde_json::from_slice(rsp_body).context(delete::DeserializeError { body: rsp_body.clone() })?;
                Ok(delete::Response::Ok200(rsp_value))
            }
            http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
            http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => {
                let rsp_body = rsp.body();
                delete::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                }
                .fail()
            }
        }
    }
pub mod delete {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
NoContent204,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
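    /// Deallocates scale set instances. When `vm_instance_i_ds` is `Some` it is
    /// serialized as the JSON body to target specific instances; when `None`,
    /// an empty body deallocates the whole scale set.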
pub async fn deallocate(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
vm_instance_i_ds: Option<&VirtualMachineScaleSetVmInstanceIDs>,
subscription_id: &str,
) -> std::result::Result<deallocate::Response, deallocate::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/deallocate",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name
);
let mut url = url::Url::parse(url_str).context(deallocate::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(deallocate::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = if let Some(vm_instance_i_ds) = vm_instance_i_ds {
azure_core::to_json(vm_instance_i_ds).context(deallocate::SerializeError)?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(deallocate::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(deallocate::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(deallocate::DeserializeError { body: rsp_body.clone() })?;
Ok(deallocate::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(deallocate::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
deallocate::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod deallocate {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
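    /// Deletes the specified instances from the scale set (the `delete` action
    /// endpoint). The required instance IDs are sent as the JSON body.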
pub async fn delete_instances(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
vm_instance_i_ds: &VirtualMachineScaleSetVmInstanceRequiredIDs,
subscription_id: &str,
) -> std::result::Result<delete_instances::Response, delete_instances::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/delete",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name
);
let mut url = url::Url::parse(url_str).context(delete_instances::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(delete_instances::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = azure_core::to_json(vm_instance_i_ds).context(delete_instances::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(delete_instances::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(delete_instances::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(delete_instances::DeserializeError { body: rsp_body.clone() })?;
Ok(delete_instances::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(delete_instances::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
delete_instances::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod delete_instances {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
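    /// Retrieves the runtime status (instance view) of the scale set.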
pub async fn get_instance_view(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
subscription_id: &str,
) -> std::result::Result<VirtualMachineScaleSetInstanceView, get_instance_view::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/instanceView",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name
);
let mut url = url::Url::parse(url_str).context(get_instance_view::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(get_instance_view::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(get_instance_view::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(get_instance_view::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineScaleSetInstanceView =
serde_json::from_slice(rsp_body).context(get_instance_view::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
get_instance_view::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod get_instance_view {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
) -> std::result::Result<VirtualMachineScaleSetListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).context(list::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(list::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineScaleSetListResult =
serde_json::from_slice(rsp_body).context(list::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
list::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod list {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_all(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<VirtualMachineScaleSetListWithLinkResult, list_all::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/virtualMachineScaleSets",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).context(list_all::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_all::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list_all::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(list_all::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineScaleSetListWithLinkResult =
serde_json::from_slice(rsp_body).context(list_all::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
list_all::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod list_all {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_skus(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
subscription_id: &str,
) -> std::result::Result<VirtualMachineScaleSetListSkusResult, list_skus::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/skus",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name
);
let mut url = url::Url::parse(url_str).context(list_skus::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_skus::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list_skus::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(list_skus::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineScaleSetListSkusResult =
serde_json::from_slice(rsp_body).context(list_skus::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
list_skus::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod list_skus {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn power_off(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
vm_instance_i_ds: Option<&VirtualMachineScaleSetVmInstanceIDs>,
subscription_id: &str,
) -> std::result::Result<power_off::Response, power_off::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/poweroff",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name
);
let mut url = url::Url::parse(url_str).context(power_off::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(power_off::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = if let Some(vm_instance_i_ds) = vm_instance_i_ds {
azure_core::to_json(vm_instance_i_ds).context(power_off::SerializeError)?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(power_off::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(power_off::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(power_off::DeserializeError { body: rsp_body.clone() })?;
Ok(power_off::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(power_off::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
power_off::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod power_off {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn restart(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
vm_instance_i_ds: Option<&VirtualMachineScaleSetVmInstanceIDs>,
subscription_id: &str,
) -> std::result::Result<restart::Response, restart::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/restart",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name
);
let mut url = url::Url::parse(url_str).context(restart::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(restart::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = if let Some(vm_instance_i_ds) = vm_instance_i_ds {
azure_core::to_json(vm_instance_i_ds).context(restart::SerializeError)?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(restart::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(restart::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(restart::DeserializeError { body: rsp_body.clone() })?;
Ok(restart::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(restart::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
restart::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod restart {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn start(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
vm_instance_i_ds: Option<&VirtualMachineScaleSetVmInstanceIDs>,
subscription_id: &str,
) -> std::result::Result<start::Response, start::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/start",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name
);
let mut url = url::Url::parse(url_str).context(start::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(start::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = if let Some(vm_instance_i_ds) = vm_instance_i_ds {
azure_core::to_json(vm_instance_i_ds).context(start::SerializeError)?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(start::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(start::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(start::DeserializeError { body: rsp_body.clone() })?;
Ok(start::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(start::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
start::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod start {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
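    /// Upgrades the specified instances to the latest scale set model (the
    /// `manualupgrade` endpoint). The required instance IDs are sent as the JSON body.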
pub async fn update_instances(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
vm_instance_i_ds: &VirtualMachineScaleSetVmInstanceRequiredIDs,
subscription_id: &str,
) -> std::result::Result<update_instances::Response, update_instances::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/manualupgrade",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name
);
let mut url = url::Url::parse(url_str).context(update_instances::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(update_instances::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = azure_core::to_json(vm_instance_i_ds).context(update_instances::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(update_instances::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(update_instances::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(update_instances::DeserializeError { body: rsp_body.clone() })?;
Ok(update_instances::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(update_instances::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
update_instances::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod update_instances {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn reimage(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
subscription_id: &str,
) -> std::result::Result<reimage::Response, reimage::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/reimage",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name
);
let mut url = url::Url::parse(url_str).context(reimage::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(reimage::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(reimage::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(reimage::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(reimage::DeserializeError { body: rsp_body.clone() })?;
Ok(reimage::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(reimage::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
reimage::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod reimage {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
pub mod virtual_machine_scale_set_v_ms {
use crate::models::*;
use snafu::{ResultExt, Snafu};
pub async fn reimage(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
instance_id: &str,
subscription_id: &str,
) -> std::result::Result<reimage::Response, reimage::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/virtualmachines/{}/reimage",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name,
instance_id
);
let mut url = url::Url::parse(url_str).context(reimage::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(reimage::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(reimage::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(reimage::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(reimage::DeserializeError { body: rsp_body.clone() })?;
Ok(reimage::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(reimage::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
reimage::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod reimage {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn deallocate(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
instance_id: &str,
subscription_id: &str,
) -> std::result::Result<deallocate::Response, deallocate::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/virtualmachines/{}/deallocate",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name,
instance_id
);
let mut url = url::Url::parse(url_str).context(deallocate::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(deallocate::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(deallocate::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(deallocate::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(deallocate::DeserializeError { body: rsp_body.clone() })?;
Ok(deallocate::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(deallocate::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
deallocate::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod deallocate {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
instance_id: &str,
subscription_id: &str,
) -> std::result::Result<VirtualMachineScaleSetVm, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/virtualmachines/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name,
instance_id
);
let mut url = url::Url::parse(url_str).context(get::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(get::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(get::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(get::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineScaleSetVm =
serde_json::from_slice(rsp_body).context(get::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
get::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod get {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
instance_id: &str,
subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/virtualmachines/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name,
instance_id
);
let mut url = url::Url::parse(url_str).context(delete::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(delete::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(delete::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(delete::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(delete::DeserializeError { body: rsp_body.clone() })?;
Ok(delete::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
delete::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod delete {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
NoContent204,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn get_instance_view(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
instance_id: &str,
subscription_id: &str,
) -> std::result::Result<VirtualMachineScaleSetVmInstanceView, get_instance_view::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/virtualmachines/{}/instanceView",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name,
instance_id
);
let mut url = url::Url::parse(url_str).context(get_instance_view::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(get_instance_view::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(get_instance_view::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(get_instance_view::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineScaleSetVmInstanceView =
serde_json::from_slice(rsp_body).context(get_instance_view::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
get_instance_view::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod get_instance_view {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
virtual_machine_scale_set_name: &str,
filter: Option<&str>,
select: Option<&str>,
expand: Option<&str>,
subscription_id: &str,
) -> std::result::Result<VirtualMachineScaleSetVmListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/virtualMachines",
operation_config.base_path(),
subscription_id,
resource_group_name,
virtual_machine_scale_set_name
);
let mut url = url::Url::parse(url_str).context(list::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(select) = select {
url.query_pairs_mut().append_pair("$select", select);
}
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(list::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: VirtualMachineScaleSetVmListResult =
serde_json::from_slice(rsp_body).context(list::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
list::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod list {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn power_off(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
instance_id: &str,
subscription_id: &str,
) -> std::result::Result<power_off::Response, power_off::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/virtualmachines/{}/poweroff",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name,
instance_id
);
let mut url = url::Url::parse(url_str).context(power_off::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(power_off::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(power_off::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(power_off::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(power_off::DeserializeError { body: rsp_body.clone() })?;
Ok(power_off::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(power_off::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
power_off::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod power_off {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn restart(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
instance_id: &str,
subscription_id: &str,
) -> std::result::Result<restart::Response, restart::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/virtualmachines/{}/restart",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name,
instance_id
);
let mut url = url::Url::parse(url_str).context(restart::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(restart::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(restart::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(restart::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(restart::DeserializeError { body: rsp_body.clone() })?;
Ok(restart::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(restart::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
restart::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod restart {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn start(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
instance_id: &str,
subscription_id: &str,
) -> std::result::Result<start::Response, start::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/virtualmachines/{}/start",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name,
instance_id
);
let mut url = url::Url::parse(url_str).context(start::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(start::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(start::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(start::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(start::DeserializeError { body: rsp_body.clone() })?;
Ok(start::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(start::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
start::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
}
pub mod start {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(OperationStatusResponse),
Accepted202,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
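    // Hypothetical usage sketch (not part of the generated client): the
    // resource names and subscription id below are placeholders, and the
    // `OperationConfig` is assumed to be constructed elsewhere in the crate.
    // It shows how a caller might drive the long-running instance-level
    // `reimage` operation above and branch on the 200-vs-202 responses.
    pub async fn example_reimage_instance(
        operation_config: &crate::OperationConfig,
    ) -> std::result::Result<(), reimage::Error> {
        match reimage(operation_config, "example-rg", "example-vmss", "0", "example-subscription").await? {
            // 200: the operation completed synchronously; a status body is available.
            reimage::Response::Ok200(_status) => {}
            // 202: accepted; the caller is expected to poll the operation status.
            reimage::Response::Accepted202 => {}
        }
        Ok(())
    }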
}
| {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name
);
let mut url = url::Url::parse(url_str).context(delete::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(delete::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(delete::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(delete::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationStatusResponse =
serde_json::from_slice(rsp_body).context(delete::DeserializeError { body: rsp_body.clone() })?;
Ok(delete::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
delete::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
}
.fail()
}
}
} |
analysis.py | import numpy as np
import torch
import matplotlib.pyplot as plt
import os
import math
import scipy.stats as stats
import lsdr.envs.environment_sampler as env_sampler
from enum import IntEnum
############################
# Optimization Loss Options
############################
class | (IntEnum):
REWARDS = 1
KL_OPT = 2
REW_AND_KL = 3
def reward_function(x):
return np.exp(-(x-20)**2)
def reward_function_v2(x):
return np.sin(np.sqrt(x**2))
def calculate_reward(x):
return reward_function(x)
def setup_distributions():
##############################
# Initial distribution configs
##############################
test_params = [
np.array([-30.0, 50.0])
]
    # Modify test_params if the initial train and test
    # distributions should differ.
ranges = np.asarray(test_params)
mean = ranges.mean(-1)
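    # The covariance below matches the moments of a uniform distribution
    # over each range: Var[U(a, b)] = (b - a)^2 / 12.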
covar = (((ranges[:, 1] - ranges[:, 0])**2.0) / 12.0) * np.eye(
ranges.shape[0])
mu_train, L_train = mean, np.linalg.cholesky(covar)
dist_params = [mu_train, L_train]
sampler = env_sampler.init_env_sampler(
'hopper',
seed=0,
experiment_id='test_kl_div_loss_0',
init_dist_params=dist_params,
dist_type='gaussian',
test_dist_params=None)
############################
# Train Distribution
############################
p_train = sampler.train_dist
############################
# Test Distribution
############################
ranges = np.asarray(test_params)
mean = ranges.mean(-1)
covar = (((ranges[:, 1] - ranges[:, 0])**2.0) / 12.0) * np.eye(
ranges.shape[0])
mu_test, L_test = mean, np.linalg.cholesky(covar)
mu_test = torch.tensor(mu_test)
L_test = torch.tensor(L_test)
mu_test = mu_test.float().detach().requires_grad_(False)
L_test = L_test.float().detach().requires_grad_(False)
p_test = torch.distributions.MultivariateNormal(mu_test,
scale_tril=L_test)
train_mean = p_train.mean.detach()
train_std = (p_train._unbroadcasted_scale_tril).diag().detach()
test_mean = p_test.mean.detach()
test_std = (p_test._unbroadcasted_scale_tril).diag().detach()
print('Initial Distributions')
print('Train Distribution Mean ', train_mean)
print('Train Distribution STD ', train_std)
print('Test Distribution Mean ', test_mean)
print('Test Distribution STD ', test_std)
############################
# Plot Initial Distribution
############################
plot_distrs(train_mean, train_std,
test_mean, test_std,
plot_name='initial_train_distr')
return sampler, p_train, p_test
def plot_distrs(train_mean, train_std,
                test_mean, test_std,
                plot_name='distributions'):
    plt.figure()
    mu = train_mean
    # The inputs are already standard deviations (the diagonal of the
    # Cholesky factor), so use them directly instead of sqrt-ing again.
    sigma = float(train_std)
    x = np.linspace(mu - 3*sigma, mu + 3*sigma, 100)
    plt.plot(x, stats.norm.pdf(x, mu, sigma), color='green',
             label=r'$p_{\phi}(z)$',
             linestyle='-.')
    mu = test_mean
    sigma = float(test_std)
    x = np.linspace(mu - 3*sigma, mu + 3*sigma, 100)
    plt.plot(x, stats.norm.pdf(x, mu, sigma), color='red', label=r'$p(z)$')
rew_func_range = np.arange(-20, 50, 1)
    plt.plot(rew_func_range, calculate_reward(rew_func_range),
             color='orange',
             label=r'$R(\Theta, z)$')
plt.legend(loc='upper left')
res_dir = 'grad_analysis'
if not os.path.exists(res_dir):
os.makedirs(res_dir)
plotname = res_dir + '/' + plot_name + '.png'
plt.savefig(plotname)
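# The reward term in optimize_distribution() below is a score-function
# (REINFORCE) surrogate: since grad E_{z~p_phi}[R(z)] =
# E_{z~p_phi}[R(z) * grad log p_phi(z)], multiplying rewards by log-probs and
# averaging gives an unbiased gradient estimate. A minimal standalone sketch
# of the same estimator (hypothetical helper, not used by this script):
def _score_function_gradient_demo(n_samples=1000):
    mu = torch.zeros(1, requires_grad=True)
    dist = torch.distributions.Normal(mu, 1.0)
    z = dist.sample((n_samples,))            # sampling blocks gradient flow
    reward = torch.exp(-(z - 2.0) ** 2)      # toy reward peaked at z = 2
    surrogate = -(reward * dist.log_prob(z)).mean()
    surrogate.backward()                     # mu.grad ~ -d/dmu E[R(z)]
    return mu.grad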
def optimize_distribution(sampler, p_train, p_test, objective_opt):
epochs, n_samples = 10000, 1000
alpha = 1e-5
opt = torch.optim.Adam(sampler.params, 1e-2)
mu_grads = []
var_grads = []
def store_mu_grad_rew(grad):
mu_grads.append(np.copy(grad))
def store_tril_grad_rew(grad):
var_grads.append(np.copy(grad))
for _ in range(epochs):
opt.zero_grad()
####################
# Sample from p_test
####################
z = p_test.sample(torch.Size([n_samples]))
contexts = p_train.sample(torch.Size([n_samples]))
################
# Eval Log probs
################
log_p_train = p_train.log_prob(z)
log_p_test = p_test.log_prob(z)
################
# Calculate KL
################
kl_samples = log_p_test - log_p_train
kl_loss = kl_samples.mean(0)
#######################
# Calculate Reward term
#######################
log_probs_context = p_train.log_prob(contexts)
reward_loss = (calculate_reward(contexts) * log_probs_context).mean(0)
if objective_opt == Objectives.REWARDS:
# For this to converge to the reward function,
# need to change `z` sampling to be from train
# distribution.
total_loss = - reward_loss
elif objective_opt == Objectives.KL_OPT:
total_loss = kl_loss
elif objective_opt == Objectives.REW_AND_KL:
total_loss = (-(reward_loss) + (alpha*kl_loss))
else:
raise ValueError('Invalid op')
total_loss.mean().backward()
opt.step()
train_mean = p_train.mean.detach()
train_std = (p_train._unbroadcasted_scale_tril).diag().detach()
test_mean = p_test.mean.detach()
test_std = (p_test._unbroadcasted_scale_tril).diag().detach()
print('Updated Distributions')
print('######################')
print('Train Distribution Mean ', train_mean)
print('Train Distribution STD ', train_std)
print('Test Distribution Mean ', test_mean)
print('Test Distribution STD ', test_std)
plot_distrs(train_mean, train_std,
test_mean, test_std,
plot_name='final_distributions')
if __name__ == '__main__':
sampler, p_train, p_test = setup_distributions()
# objective_opt = Objectives.REWARDS
# objective_opt = Objectives.KL_OPT
objective_opt = Objectives.REW_AND_KL
optimize_distribution(sampler,
p_train,
p_test,
objective_opt)
| Objectives |
template.actions.ts | import { utils } from '@common/utils';
import { IRequest, EStoreActions, IntPayload, IntServerPayload, IError } from '@interfaces';
import { ITemplatesBody, ITemplatesModel, TemplatesModel, FaIcons } from '@models';
export const templateupdate: (req: IRequest) => Promise<IRequest | any> = async (
req: IRequest
): Promise<IRequest | any> => {
const hrstart: [number, number] = process.hrtime();
const body: ITemplatesBody = { ...req.body };
body.email = req.user.email;
body.username = req.user.name;
body.transid = req.transid; | body.org = req.user.org;
let id: string;
let isnew = false;
let doc: ITemplatesModel | null;
if (!body.id) {
isnew = true;
doc = new TemplatesModel({});
doc.annotations = {
createdbyemail: body.email,
            createdbyname: body.username,
createddate: new Date(),
modifiedbyemail: body.email,
            modifiedbyname: body.username,
modifieddate: new Date(),
transid: body.transid,
};
doc.project = {
org: req.user.org,
};
id = doc._id.toString();
} else {
id = body.id;
doc = await TemplatesModel.findOne({ 'project.org': body.org, _id: id }, {}).exec();
if (doc === null) {
return Promise.reject({
close: false,
displayerr: `No Document with id ${id} Found`,
});
}
doc.annotations.modifiedbyname = body.username;
doc.annotations.modifiedbyemail = body.email;
doc.annotations.modifieddate = new Date();
doc.annotations.transid = body.transid;
}
doc.name = body.name;
doc.annotations.transid = body.transid;
doc.template = body.template;
doc.description = body.description;
doc.type = body.type;
await doc.save().catch(async (err) => {
        void utils.logError(`$templates (templatesupdate): templates update failed for doc ${body.name}`, err);
return Promise.reject({
close: false,
displayerr: err.message,
});
});
utils.logInfo(
`$templates (templatesupdate): templates updated - email: ${body.email}`,
req.transid,
process.hrtime(hrstart)
);
const payload: IntPayload[] = [
{
key: 'id',
loaded: true,
store: body.store /** not used as forcestore is enabled */,
type: isnew ? EStoreActions.ADD_STORE_RCD : EStoreActions.UPD_STORE_RCD,
values: {
viewicon: `${FaIcons.viewicon}`,
type: doc.type,
template: doc.template,
org: doc.project.org,
name: doc.name,
id,
editicon: `${FaIcons.editicon}`,
description: doc.description,
deleteicon: `${FaIcons.deleteicon}`,
createddate: doc.annotations.createddate.toISOString(),
createdby: doc.annotations.createdbyemail,
},
},
];
const serverPayload: IntServerPayload = {
message: 'Templates Updated',
payload,
success: true /** just display a success message */,
};
return Promise.resolve(serverPayload);
};
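/**
 * Hypothetical wiring sketch (assumed route and express-style handler names,
 * not part of this module): templateupdate is meant to be mounted behind an
 * authenticated POST route, resolving to the IntServerPayload on success.
 *
 *   router.post('/api/templates/update', (req, res) =>
 *       templateupdate(req as IRequest)
 *           .then((payload) => res.json(payload))
 *           .catch((err) => res.status(400).json(err)));
 */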
export const templatedelete: (req: IRequest) => Promise<IRequest | any> = async (
req: IRequest
): Promise<IRequest | any> => {
const hrstart: [number, number] = process.hrtime();
const body: ITemplatesBody = { ...req.body };
body.org = req.user.org;
if (!body.id) {
return Promise.reject({
close: false,
displayerr: 'No Id Found',
});
}
const id: string = body.id;
await TemplatesModel.deleteOne({ 'project.org': body.org, _id: id })
.exec()
.catch(async (err: IError) => {
err.trace = ['@at $templates (templatedelete)'];
err.message = 'The document to delete could not be found';
err.id = body.id;
return Promise.reject(err);
});
utils.logInfo(
        `$templates (templatedelete): template deleted - email: ${body.email}`,
req.transid,
process.hrtime(hrstart)
);
const payload: IntPayload[] = [
{
key: 'id',
loaded: true,
options: {
field: 'templates',
},
store: body.store /** not used as forcestore is enabled */,
type: EStoreActions.REM_STORE_RCD,
values: {
id,
},
},
];
const serverPayload: IntServerPayload = {
        message: 'Template Deleted',
payload,
success: true /** just display a success message */,
};
return Promise.resolve(serverPayload);
}; | body.sessionid = req.sessionid; |
aws_dynamodb_test.py | """Tests for perfkitbenchmarker.providers.aws.aws_dynamodb."""
import json
import unittest
from absl import flags
from absl.testing import flagsaver
from absl.testing import parameterized
import mock
from perfkitbenchmarker import errors
from perfkitbenchmarker.providers.aws import aws_dynamodb
from perfkitbenchmarker.providers.aws import util
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
_DESCRIBE_TABLE_OUTPUT = """
{
"Table": {
"AttributeDefinitions": [
{
"AttributeName": "test",
"AttributeType": "S"
}
],
"TableName": "test",
"KeySchema": [
{
"AttributeName": "test",
"KeyType": "HASH"
}
],
"TableStatus": "ACTIVE",
"CreationDateTime": 1611605356.518,
"ProvisionedThroughput": {
"NumberOfDecreasesToday": 0,
"ReadCapacityUnits": 5,
"WriteCapacityUnits": 0
},
"TableSizeBytes": 0,
"ItemCount": 0,
"TableArn": "arn:aws:dynamodb:us-east-2:835761027970:table/test",
"TableId": "ecf0a60a-f18d-4666-affc-525ca6e1d207"
}
}
"""
@flagsaver.flagsaver
def GetTestDynamoDBInstance(table_name='test_table'):
FLAGS.zone = ['us-east-1a']
return aws_dynamodb.AwsDynamoDBInstance(table_name)
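# Note: the @flagsaver.flagsaver decorator snapshots absl FLAGS before the
# call and restores them afterwards, so flag mutations stay test-local.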
class AwsDynamodbTest(pkb_common_test_case.PkbCommonTestCase):
def assertArgumentInCommand(self, mock_cmd, arg):
"""Given an AWS command, checks that the argument is present."""
command = ' '.join(mock_cmd.call_args[0][0])
self.assertIn(arg, command)
@flagsaver.flagsaver
def testInitTableName(self):
test_instance = GetTestDynamoDBInstance('dynamo_test_table')
self.assertEqual(test_instance.table_name, 'dynamo_test_table')
@flagsaver.flagsaver
def testInitLocation(self):
FLAGS.zone = ['us-east-1a']
test_instance = aws_dynamodb.AwsDynamoDBInstance('test_table')
self.assertEqual(test_instance.zone, 'us-east-1a')
self.assertEqual(test_instance.region, 'us-east-1')
@flagsaver.flagsaver
def testInitKeysAndAttributes(self):
FLAGS.aws_dynamodb_primarykey = 'test_primary_key'
FLAGS.aws_dynamodb_sortkey = 'test_sort_key'
FLAGS.aws_dynamodb_attributetype = 'test_attribute_type'
test_instance = GetTestDynamoDBInstance()
self.assertEqual(test_instance.primary_key,
'{"AttributeName": "test_primary_key","KeyType": "HASH"}')
self.assertEqual(test_instance.sort_key,
'{"AttributeName": "test_sort_key","KeyType": "RANGE"}')
self.assertEqual(
test_instance.part_attributes,
'{"AttributeName": "test_primary_key","AttributeType": "test_attribute_type"}'
)
self.assertEqual(
test_instance.sort_attributes,
'{"AttributeName": "test_sort_key","AttributeType": "test_attribute_type"}'
)
@flagsaver.flagsaver
def testInitThroughput(self):
FLAGS.aws_dynamodb_read_capacity = 1
FLAGS.aws_dynamodb_write_capacity = 2
test_instance = GetTestDynamoDBInstance()
self.assertEqual(test_instance.throughput,
'ReadCapacityUnits=1,WriteCapacityUnits=2')
@flagsaver.flagsaver
def testGetResourceMetadata(self):
FLAGS.zone = ['us-east-1a']
FLAGS.aws_dynamodb_primarykey = 'test_primary_key'
FLAGS.aws_dynamodb_use_sort = 'test_use_sort'
FLAGS.aws_dynamodb_sortkey = 'test_sortkey'
FLAGS.aws_dynamodb_attributetype = 'test_attribute_type'
FLAGS.aws_dynamodb_read_capacity = 1
FLAGS.aws_dynamodb_write_capacity = 2
FLAGS.aws_dynamodb_lsi_count = 3
FLAGS.aws_dynamodb_gsi_count = 4
FLAGS.aws_dynamodb_ycsb_consistentReads = 5
FLAGS.aws_dynamodb_connectMax = 6
test_instance = aws_dynamodb.AwsDynamoDBInstance('test_table')
actual_metadata = test_instance.GetResourceMetadata()
expected_metadata = {
'aws_dynamodb_primarykey': 'test_primary_key',
'aws_dynamodb_use_sort': 'test_use_sort',
'aws_dynamodb_sortkey': 'test_sortkey',
'aws_dynamodb_attributetype': 'test_attribute_type',
'aws_dynamodb_read_capacity': 1,
'aws_dynamodb_write_capacity': 2,
'aws_dynamodb_lsi_count': 3,
'aws_dynamodb_gsi_count': 4,
'aws_dynamodb_consistentReads': 5,
'aws_dynamodb_connectMax': 6,
}
self.assertEqual(actual_metadata, expected_metadata)
@parameterized.named_parameters({
'testcase_name': 'ValidOutput',
'output': json.loads(_DESCRIBE_TABLE_OUTPUT)['Table'],
'expected': True
}, {
'testcase_name': 'EmptyOutput',
'output': {},
'expected': False
})
def | (self, output, expected):
test_instance = GetTestDynamoDBInstance()
self.enter_context(
mock.patch.object(
test_instance,
'_DescribeTable',
return_value=output))
actual = test_instance._Exists()
self.assertEqual(actual, expected)
def testSetThroughput(self):
test_instance = GetTestDynamoDBInstance(table_name='throughput_table')
cmd = self.enter_context(
mock.patch.object(
util,
'IssueRetryableCommand'))
self.enter_context(mock.patch.object(test_instance, '_IsReady'))
test_instance.SetThroughput(5, 5)
self.assertArgumentInCommand(cmd, '--table-name throughput_table')
self.assertArgumentInCommand(cmd, '--region us-east-1')
self.assertArgumentInCommand(
cmd,
'--provisioned-throughput ReadCapacityUnits=5,WriteCapacityUnits=5')
def testGetThroughput(self):
test_instance = GetTestDynamoDBInstance()
output = json.loads(_DESCRIBE_TABLE_OUTPUT)['Table']
self.enter_context(
mock.patch.object(
test_instance,
'_DescribeTable',
return_value=output))
actual_rcu, actual_wcu = test_instance._GetThroughput()
self.assertEqual(actual_rcu, 5)
self.assertEqual(actual_wcu, 0)
def testTagResourceFailsWithNonExistentResource(self):
test_instance = GetTestDynamoDBInstance()
# Mark instance as non-existing.
self.enter_context(
mock.patch.object(test_instance, '_Exists', return_value=False))
with self.assertRaises(errors.Resource.CreationError):
test_instance._GetTagResourceCommand(['test', 'tag'])
def testUpdateWithDefaultTags(self):
test_instance = GetTestDynamoDBInstance()
test_instance.resource_arn = 'test_arn'
cmd = self.enter_context(mock.patch.object(util, 'IssueRetryableCommand'))
# Mark instance as existing.
self.enter_context(
mock.patch.object(test_instance, '_Exists', return_value=True))
test_instance.UpdateWithDefaultTags()
self.assertArgumentInCommand(cmd, '--region us-east-1')
self.assertArgumentInCommand(cmd, '--resource-arn test_arn')
def testUpdateTimeout(self):
test_instance = GetTestDynamoDBInstance()
test_instance.resource_arn = 'test_arn'
# Mock the aws util tags function.
self.enter_context(
mock.patch.object(
util,
'MakeDefaultTags',
autospec=True,
return_value={'timeout_utc': 60}))
# Mock the actual call to the CLI
cmd = self.enter_context(mock.patch.object(util, 'IssueRetryableCommand'))
# Mark instance as existing.
self.enter_context(
mock.patch.object(test_instance, '_Exists', return_value=True))
test_instance.UpdateTimeout(timeout_minutes=60)
self.assertArgumentInCommand(cmd, '--tags Key=timeout_utc,Value=60')
@parameterized.named_parameters(
{
'testcase_name': 'OnlyRcu',
'rcu': 5,
'wcu': 500,
}, {
'testcase_name': 'OnlyWcu',
'rcu': 500,
'wcu': 5,
}, {
'testcase_name': 'Both',
'rcu': 500,
'wcu': 500,
})
def testFreezeLowersThroughputToFreeTier(self, rcu, wcu):
test_instance = GetTestDynamoDBInstance()
self.enter_context(
mock.patch.object(
test_instance, '_GetThroughput', return_value=(rcu, wcu)))
mock_set_throughput = self.enter_context(
mock.patch.object(test_instance, 'SetThroughput', autospec=True))
test_instance._Freeze()
mock_set_throughput.assert_called_once_with(
rcu=aws_dynamodb._FREE_TIER_RCU, wcu=aws_dynamodb._FREE_TIER_WCU)
def testFreezeDoesNotLowerThroughputIfAlreadyAtFreeTier(self):
test_instance = GetTestDynamoDBInstance()
self.enter_context(
mock.patch.object(test_instance, '_GetThroughput', return_value=(5, 5)))
mock_set_throughput = self.enter_context(
mock.patch.object(test_instance, 'SetThroughput', autospec=True))
test_instance._Freeze()
mock_set_throughput.assert_not_called()
def testRestoreSetsThroughputBackToOriginalLevels(self):
test_instance = GetTestDynamoDBInstance()
test_instance.rcu = 5000
test_instance.wcu = 1000
mock_set_throughput = self.enter_context(
mock.patch.object(test_instance, 'SetThroughput', autospec=True))
test_instance._Restore()
mock_set_throughput.assert_called_once_with(
rcu=5000, wcu=1000)
if __name__ == '__main__':
unittest.main()
| testExists |
lib.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::encrypted::{EncNetworkAddress, Key, KeyVersion};
use diem_crypto::{
traits::{CryptoMaterialError, ValidCryptoMaterialStringExt},
x25519,
};
use move_core_types::account_address::AccountAddress;
#[cfg(any(test, feature = "fuzzing"))]
use proptest::{collection::vec, prelude::*};
#[cfg(any(test, feature = "fuzzing"))]
use proptest_derive::Arbitrary;
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
use std::{
convert::{Into, TryFrom},
fmt,
iter::IntoIterator,
net::{self, IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr, ToSocketAddrs},
num,
str::FromStr,
string::ToString,
};
use thiserror::Error;
pub mod encrypted;
const MAX_DNS_NAME_SIZE: usize = 255;
/// ## Overview
///
/// Diem `NetworkAddress` is a compact, efficient, self-describing and
/// future-proof network address represented as a stack of protocols. Essentially
/// libp2p's [multiaddr] but using [`bcs`] to describe the binary format.
///
/// Most validators will advertise a network address like:
///
/// `/dns/example.com/tcp/6180/ln-noise-ik/<x25519-pubkey>/ln-handshake/1`
///
/// Unpacking, the above effectively means:
///
/// 1. Resolve the DNS name "example.com" to an ip address, `addr`.
/// 2. Open a TCP connection to `(addr, 6180)`.
/// 3. Perform a Noise IK handshake and assume the peer's static pubkey is
/// `<x25519-pubkey>`. After this step, we will have a secure, authenticated
/// connection with the peer.
/// 4. Perform a DiemNet version negotiation handshake (version 1).
///
/// ## Self-describing, Upgradable
///
/// One key concept behind `NetworkAddress` is that it is fully self-describing,
/// which allows us to easily "pre-negotiate" protocols while also allowing for
/// future upgrades. For example, it is generally unsafe to negotiate a secure
/// transport in-band. Instead, with `NetworkAddress` we can advertise (via
/// discovery) the specific secure transport protocol and public key that we
/// support (and even advertise multiple incompatible versions). When a peer
/// wishes to establish a connection with us, they already know which secure
/// transport protocol to use; in this sense, the secure transport protocol is
/// "pre-negotiated" by the dialier selecting which advertised protocol to use.
///
/// Each network address is encoded with the length of the encoded `NetworkAddress`
/// and then the serialized protocol slices to allow for transparent upgradeability.
/// For example, if the current software cannot decode a `NetworkAddress` within
/// a `Vec<NetworkAddress>` it can still decode the underlying `Vec<u8>` and
/// retrieve the remaining `Vec<NetworkAddress>`.
///
/// ## Transport
///
/// In addition, `NetworkAddress` is integrated with the DiemNet concept of a
/// [`Transport`], which takes a `NetworkAddress` when dialing and peels off
/// [`Protocol`]s to establish a connection and perform initial handshakes.
/// Similarly, the [`Transport`] takes `NetworkAddress` to listen on, which tells
/// it what protocols to expect on the socket.
///
/// ## Example
///
/// An example of a serialized `NetworkAddress`:
///
/// ```rust
/// // human-readable format:
/// //
/// // "/ip4/10.0.0.16/tcp/80"
/// //
/// // serialized NetworkAddress:
/// //
/// // [ 09 02 00 0a 00 00 10 05 80 00 ]
/// // \ \ \ \ \ \
/// // \ \ \ \ \ '-- u16 tcp port
/// // \ \ \ \ '-- uvarint protocol id for /tcp
/// // \ \ \ '-- u32 ipv4 address
/// // \ \ '-- uvarint protocol id for /ip4
/// // \ '-- uvarint number of protocols
/// // '-- length of encoded network address
///
/// use diem_network_address::NetworkAddress;
/// use bcs;
/// use std::{str::FromStr, convert::TryFrom};
///
/// let addr = NetworkAddress::from_str("/ip4/10.0.0.16/tcp/80").unwrap();
/// let actual_ser_addr = bcs::to_bytes(&addr).unwrap();
///
/// let expected_ser_addr: Vec<u8> = [9, 2, 0, 10, 0, 0, 16, 5, 80, 0].to_vec();
///
/// assert_eq!(expected_ser_addr, actual_ser_addr);
/// ```
///
/// [multiaddr]: https://multiformats.io/multiaddr/
/// [`Transport`]: ../netcore/transport/trait.Transport.html
#[derive(Clone, Eq, Hash, PartialEq)]
pub struct NetworkAddress(Vec<Protocol>);
/// A single protocol in the [`NetworkAddress`] protocol stack.
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
#[cfg_attr(any(test, feature = "fuzzing"), derive(Arbitrary))]
pub enum Protocol {
Ip4(Ipv4Addr),
Ip6(Ipv6Addr),
Dns(DnsName),
Dns4(DnsName),
Dns6(DnsName),
Tcp(u16),
Memory(u16),
// human-readable x25519::PublicKey is lower-case hex encoded
NoiseIK(x25519::PublicKey),
// TODO(philiphayes): use actual handshake::MessagingProtocolVersion. we
// probably need to move network wire into its own crate to avoid circular
// dependency b/w network and types.
Handshake(u8),
}
/// A minimally parsed DNS name. We don't really do any checking other than
/// enforcing:
///
/// 1. it is not an empty string
/// 2. it is not larger than 255 bytes
/// 3. it does not contain any forward slash '/' characters
///
/// From the [DNS name syntax RFC](https://tools.ietf.org/html/rfc2181#page-13),
/// the standard rules are:
///
/// 1. the total size <= 255 bytes
/// 2. each label <= 63 bytes
/// 3. any binary string is valid
///
/// So the restrictions we're adding are (1) no '/' characters and (2) the name
/// is a valid unicode string. We do this because '/' characters are already our
/// protocol delimiter and Rust's [`std::net::ToSocketAddrs`] API requires a
/// `&str`.
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize)]
pub struct DnsName(String);
/// Possible errors when parsing a human-readable [`NetworkAddress`].
#[derive(Error, Debug)]
pub enum ParseError {
#[error("unknown protocol type: '{0}'")]
UnknownProtocolType(String),
#[error("unexpected end of protocol string")]
UnexpectedEnd,
#[error("error parsing ip4/ip6 address: {0}")]
ParseAddrError(#[from] net::AddrParseError),
#[error("error parsing integer: {0}")]
ParseIntError(#[from] num::ParseIntError),
#[error("error parsing x25519 public key: {0}")]
ParseX25519PubkeyError(#[from] CryptoMaterialError),
#[error("network address cannot be empty")]
EmptyProtocolString,
#[error("protocol string must start with '/'")]
InvalidProtocolString,
#[error("dns name cannot be empty")]
EmptyDnsNameString,
#[error("dns name cannot contain '/' characters")]
InvalidDnsNameCharacter,
#[error("dns name is too long: len: {0} bytes, max len: 255 bytes")]
DnsNameTooLong(usize),
#[error("error decrypting network address")]
DecryptError,
#[error("bcs error: {0}")]
BCSError(#[from] bcs::Error),
}
#[derive(Error, Debug)]
#[error("network address cannot be empty")]
pub struct EmptyError;
////////////////////
// NetworkAddress //
////////////////////
impl NetworkAddress {
fn new(protocols: Vec<Protocol>) -> Self {
Self(protocols)
}
pub fn as_slice(&self) -> &[Protocol] {
self.0.as_slice()
}
pub fn push(mut self, proto: Protocol) -> Self {
self.0.push(proto);
self
}
pub fn extend_from_slice(mut self, protos: &[Protocol]) -> Self {
self.0.extend_from_slice(protos);
self
}
/// See [`EncNetworkAddress::encrypt`].
pub fn encrypt(
self,
shared_val_netaddr_key: &Key,
key_version: KeyVersion,
account: &AccountAddress,
seq_num: u64,
addr_idx: u32,
) -> Result<EncNetworkAddress, ParseError> {
EncNetworkAddress::encrypt(
self,
shared_val_netaddr_key,
key_version,
account,
seq_num,
addr_idx,
)
}
/// Given a base `NetworkAddress`, append production protocols and
/// return the modified `NetworkAddress`.
///
/// ### Example
///
/// ```rust
/// use diem_crypto::{traits::ValidCryptoMaterialStringExt, x25519};
/// use diem_network_address::NetworkAddress;
/// use std::str::FromStr;
///
/// let pubkey_str = "080e287879c918794170e258bfaddd75acac5b3e350419044655e4983a487120";
/// let pubkey = x25519::PublicKey::from_encoded_string(pubkey_str).unwrap();
/// let addr = NetworkAddress::from_str("/dns/example.com/tcp/6180").unwrap();
/// let addr = addr.append_prod_protos(pubkey, 0);
/// assert_eq!(
/// addr.to_string(),
/// "/dns/example.com/tcp/6180/ln-noise-ik/080e287879c918794170e258bfaddd75acac5b3e350419044655e4983a487120/ln-handshake/0",
/// );
/// ```
// TODO(philiphayes): use handshake version enum
pub fn append_prod_protos(
self,
network_pubkey: x25519::PublicKey,
handshake_version: u8,
) -> Self {
self.push(Protocol::NoiseIK(network_pubkey))
.push(Protocol::Handshake(handshake_version))
}
/// Check that a `NetworkAddress` looks like a typical DiemNet address with
/// associated protocols.
///
/// "typical" DiemNet addresses begin with a transport protocol:
///
/// `"/ip4/<addr>/tcp/<port>"` or
/// `"/ip6/<addr>/tcp/<port>"` or
/// `"/dns4/<domain>/tcp/<port>"` or
/// `"/dns6/<domain>/tcp/<port>"` or
/// `"/dns/<domain>/tcp/<port>"` or
/// cfg!(test) `"/memory/<port>"`
///
/// followed by transport upgrade handshake protocols:
///
/// `"/ln-noise-ik/<pubkey>/ln-handshake/<version>"`
///
/// ### Example
///
/// ```rust
/// use diem_network_address::NetworkAddress;
/// use std::str::FromStr;
///
/// let addr_str = "/ip4/1.2.3.4/tcp/6180/ln-noise-ik/080e287879c918794170e258bfaddd75acac5b3e350419044655e4983a487120/ln-handshake/0";
/// let addr = NetworkAddress::from_str(addr_str).unwrap();
/// assert!(addr.is_diemnet_addr());
/// ```
pub fn is_diemnet_addr(&self) -> bool {
parse_diemnet_protos(self.as_slice()).is_some()
}
/// Retrieves the IP address from the network address
pub fn find_ip_addr(&self) -> Option<IpAddr> {
self.0.iter().find_map(|proto| match proto {
Protocol::Ip4(addr) => Some(IpAddr::V4(*addr)),
Protocol::Ip6(addr) => Some(IpAddr::V6(*addr)),
_ => None,
})
}
/// A temporary, hacky function to parse out the first `/ln-noise-ik/<pubkey>` from
/// a `NetworkAddress`. We can remove this soon, when we move to the interim
/// "monolithic" transport model.
pub fn find_noise_proto(&self) -> Option<x25519::PublicKey> {
self.0.iter().find_map(|proto| match proto {
Protocol::NoiseIK(pubkey) => Some(*pubkey),
_ => None,
})
}
/// A function to rotate public keys for `NoiseIK` protocols
pub fn rotate_noise_public_key(
&mut self,
to_replace: &x25519::PublicKey,
new_public_key: &x25519::PublicKey,
) {
for protocol in self.0.iter_mut() {
// Replace the public key in any Noise protocols that match the key
if let Protocol::NoiseIK(public_key) = protocol {
if public_key == to_replace {
*protocol = Protocol::NoiseIK(*new_public_key);
}
}
}
}
#[cfg(any(test, feature = "fuzzing"))]
pub fn mock() -> Self {
NetworkAddress::new(vec![Protocol::Memory(1234)])
}
}
impl IntoIterator for NetworkAddress {
type Item = Protocol;
type IntoIter = std::vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter {
self.0.into_iter()
}
}
impl FromStr for NetworkAddress {
type Err = ParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
if s.is_empty() {
return Err(ParseError::EmptyProtocolString);
}
let mut protocols = Vec::new();
let mut parts_iter = s.split('/');
// the first character must be '/'
if parts_iter.next() != Some("") {
return Err(ParseError::InvalidProtocolString);
}
// parse all `Protocol`s
while let Some(protocol_type) = parts_iter.next() {
protocols.push(Protocol::parse(protocol_type, &mut parts_iter)?);
}
Ok(NetworkAddress::new(protocols))
}
}
impl ToSocketAddrs for NetworkAddress {
type Iter = std::vec::IntoIter<SocketAddr>;
fn to_socket_addrs(&self) -> Result<Self::Iter, std::io::Error> {
if let Some(((ipaddr, port), _)) = parse_ip_tcp(self.as_slice()) {
Ok(vec![SocketAddr::new(ipaddr, port)].into_iter())
} else if let Some(((ip_filter, dns_name, port), _)) = parse_dns_tcp(self.as_slice()) {
format!("{}:{}", dns_name, port).to_socket_addrs().map(|v| {
v.filter(|addr| ip_filter.matches(addr.ip()))
.collect::<Vec<_>>()
.into_iter()
})
} else {
Ok(vec![].into_iter())
}
}
}
impl TryFrom<Vec<Protocol>> for NetworkAddress {
type Error = EmptyError;
fn try_from(value: Vec<Protocol>) -> Result<Self, Self::Error> {
if value.is_empty() {
Err(EmptyError)
} else {
Ok(NetworkAddress::new(value))
}
}
}
impl From<Protocol> for NetworkAddress {
fn from(proto: Protocol) -> NetworkAddress {
NetworkAddress::new(vec![proto])
}
}
impl From<SocketAddr> for NetworkAddress {
fn from(sockaddr: SocketAddr) -> NetworkAddress {
let ip_proto = Protocol::from(sockaddr.ip());
let tcp_proto = Protocol::Tcp(sockaddr.port());
NetworkAddress::new(vec![ip_proto, tcp_proto])
}
}
impl fmt::Display for NetworkAddress {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for protocol in self.0.iter() {
protocol.fmt(f)?;
}
Ok(())
}
}
impl fmt::Debug for NetworkAddress {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(self, f)
}
}
impl Serialize for NetworkAddress {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
if serializer.is_human_readable() {
serializer.serialize_str(&self.to_string())
} else {
#[derive(Serialize)]
#[serde(rename = "NetworkAddress")]
struct Wrapper<'a>(#[serde(with = "serde_bytes")] &'a [u8]);
bcs::to_bytes(&self.as_slice())
.map_err(serde::ser::Error::custom)
.and_then(|v| Wrapper(&v).serialize(serializer))
}
}
}
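// Note on the `Wrapper` types above and below: the binary form round-trips
// through a bcs byte blob rather than a bare `Vec<Protocol>`, so a reader
// that cannot decode a newer `Protocol` variant can still skip this address
// as opaque bytes and keep decoding the surrounding structure (see the
// "Self-describing, Upgradable" notes earlier in this file).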
impl<'de> Deserialize<'de> for NetworkAddress {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
if deserializer.is_human_readable() {
let s = <String>::deserialize(deserializer)?;
NetworkAddress::from_str(s.as_str()).map_err(de::Error::custom)
} else {
#[derive(Deserialize)]
#[serde(rename = "NetworkAddress")]
struct Wrapper(#[serde(with = "serde_bytes")] Vec<u8>);
Wrapper::deserialize(deserializer)
.and_then(|v| bcs::from_bytes(&v.0).map_err(de::Error::custom))
.and_then(|v: Vec<Protocol>| NetworkAddress::try_from(v).map_err(de::Error::custom))
}
}
}
#[cfg(any(test, feature = "fuzzing"))]
impl Arbitrary for NetworkAddress {
type Parameters = ();
type Strategy = BoxedStrategy<Self>;
fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
vec(any::<Protocol>(), 1..10)
.prop_map(NetworkAddress::new)
.boxed()
}
}
#[cfg(any(test, feature = "fuzzing"))]
pub fn arb_diemnet_addr() -> impl Strategy<Value = NetworkAddress> {
let arb_transport_protos = prop_oneof![
any::<u16>().prop_map(|port| vec![Protocol::Memory(port)]),
any::<(Ipv4Addr, u16)>()
.prop_map(|(addr, port)| vec![Protocol::Ip4(addr), Protocol::Tcp(port)]),
any::<(Ipv6Addr, u16)>()
.prop_map(|(addr, port)| vec![Protocol::Ip6(addr), Protocol::Tcp(port)]),
any::<(DnsName, u16)>()
.prop_map(|(name, port)| vec![Protocol::Dns(name), Protocol::Tcp(port)]),
any::<(DnsName, u16)>()
.prop_map(|(name, port)| vec![Protocol::Dns4(name), Protocol::Tcp(port)]),
any::<(DnsName, u16)>()
.prop_map(|(name, port)| vec![Protocol::Dns6(name), Protocol::Tcp(port)]),
];
let arb_diemnet_protos = any::<(x25519::PublicKey, u8)>()
.prop_map(|(pubkey, hs)| vec![Protocol::NoiseIK(pubkey), Protocol::Handshake(hs)]);
(arb_transport_protos, arb_diemnet_protos).prop_map(
|(mut transport_protos, mut diemnet_protos)| {
transport_protos.append(&mut diemnet_protos);
NetworkAddress::new(transport_protos)
},
)
}
//////////////
// Protocol //
//////////////
impl fmt::Display for Protocol {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use self::Protocol::*;
match self {
Ip4(addr) => write!(f, "/ip4/{}", addr),
Ip6(addr) => write!(f, "/ip6/{}", addr),
Dns(domain) => write!(f, "/dns/{}", domain),
Dns4(domain) => write!(f, "/dns4/{}", domain),
Dns6(domain) => write!(f, "/dns6/{}", domain),
Tcp(port) => write!(f, "/tcp/{}", port),
Memory(port) => write!(f, "/memory/{}", port),
NoiseIK(pubkey) => write!(
f,
"/ln-noise-ik/{}",
pubkey
.to_encoded_string()
.expect("ValidCryptoMaterialStringExt::to_encoded_string is infallible")
),
Handshake(version) => write!(f, "/ln-handshake/{}", version),
}
}
}
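/// parse a single argument (e.g. the `1234` in `"/tcp/1234"`) from the
/// iterator of `'/'`-separated address segments.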
fn parse_one<'a, T>(args: &mut impl Iterator<Item = &'a str>) -> Result<T, ParseError>
where
T: FromStr,
T::Err: Into<ParseError>,
{
let next_arg = args.next().ok_or(ParseError::UnexpectedEnd)?;
next_arg.parse().map_err(Into::into)
}
impl Protocol {
fn parse<'a>(
protocol_type: &str,
args: &mut impl Iterator<Item = &'a str>,
) -> Result<Protocol, ParseError> {
let protocol = match protocol_type {
"ip4" => Protocol::Ip4(parse_one(args)?),
"ip6" => Protocol::Ip6(parse_one(args)?),
"dns" => Protocol::Dns(parse_one(args)?),
"dns4" => Protocol::Dns4(parse_one(args)?),
"dns6" => Protocol::Dns6(parse_one(args)?),
"tcp" => Protocol::Tcp(parse_one(args)?),
"memory" => Protocol::Memory(parse_one(args)?),
"ln-noise-ik" => Protocol::NoiseIK(x25519::PublicKey::from_encoded_string(
args.next().ok_or(ParseError::UnexpectedEnd)?,
)?),
"ln-handshake" => Protocol::Handshake(parse_one(args)?),
unknown => return Err(ParseError::UnknownProtocolType(unknown.to_string())),
};
Ok(protocol)
}
}
impl From<IpAddr> for Protocol {
fn from(addr: IpAddr) -> Protocol {
match addr {
IpAddr::V4(addr) => Protocol::Ip4(addr),
IpAddr::V6(addr) => Protocol::Ip6(addr),
}
}
}
/////////////
// DnsName //
/////////////
impl DnsName {
fn validate(s: &str) -> Result<(), ParseError> {
if s.is_empty() {
Err(ParseError::EmptyDnsNameString)
} else if s.as_bytes().len() > MAX_DNS_NAME_SIZE {
Err(ParseError::DnsNameTooLong(s.as_bytes().len()))
} else if s.contains('/') {
Err(ParseError::InvalidDnsNameCharacter)
} else {
Ok(())
}
}
}
impl From<DnsName> for String {
fn from(name: DnsName) -> String {
name.0
}
}
impl AsRef<str> for DnsName {
fn as_ref(&self) -> &str {
self.0.as_ref()
}
}
impl TryFrom<String> for DnsName {
type Error = ParseError;
fn try_from(s: String) -> Result<Self, Self::Error> {
DnsName::validate(s.as_str()).map(|_| DnsName(s))
}
}
impl FromStr for DnsName {
type Err = ParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
DnsName::validate(s).map(|_| DnsName(s.to_owned()))
}
}
impl fmt::Display for DnsName {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
impl<'de> Deserialize<'de> for DnsName {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
#[derive(Deserialize)]
#[serde(rename = "DnsName")]
struct DeserializeWrapper(String);
let wrapper = DeserializeWrapper::deserialize(deserializer)?;
let name = DnsName::try_from(wrapper.0).map_err(de::Error::custom)?;
Ok(name)
}
}
#[cfg(any(test, feature = "fuzzing"))]
impl Arbitrary for DnsName {
type Parameters = ();
type Strategy = BoxedStrategy<Self>;
fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
// generate arbitrary unicode strings
// + without '/'
// + without control characters (so we can print them easily)
// + between 1 and 255 characters in length
r"[^/\pC]{1,255}"
// need this filter because the number of unicode
// characters does not necessarily equal the number of bytes.
.prop_filter_map("string too long", |s| {
if s.as_bytes().len() > MAX_DNS_NAME_SIZE {
None
} else {
Some(DnsName(s))
}
})
.boxed()
}
}
/////////////
// Parsing //
/////////////
/// parse the `&[Protocol]` into the `"/memory/<port>"` prefix and unparsed
/// `&[Protocol]` suffix.
pub fn parse_memory(protos: &[Protocol]) -> Option<(u16, &[Protocol])> {
protos
.split_first()
.and_then(|(first, suffix)| match first {
Protocol::Memory(port) => Some((*port, suffix)),
_ => None,
})
}
/// parse the `&[Protocol]` into the `"/ip4/<addr>/tcp/<port>"` or
/// `"/ip6/<addr>/tcp/<port>"` prefix and unparsed `&[Protocol]` suffix.
pub fn parse_ip_tcp(protos: &[Protocol]) -> Option<((IpAddr, u16), &[Protocol])> {
use Protocol::*;
if protos.len() < 2 {
return None;
}
let (prefix, suffix) = protos.split_at(2);
match prefix {
[Ip4(ip), Tcp(port)] => Some(((IpAddr::V4(*ip), *port), suffix)),
[Ip6(ip), Tcp(port)] => Some(((IpAddr::V6(*ip), *port), suffix)),
_ => None,
}
}
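/// Restricts which IP address family a DNS name is allowed to resolve to,
/// mirroring the `/dns` (any), `/dns4` (IPv4-only), and `/dns6` (IPv6-only)
/// protocols.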
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum IpFilter {
Any,
OnlyIp4,
OnlyIp6,
}
impl IpFilter {
pub fn matches(&self, ipaddr: IpAddr) -> bool {
match self {
IpFilter::Any => true,
IpFilter::OnlyIp4 => ipaddr.is_ipv4(),
IpFilter::OnlyIp6 => ipaddr.is_ipv6(),
}
}
}
/// parse the `&[Protocol]` into the `"/dns/<domain>/tcp/<port>"`,
/// `"/dns4/<domain>/tcp/<port>"`, or `"/dns6/<domain>/tcp/<port>"` prefix and
/// unparsed `&[Protocol]` suffix.
pub fn parse_dns_tcp(protos: &[Protocol]) -> Option<((IpFilter, &DnsName, u16), &[Protocol])> |
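/// parse the `&[Protocol]` into a `(host, port)` pair from any
/// `"/ip4"`, `"/ip6"`, `"/dns"`, `"/dns4"`, or `"/dns6"` prefix followed by
/// `"/tcp/<port>"`, plus the unparsed `&[Protocol]` suffix.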
pub fn parse_tcp(protos: &[Protocol]) -> Option<((String, u16), &[Protocol])> {
use Protocol::*;
if protos.len() < 2 {
return None;
}
let (prefix, suffix) = protos.split_at(2);
match prefix {
[Ip4(ip), Tcp(port)] => Some(((ip.to_string(), *port), suffix)),
[Ip6(ip), Tcp(port)] => Some(((ip.to_string(), *port), suffix)),
[Dns(name), Tcp(port)] => Some(((name.to_string(), *port), suffix)),
[Dns4(name), Tcp(port)] => Some(((name.to_string(), *port), suffix)),
[Dns6(name), Tcp(port)] => Some(((name.to_string(), *port), suffix)),
_ => None,
}
}
/// parse the `&[Protocol]` into the `"/ln-noise-ik/<pubkey>"` prefix and
/// unparsed `&[Protocol]` suffix.
pub fn parse_noise_ik(protos: &[Protocol]) -> Option<(&x25519::PublicKey, &[Protocol])> {
match protos.split_first() {
Some((Protocol::NoiseIK(pubkey), suffix)) => Some((pubkey, suffix)),
_ => None,
}
}
/// parse the `&[Protocol]` into the `"/ln-handshake/<version>"` prefix and
/// unparsed `&[Protocol]` suffix.
pub fn parse_handshake(protos: &[Protocol]) -> Option<(u8, &[Protocol])> {
match protos.split_first() {
Some((Protocol::Handshake(version), suffix)) => Some((*version, suffix)),
_ => None,
}
}
/// parse canonical diemnet protocols
///
/// See: [`NetworkAddress::is_diemnet_addr`]
fn parse_diemnet_protos(protos: &[Protocol]) -> Option<&[Protocol]> {
// parse base transport layer
// ---
// parse_ip_tcp
// <or> parse_dns_tcp
// <or> cfg!(test) parse_memory
let transport_suffix = parse_ip_tcp(protos)
.map(|x| x.1)
.or_else(|| parse_dns_tcp(protos).map(|x| x.1))
.or_else(|| {
if cfg!(test) {
parse_memory(protos).map(|x| x.1)
} else {
None
}
})?;
// parse authentication layer
// ---
// parse_noise_ik
let auth_suffix = parse_noise_ik(transport_suffix).map(|x| x.1)?;
// parse handshake layer
let handshake_suffix = parse_handshake(auth_suffix).map(|x| x.1)?;
// ensure no trailing protos after handshake
if handshake_suffix.is_empty() {
Some(protos)
} else {
None
}
}
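// For illustration (a sketch reusing the x25519 public key from the tests
// below), a canonical address parses while one with trailing protocols does not:
//
//     let s = "/ip4/10.0.0.61/tcp/6180/ln-noise-ik/080e287879c918794170e258bfaddd75acac5b3e350419044655e4983a487120/ln-handshake/0";
//     let addr = NetworkAddress::from_str(s).unwrap();
//     assert!(parse_diemnet_protos(addr.as_slice()).is_some());
//     // trailing protocols after the handshake make the address non-canonical
//     let addr = addr.extend_from_slice(&[Protocol::Tcp(1)]);
//     assert!(parse_diemnet_protos(addr.as_slice()).is_none());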
///////////
// Tests //
///////////
#[cfg(test)]
mod test {
use super::*;
use anyhow::format_err;
use bcs::test_helpers::assert_canonical_encode_decode;
#[test]
fn test_network_address_display() {
use super::Protocol::*;
let addr = NetworkAddress::new(vec![Memory(1234), Handshake(0)]);
assert_eq!("/memory/1234/ln-handshake/0", addr.to_string());
}
#[test]
fn test_network_address_parse_success() {
use super::Protocol::*;
let pubkey_str = "080e287879c918794170e258bfaddd75acac5b3e350419044655e4983a487120";
let pubkey = x25519::PublicKey::from_encoded_string(pubkey_str).unwrap();
let noise_addr_str = format!(
"/dns/example.com/tcp/1234/ln-noise-ik/{}/ln-handshake/5",
pubkey_str
);
let test_cases = [
(
"/memory/1234/ln-handshake/0",
vec![Memory(1234), Handshake(0)],
),
(
"/ip4/12.34.56.78/tcp/1234/ln-handshake/123",
vec![
Ip4(Ipv4Addr::new(12, 34, 56, 78)),
Tcp(1234),
Handshake(123),
],
),
(
"/ip6/::1/tcp/0",
vec![Ip6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), Tcp(0)],
),
(
"/ip6/dead:beef::c0de/tcp/8080",
vec![
Ip6(Ipv6Addr::new(0xdead, 0xbeef, 0, 0, 0, 0, 0, 0xc0de)),
Tcp(8080),
],
),
(
"/dns/example.com/tcp/80",
vec![Dns(DnsName("example.com".to_owned())), Tcp(80)],
),
(
&noise_addr_str,
vec![
Dns(DnsName("example.com".to_owned())),
Tcp(1234),
NoiseIK(pubkey),
Handshake(5),
],
),
];
for (addr_str, expected_address) in &test_cases {
let actual_address = NetworkAddress::from_str(addr_str)
.map_err(|err| format_err!("failed to parse: input: '{}', err: {}", addr_str, err))
.unwrap();
let expected_address = NetworkAddress::new(expected_address.clone());
assert_eq!(actual_address, expected_address);
}
}
#[test]
fn test_network_address_parse_fail() {
let test_cases = [
"",
"/",
"/foobar",
"/tcp",
"tcp/1234",
"/tcp/1234/",
"/tcp/1234/foobar/5",
"/tcp/99999",
"/ip4/1.1.1",
"/ip4/1.1.1.1.",
"/ip4/1.1.1.1.1",
"/ip4/1.1.1.999.1",
];
for &addr_str in &test_cases {
if let Ok(addr) = NetworkAddress::from_str(addr_str) {
panic!(
"parsing should fail: input: '{}', output: '{}'",
addr_str, addr
);
}
}
}
#[test]
fn test_parse_memory() {
let addr = NetworkAddress::from_str("/memory/123").unwrap();
let expected_suffix: &[Protocol] = &[];
assert_eq!(
parse_memory(addr.as_slice()).unwrap(),
(123, expected_suffix)
);
let addr = NetworkAddress::from_str("/memory/123/tcp/999").unwrap();
let expected_suffix: &[Protocol] = &[Protocol::Tcp(999)];
assert_eq!(
parse_memory(addr.as_slice()).unwrap(),
(123, expected_suffix)
);
let addr = NetworkAddress::from_str("/tcp/999/memory/123").unwrap();
assert_eq!(None, parse_memory(addr.as_slice()));
}
#[test]
fn test_parse_ip_tcp() {
let addr = NetworkAddress::from_str("/ip4/1.2.3.4/tcp/123").unwrap();
let expected_suffix: &[Protocol] = &[];
assert_eq!(
parse_ip_tcp(addr.as_slice()).unwrap(),
((IpAddr::from_str("1.2.3.4").unwrap(), 123), expected_suffix)
);
let addr = NetworkAddress::from_str("/ip6/::1/tcp/123").unwrap();
let expected_suffix: &[Protocol] = &[];
assert_eq!(
parse_ip_tcp(addr.as_slice()).unwrap(),
((IpAddr::from_str("::1").unwrap(), 123), expected_suffix)
);
let addr = NetworkAddress::from_str("/ip6/::1/tcp/123/memory/999").unwrap();
let expected_suffix: &[Protocol] = &[Protocol::Memory(999)];
assert_eq!(
parse_ip_tcp(addr.as_slice()).unwrap(),
((IpAddr::from_str("::1").unwrap(), 123), expected_suffix)
);
let addr = NetworkAddress::from_str("/tcp/999/memory/123").unwrap();
assert_eq!(None, parse_ip_tcp(addr.as_slice()));
}
#[test]
fn test_parse_dns_tcp() {
let dns_name = DnsName::from_str("example.com").unwrap();
let addr = NetworkAddress::from_str("/dns/example.com/tcp/123").unwrap();
let expected_suffix: &[Protocol] = &[];
assert_eq!(
parse_dns_tcp(addr.as_slice()).unwrap(),
((IpFilter::Any, &dns_name, 123), expected_suffix)
);
let addr = NetworkAddress::from_str("/dns4/example.com/tcp/123").unwrap();
let expected_suffix: &[Protocol] = &[];
assert_eq!(
parse_dns_tcp(addr.as_slice()).unwrap(),
((IpFilter::OnlyIp4, &dns_name, 123), expected_suffix)
);
let addr = NetworkAddress::from_str("/dns6/example.com/tcp/123").unwrap();
let expected_suffix: &[Protocol] = &[];
assert_eq!(
parse_dns_tcp(addr.as_slice()).unwrap(),
((IpFilter::OnlyIp6, &dns_name, 123), expected_suffix)
);
let addr = NetworkAddress::from_str("/dns/example.com/tcp/123/memory/44").unwrap();
let expected_suffix: &[Protocol] = &[Protocol::Memory(44)];
assert_eq!(
parse_dns_tcp(addr.as_slice()).unwrap(),
((IpFilter::Any, &dns_name, 123), expected_suffix)
);
let addr = NetworkAddress::from_str("/tcp/999/memory/123").unwrap();
assert_eq!(None, parse_dns_tcp(addr.as_slice()));
}
#[test]
fn test_parse_noise_ik() {
let pubkey_str = "080e287879c918794170e258bfaddd75acac5b3e350419044655e4983a487120";
let pubkey = x25519::PublicKey::from_encoded_string(pubkey_str).unwrap();
let addr = NetworkAddress::from_str(&format!("/ln-noise-ik/{}", pubkey_str)).unwrap();
let expected_suffix: &[Protocol] = &[];
assert_eq!(
parse_noise_ik(addr.as_slice()).unwrap(),
(&pubkey, expected_suffix)
);
let addr =
NetworkAddress::from_str(&format!("/ln-noise-ik/{}/tcp/999", pubkey_str)).unwrap();
let expected_suffix: &[Protocol] = &[Protocol::Tcp(999)];
assert_eq!(
parse_noise_ik(addr.as_slice()).unwrap(),
(&pubkey, expected_suffix)
);
let addr = NetworkAddress::from_str("/tcp/999/memory/123").unwrap();
assert_eq!(None, parse_noise_ik(addr.as_slice()));
}
#[test]
fn test_parse_handshake() {
let addr = NetworkAddress::from_str("/ln-handshake/0").unwrap();
let expected_suffix: &[Protocol] = &[];
assert_eq!(
parse_handshake(addr.as_slice()).unwrap(),
(0, expected_suffix),
);
let addr = NetworkAddress::from_str("/ln-handshake/0/tcp/999").unwrap();
let expected_suffix: &[Protocol] = &[Protocol::Tcp(999)];
assert_eq!(
parse_handshake(addr.as_slice()).unwrap(),
(0, expected_suffix),
);
let addr = NetworkAddress::from_str("/tcp/999/memory/123").unwrap();
assert_eq!(None, parse_handshake(addr.as_slice()));
}
proptest! {
#[test]
fn test_network_address_canonical_serialization(addr in any::<NetworkAddress>()) {
assert_canonical_encode_decode(addr);
}
#[test]
fn test_network_address_display_roundtrip(addr in any::<NetworkAddress>()) {
let addr_str = addr.to_string();
let addr_parsed = NetworkAddress::from_str(&addr_str).unwrap();
assert_eq!(addr, addr_parsed);
}
#[test]
fn test_is_diemnet_addr(addr in arb_diemnet_addr()) {
assert!(addr.is_diemnet_addr(), "addr.is_diemnet_addr() = false; addr: '{}'", addr);
}
#[test]
fn test_is_not_diemnet_addr_with_trailing(
addr in arb_diemnet_addr(),
addr_suffix in any::<NetworkAddress>(),
) {
// A valid DiemNet addr w/ unexpected trailing protocols should not parse.
let addr = addr.extend_from_slice(addr_suffix.as_slice());
assert!(!addr.is_diemnet_addr(), "addr.is_diemnet_addr() = true; addr: '{}'", addr);
}
}
}
| {
use Protocol::*;
if protos.len() < 2 {
return None;
}
let (prefix, suffix) = protos.split_at(2);
match prefix {
[Dns(name), Tcp(port)] => Some(((IpFilter::Any, name, *port), suffix)),
[Dns4(name), Tcp(port)] => Some(((IpFilter::OnlyIp4, name, *port), suffix)),
[Dns6(name), Tcp(port)] => Some(((IpFilter::OnlyIp6, name, *port), suffix)),
_ => None,
}
} |
updatePost.dto.ts | import { ApiProperty } from '@nestjs/swagger';
import { IsNotEmpty, IsNumber, IsOptional, IsString } from 'class-validator';
export default class | {
@ApiProperty({ required: false })
@IsNumber()
@IsOptional()
id: number;
@ApiProperty({ required: false })
@IsString()
@IsNotEmpty()
@IsOptional()
content: string;
@ApiProperty({ required: false })
@IsString()
@IsNotEmpty()
@IsOptional()
title: string;
}
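// A usage sketch (assumed, not part of the original file): with Nest's global
// ValidationPipe, a partial body such as { "title": "New title" } passes,
// while { "title": "" } is rejected by @IsNotEmpty() even though the field
// is marked @IsOptional().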
| UpdatePostDto |
index.ts | export * from './serverGroupReader.service';
export * from './serverGroupWriter.service'; | export * from './configure/common/serverGroupCommandBuilder.service';
export * from './configure/common/serverGroupCommandRegistry.provider';
export * from './templates'; | export * from './details/serverGroupWarningMessage.service';
export * from './metrics/cloudMetrics.read.service'; |
struct.rs | // Structs are a convenient way to wrap up related data into one neatly packaged structure.
// Let's consider the example of a Struct that allows us to package together data about a person.
#[test]
fn our_first_struct() {
struct Person {
name: &'static str,
age: u32,
}
let jim = Person {
name: "Jim",
age: 57,
};
assert_eq!(jim.name, "Jim");
assert_eq!(jim.age, 57);
}
// Let's try another example
#[test]
fn one_more_struct() |
// Struct instances, like any other binding in Rust, are immutable by default.
// If we bind an instance as mutable, we can reassign its fields.
#[test]
fn mutable_structs() {
struct Language {
version: &'static str,
}
let mut rust = Language { version: "1.3.0" };
rust.version = "1.4.0";
assert_eq!(rust.version, "1.4.0");
}
// We can also revoke mutability by shadowing a mutable binding with an immutable one
#[test]
fn revoking_mutability() {
struct Language {
version: &'static str,
}
let mut rust = Language { version: "1.3.0" };
rust.version = "1.4.0";
assert_eq!(rust.version, "1.4.0");
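// shadowing the mutable binding with an immutable one revokes mutability;
// the commented-out assignment below would now fail to compile (E0594)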
let rust = rust;
//rust.version = "1.5.0";
assert_eq!(rust.version, "1.4.0");
}
// There may be cases where you want to create a new instance of a Struct
// that is only slightly different from an existing one. Struct update syntax
// (`..existing`) fills in the remaining fields for us.
#[test]
fn dot_merging() {
struct Account {
holder: &'static str,
account_number: &'static str,
balance: f64,
}
let broke = Account {
holder: "Morgan Stanley",
account_number: "00021948523756312",
balance: 0.00,
};
let rich = Account { balance: 1000000.00, ..broke };
assert_eq!(rich.holder, broke.holder);
assert_eq!(rich.balance, 1000000.00);
}
| {
struct Movie {
title: &'static str,
runtime: u32,
}
let movie = Movie {
title: "Star Wars",
runtime: 121,
};
assert_eq!(movie.title, "Star Wars");
assert_eq!(movie.runtime, 121);
} |
MalformedTimestampHeaderEpochCommand.ts | import { RestJsonProtocolClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../RestJsonProtocolClient";
import { MalformedTimestampHeaderEpochInput } from "../models/models_0";
import {
deserializeAws_restJson1MalformedTimestampHeaderEpochCommand,
serializeAws_restJson1MalformedTimestampHeaderEpochCommand,
} from "../protocols/Aws_restJson1";
import { getSerdePlugin } from "@aws-sdk/middleware-serde";
import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http";
import { Command as $Command } from "@aws-sdk/smithy-client";
import {
FinalizeHandlerArguments,
Handler,
HandlerExecutionContext,
MiddlewareStack,
HttpHandlerOptions as __HttpHandlerOptions,
MetadataBearer as __MetadataBearer,
SerdeContext as __SerdeContext,
} from "@aws-sdk/types";
export interface MalformedTimestampHeaderEpochCommandInput extends MalformedTimestampHeaderEpochInput {}
export interface MalformedTimestampHeaderEpochCommandOutput extends __MetadataBearer {}
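/**
 * A usage sketch (assumed; it follows the generic AWS SDK v3 command pattern
 * rather than anything documented in this file):
 *
 *     const client = new RestJsonProtocolClient({});
 *     const command = new MalformedTimestampHeaderEpochCommand({
 *       // fields per MalformedTimestampHeaderEpochInput
 *     });
 *     const response = await client.send(command);
 */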
export class MalformedTimestampHeaderEpochCommand extends $Command<
MalformedTimestampHeaderEpochCommandInput,
MalformedTimestampHeaderEpochCommandOutput,
RestJsonProtocolClientResolvedConfig
> {
// Start section: command_properties
// End section: command_properties
| super();
// End section: command_constructor
}
/**
* @internal
*/
resolveMiddleware(
clientStack: MiddlewareStack<ServiceInputTypes, ServiceOutputTypes>,
configuration: RestJsonProtocolClientResolvedConfig,
options?: __HttpHandlerOptions
): Handler<MalformedTimestampHeaderEpochCommandInput, MalformedTimestampHeaderEpochCommandOutput> {
this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize));
const stack = clientStack.concat(this.middlewareStack);
const { logger } = configuration;
const clientName = "RestJsonProtocolClient";
const commandName = "MalformedTimestampHeaderEpochCommand";
const handlerExecutionContext: HandlerExecutionContext = {
logger,
clientName,
commandName,
inputFilterSensitiveLog: MalformedTimestampHeaderEpochInput.filterSensitiveLog,
outputFilterSensitiveLog: (output: any) => output,
};
const { requestHandler } = configuration;
return stack.resolve(
(request: FinalizeHandlerArguments<any>) =>
requestHandler.handle(request.request as __HttpRequest, options || {}),
handlerExecutionContext
);
}
private serialize(input: MalformedTimestampHeaderEpochCommandInput, context: __SerdeContext): Promise<__HttpRequest> {
return serializeAws_restJson1MalformedTimestampHeaderEpochCommand(input, context);
}
private deserialize(
output: __HttpResponse,
context: __SerdeContext
): Promise<MalformedTimestampHeaderEpochCommandOutput> {
return deserializeAws_restJson1MalformedTimestampHeaderEpochCommand(output, context);
}
// Start section: command_body_extra
// End section: command_body_extra
} | constructor(readonly input: MalformedTimestampHeaderEpochCommandInput) {
// Start section: command_constructor |