file_name
stringlengths
3
137
prefix
stringlengths
0
918k
suffix
stringlengths
0
962k
middle
stringlengths
0
812k
test_eval.py
import xml.etree.ElementTree as ET from programy.parser.template.nodes.base import TemplateNode from programy.parser.template.nodes.word import TemplateWordNode from programy.parser.template.nodes.get import TemplateGetNode from programy.parser.template.nodes.eval import TemplateEvalNode from programytest.parser.template.graph_tests.graph_test_client import TemplateGraphTestClient class TemplateGraphEvalTests(TemplateGraphTestClient): def
(self): template = ET.fromstring(""" <template> <eval>Text</eval> </template> """) root = self._graph.parse_template_expression(template) self.assertIsNotNone(root) self.assertIsInstance(root, TemplateNode) self.assertIsNotNone(root.children) self.assertEqual(len(root.children), 1) node = root.children[0] self.assertIsNotNone(node) self.assertIsInstance(node, TemplateEvalNode) def test_eval_node_from_xml_multi_words(self): template = ET.fromstring(""" <template> <eval>Some Text</eval> </template> """) root = self._graph.parse_template_expression(template) self.assertIsNotNone(root) self.assertIsInstance(root, TemplateNode) self.assertIsNotNone(root.children) self.assertEqual(len(root.children), 1) node = root.children[0] self.assertIsNotNone(node) self.assertIsInstance(node, TemplateEvalNode) self.assertEqual(len(node.children), 2) self.assertIsInstance(node.children[0], TemplateWordNode) self.assertEqual(node.children[0].word, "Some") self.assertIsInstance(node.children[1], TemplateWordNode) self.assertEqual(node.children[1].word, "Text") def test_eval_node_from_xml_multi_words(self): template = ET.fromstring(""" <template> <eval>Some <get name="SomeGet" /> Text</eval> </template> """) root = self._graph.parse_template_expression(template) self.assertIsNotNone(root) self.assertIsInstance(root, TemplateNode) self.assertIsNotNone(root.children) self.assertEqual(len(root.children), 1) node = root.children[0] self.assertIsNotNone(node) self.assertIsInstance(node, TemplateEvalNode) self.assertEqual(len(node.children), 3) self.assertIsInstance(node.children[0], TemplateWordNode) self.assertEqual(node.children[0].word, "Some") self.assertIsInstance(node.children[1], TemplateGetNode) self.assertIsInstance(node.children[2], TemplateWordNode) self.assertEqual(node.children[2].word, "Text")
test_eval_node_from_xml_single_word
DeepMSPeptide.py
import warnings warnings.simplefilter(action='ignore', category=FutureWarning) import argparse import numpy as np import tensorflow as tf from tensorflow import keras parser = argparse.ArgumentParser(description='''Predicts the detectability of input peptides using a single dimension Convolutionar Neural Network, based on Tensorflow 1.13.1 Requierements: Tensorflow 1.13.1''') parser.add_argument('infile', metavar='F', type=str, nargs='+', help='File containing the peptides to be predicted, one per line (max length= 81)') args = parser.parse_args() def load_pep_and_codify(file, max_len):
print('Loading model...') model_2_1D = keras.models.load_model('model_2_1D.h5') print('Loading input peptides') predict_data, skipped, lines = load_pep_and_codify(args.infile[0], 81) print('Succesfully loaded {0} peptides and skipped {1}'.format(len(lines), str(skipped))) print('Making predictions') model_2_1D_pred = model_2_1D.predict(predict_data) model_2_1D_pred = np.hstack((np.array(lines).reshape(len(lines), 1),model_2_1D_pred)).tolist() Pred_output = [] for pred in model_2_1D_pred: if float(pred[1]) > 0.5: # pred.extend('0') Pred_output.append([pred[0], str(1-float(pred[1])), '0']) else: Pred_output.append([pred[0], str(1-float(pred[1])), '1']) # pred.extend('1') outFile = '{0}_Predictions.txt'.format(args.infile[0].split('.')[0]) print('Saving predictions to file {}'.format(outFile)) with open(outFile, 'w') as outf: outf.write('Peptide\tProb\tDetectability\n') outf.writelines('\t'.join(i) + '\n' for i in Pred_output)
aa_dict={'A':1,'R':2,'N':3,'D':4,'C':5,'Q':6,'E':7,'G':8,'H':9,'I':10,'L':11,'K':12,'M':13,'F':14, 'P':15,'O':16,'S':17,'U':18,'T':19,'W':20,'Y':21,'V':22} with open(file, 'r') as inf: lines = inf.read().splitlines() pep_codes=[] long_pep_counter = 0 newLines = [] for pep in lines: if not len(pep) > max_len: current_pep=[] for aa in pep: current_pep.append(aa_dict[aa]) pep_codes.append(current_pep) newLines.extend([pep]) else: long_pep_counter += 1 predict_data = keras.preprocessing.sequence.pad_sequences(pep_codes, value=0, padding='post', maxlen=max_len) return predict_data, long_pep_counter, newLines
nat_helpers.py
import re import os import time import logging import json from collections import namedtuple import ptf.mask as mask import ptf.packet as packet import ptf.testutils as testutils from tests.common.errors import RunAnsibleModuleFail from tests.common.helpers.assertions import pytest_assert from jinja2 import Environment, FileSystemLoader from tests.common.config_reload import config_reload BASE_DIR = os.path.dirname(os.path.realpath(__file__)) DUT_TMP_DIR = os.path.join('tmp', os.path.basename(BASE_DIR)) NAT_CONF_J2_TEMPLATE = "templates/create_nat_binding.j2" FILES_DIR = os.path.join(BASE_DIR, 'files') TEMPLATE_DIR = os.path.join(BASE_DIR, 'templates') NAT_GLOBAL_TEMPLATE = 'global_nat_table_config.j2' NAT_STATIC_TEMPLATE = 'static_nat_napt_table_config.j2' ZONES_TEMPLATE = 'nat_zone_table_config.j2' NAT_ADMIN_MODE = "enabled" STATIC_NAT_TABLE_NAME = "STATIC_NAT" STATIC_NAPT_TABLE_NAME = "STATIC_NAPT" ACL_TEMPLATE = 'create_acl_rule.j2' GLOBAL_NAT_TIMEOUT = 300 GLOBAL_UDP_NAPT_TIMEOUT = 120 GLOBAL_TCP_NAPT_TIMEOUT = 300 TCP_GLOBAL_PORT = 3700 UDP_GLOBAL_PORT = 3000 TCP_LOCAL_PORT = 80 UDP_LOCAL_PORT = 161 POOL_RANGE_START_PORT = 5000 POOL_RANGE_END_PORT = 6000 logger = logging.getLogger(__name__) DYNAMIC_POOL_NAME = "test_pool" ACL_TABLE_GLOBAL_NAME = "test_acl_table" DYNAMIC_BINDING_NAME = "test_binding" ACL_SUBNET = "192.168.0.0/24" BR_MAC = ["22:22:22:22:22:21"] VRF = {"red": {"ip": "11.1.0.2", "id": "1", "mask": "30", "gw": "11.1.0.1", "dut_iface": "PortChannel0001", "port_id": {"t0": ["28"], "t0-64": ["0", "1"], "t0-64-32": ["0", "1"] } }, "blue": {"ip": "192.168.0.101", "id": "2", "mask": "24", "gw": "192.168.0.1", "port_id": "6"}, "yellow": {"ip": "192.168.0.201", "id": "3", "mask": "24", "gw": "192.168.0.1", "port_id": "7"} } SETUP_CONF = {"loopback": {"vrf": VRF, "acl_subnet": ACL_SUBNET}, "port_in_lag": {"vrf": VRF, "acl_subnet": ACL_SUBNET} } DIRECTION_PARAMS = ['host-tor', 'leaf-tor'] FULL_CONE_TEST_IP = "172.20.1.2" FULL_CONE_TEST_SUBNET = 
"172.20.1.0/24" REBOOT_MAP = {'cold': {"timeout": 300}, 'fast': {"timeout": 180}, 'warm': {"timeout": 180}} PTF_NETWORK_DATA = namedtuple('PTF_NETWORK_DATA', ['outer_ports', 'inner_ports', 'eth_dst', 'eth_src', 'ip_src', 'ip_dst', 'public_ip', 'private_ip', 'exp_src_ip', 'exp_dst_ip']) L4_PORTS_DATA = namedtuple('L4_PORTS_DATA', ['src_port', 'dst_port', 'exp_src_port', 'exp_dst_port']) def check_peers_by_ping(duthost): for vrf in VRF: duthost.command("ping {0} -c 5".format(VRF[vrf]['ip'])) def configure_nat_over_cli(duthost, action, nat_type, global_ip, local_ip, proto=None, global_port=None, local_port=None): """ static NAT/NAPT CLI wrapper :param duthost: DUT host object :param action: string rule action :param nat_type: string static nat type :param global_ip: string global IP address value :param local_ip: string local IP address value :param proto: string protocol type :param global_port: string global l4 port :param local_port: string local l4 port :return : dict with rule parameters """ action_type_map = {'add': '-nat_type dnat', 'remove': ''} if nat_type == 'static_nat': duthost.command("sudo config nat {} static basic {} {} {}".format(action, global_ip, local_ip, action_type_map[action])) return { global_ip: {'local_ip': local_ip, 'nat_type': 'dnat'} } elif nat_type == 'static_napt': duthost.command("sudo config nat {} static {} {} {} {} {} {}".format(action, proto.lower(), global_ip, global_port, local_ip, local_port, action_type_map[action])) return { "{}|{}|{}".format(global_ip, proto.upper(), global_port): {'local_ip': local_ip, 'local_port': "{}".format(local_port), 'nat_type': 'dnat' } } return "Unkown NAT type" def nat_statistics(duthost, show=False, clear=False): """ NAT CLI helper which gets or clears NAT statistics :param duthost: DUT host object :param show: bool :param clear: bool :return : formatted CLI output """ if show: output_cli = exec_command(duthost, ["show nat statistics"]) if output_cli["rc"]: raise Exception('Return code is {} not 
0'.format(output_cli["rc"])) output = {} entries = output_cli["stdout"].split()[10:] if entries: num_entries = len(entries[::5]) keys = output_cli["stdout"].split()[:5] for num in range(0, num_entries): entry_values = entries[(num * 5):(num * 5) + 5] key = entry_values[1] if entry_values[1] != "---" else entry_values[2] output[key] = {keys[i]: entry_values[i] for i in range(0, len(keys))} return output elif clear: output_cli = exec_command(duthost, ["sudo sonic-clear nat statistics"]) if output_cli["rc"]: raise Exception('Return code is {} not 0'.format(output_cli["rc"])) return output_cli["stdout"].lstrip() return None def dut_nat_iptables_status(duthost): """ NAT CLI helper gets DUT's iptables entries :param duthost: DUT host object :return : dict with nat PREROUTING/POSTROUTING iptables entries """ nat_table_status = {} output_cli = exec_command(duthost, ["sudo iptables -nL -t nat"]) if output_cli["rc"]: raise Exception('Return code is {} not 0'.format(output_cli["rc"])) entries = output_cli["stdout"].split("\n") index_prerouting = [i for i in range(0, len(entries)) if "PREROUTING" in entries[i]][0] + 2 index_input = [i for i in range(0, len(entries)) if "INPUT" in entries[i]][0] index_postrouting = [i for i in range(0, len(entries)) if 'POSTROUTING' in entries[i]][0] + 2 if any(['DOCKER' in entry for entry in entries]): index_docker = [i for i in range(0, len(entries)) if 'DOCKER' in entries[i]][0] postrouting = [el for el in entries[index_postrouting:index_docker] if len(el) > 1] else: postrouting = [el for el in entries[index_postrouting:] if len(el) > 1] prerouting = [el for el in entries[index_prerouting:index_input] if len(el) > 0] nat_table_status["prerouting"] = [" ".join([s.strip() for s in el.split() if len(el) > 0]) for el in prerouting] nat_table_status["postrouting"] = [" ".join([s.strip() for s in el.split() if len(el) > 0]) for el in postrouting] return nat_table_status def dut_interface_status(duthost, interface_name): """ NAT CLI helper gets 
DUT's interface status :param duthost: DUT host object :param interface_name: string interface to configure :return : string formatted CLI output with interface current operstatus """ return duthost.show_interface(command='status', interfaces=interface_name)['ansible_facts']['int_status'][interface_name]['oper_state'] def dut_interface_control(duthost, action, interface_name, ip_addr=""): """ NAT CLI helper enable/disable DUT's interface :param duthost: DUT host object :param action: string action to configure interface :param interface_name: string interface to configure :return : formatted CLI output with interface current operstatus """ interface_actions = {"disable": "shutdown {}".format(interface_name), "enable": "startup {}".format(interface_name), "ip remove": "{} {}".format(action, ip_addr), "ip add": "{} {}".format(action, ip_addr) } expected_operstatus = {"disable": "down", "enable": "up", "ip remove": "up", "ip add": "up"} output_cli = exec_command(duthost, ["sudo config interface {}".format(interface_actions[action])]) if output_cli["rc"]: raise Exception('Return code is {} not 0'.format(output_cli["rc"])) attempts = 3 current_operstatus = dut_interface_status(duthost, interface_name) while current_operstatus != expected_operstatus[action]: if attempts == 0: break time.sleep(15) current_operstatus = dut_interface_status(duthost, interface_name) attempts -= 1 return current_operstatus def nat_translations(duthost, show=False, clear=False): """ NAT CLI helper which gets or clears NAT translations :param duthost: DUT host object :param show: bool :param clear: bool :return : formatted CLI output """ if show: output_cli = exec_command(duthost, ["show nat translations"]) if output_cli["rc"]: raise Exception('Return code is {} not 0'.format(output_cli["rc"])) output = {} entries = output_cli["stdout"].split('\n')[15:] splited_entries = [] for el in entries: splited_entries.extend(el.split()) if splited_entries: num_entries = len(splited_entries[::5]) keys = 
[el.strip() for el in output_cli["stdout"].split("\n")[13].split(" ") if el] for num in range(0, num_entries): entry_values = splited_entries[(num * 5):(num * 5) + 5] key = entry_values[1] if entry_values[1] != "---" else entry_values[2] output[key] = {keys[i]: entry_values[i] for i in range(0, len(keys))} return output elif clear: output_cli = exec_command(duthost, ["sudo sonic-clear nat translations"]) if output_cli["rc"]: raise Exception('Return code is {} not 0'.format(output_cli["rc"])) return output_cli["stdout"].lstrip() return None def
(duthost, crud_operation): """ static NAT CLI helper :param duthost: DUT host object :param crud_operation: dict dict with action and rule parameters :return : dict with rule parameters """ nat_type = "static_nat" for key in crud_operation.keys(): output = configure_nat_over_cli(duthost, crud_operation[key]["action"], nat_type, crud_operation[key]["global_ip"], crud_operation[key]["local_ip"]) return output def crud_operations_napt(duthost, crud_operation): """ static NAPT CLI helper :param duthost: DUT host object :param crud_operation: dict dict with action and rule parameters :return : dict with rule parameters """ nat_type = 'static_napt' for key in crud_operation.keys(): output = configure_nat_over_cli(duthost, crud_operation[key]["action"], nat_type, crud_operation[key]["global_ip"], crud_operation[key]["local_ip"], proto=crud_operation[key]["proto"], global_port=crud_operation[key]["global_port"], local_port=crud_operation[key]["local_port"]) return output def exec_command(host, command_list): """ Executes shell commands on host :param host: host object :param command_list: list of commands to execute :return : response from host or exception __str__ """ if len(command_list) == 1: try: response = host.shell(command_list[0]) return response except Exception as e: return e.__str__() else: for command in command_list: exec_command(host, [command]) def nat_zones_config(duthost, setup_info, interface_type): """ generate and deploy NAT zones configuration files :param duthost: DUT host object :param setup_info: dict, setup info fixture :param interface_type: interface type """ # Get inner and outer interfaces from setup info inner_zone_interfaces = setup_info[interface_type]["inner_zone_interfaces"] outer_zone_interfaces = setup_info[interface_type]["outer_zone_interfaces"] for rif in setup_info["dut_rifs_in_topo_t0"]: if rif in inner_zone_interfaces or rif in outer_zone_interfaces: nat_zone_vars = setup_info['interfaces_nat_zone'][rif] # Add zone configuration 
duthost.command("sudo config nat add interface {0} -nat_zone {1}".format(rif, nat_zone_vars['zone_id'])) # Check that zone was applied show_zones = duthost.command("show nat config zones")['stdout'] zone_id = re.search(r"{}\s+(\d)".format(rif), show_zones).group(1) pytest_assert(str(nat_zone_vars['zone_id']) == zone_id, "NAT zone was not set to {}".format(zone_id)) def get_cli_show_nat_config_output(duthost, command): """ created ditionary with output of show nat command :param duthost: DUT host object :param command: str, command to execute :return: list of dict with output """ return duthost.show_and_parse("show nat config {}".format(command)) def apply_static_nat_config(duthost, ptfadapter, ptfhost, setup_data, network_data, direction, interface_type, nat_type, public_ip, private_ip, protocol_type=None, nat_entry=None, handshake=False): """ generate and deploy static NAT/NAPT configuration files :param duthost: DUT host object :param ptfadapter: ptf adapter fixture :param ptfhost: PTF host object :param setup_info: dict, setup info fixture :param direction: string, traffic's flow direction :param interface_type: interface type :param nat_type: string, static NAT type :param public_ip: IP Address of Internet IP (host-tor) or IP Address of Public Interface (leaf-tor) :param private_ip: IP Address of Local IP (host-tor) or IP Address of Internet IP (leaf-tor) :param nat_entry: static_nat/static_napt :param protocol_type: TCP/UDP """ # Define network data and L4 ports network_data = get_network_data(ptfadapter, setup_data, direction, interface_type, nat_type=nat_type) src_port, dst_port = get_l4_default_ports(protocol_type) global_port = dst_port local_port = src_port if nat_entry != 'static_napt': # Add static basic rule duthost.command("sudo config nat add static basic {0} {1} -nat_type=dnat".format(public_ip, private_ip)) else: # Add static napt rule duthost.command("sudo config nat add static {0} {1} {2} {3} {4} -nat_type=dnat". 
format(protocol_type.lower(), public_ip, global_port, private_ip, local_port)) # Check that rule was applied static_nat = get_cli_show_nat_config_output(duthost, "static") pytest_assert('dnat' == static_nat[0]['nat type'], "Default NAT type was changed") pytest_assert(public_ip == static_nat[0]['global ip'], "Global IP does not match {}".format(public_ip)) pytest_assert(private_ip == static_nat[0]['local ip'], "Local IP does not match {}".format(private_ip)) if nat_entry == 'static_napt': pytest_assert(protocol_type == static_nat[0]['ip protocol'], "Protocol does not match {}".format(protocol_type)) pytest_assert(str(global_port) == static_nat[0]['global port'], "Global Port does not match {}".format(global_port)) pytest_assert(str(local_port) == static_nat[0]['local port'], "Local Port does not match {}".format(local_port)) else: pytest_assert('all' == static_nat[0]['ip protocol']) nat_zones_config(duthost, setup_data, interface_type) # Perform TCP handshake if handshake: if direction == 'leaf-tor': # set_arp entries check_peers_by_ping(duthost) perform_handshake(ptfhost, setup_data, protocol_type, direction, network_data.ip_dst, dst_port, network_data.ip_src, src_port, network_data.public_ip) def get_src_port(setup_info, direction, interface_type, second_port=False): """ return source port ids based on test case direction and interface_type :param setup_info: setup info fixture :param direction: 'host-tor', 'leaf-tor' :param interface_type: type of interface :param second_port: boolean if second port id needs to be returned :return: source port ids """ if direction == 'host-tor': if second_port: return [setup_info[interface_type]['inner_port_id'][0] + 1] return setup_info[interface_type]['inner_port_id'] return setup_info[interface_type]['outer_port_id'] def get_dst_port(setup_info, direction, interface_type, second_port=False): """ return destination port ids based on test case direction and interface_type :param setup_info: setup info fixture :param direction: 
'host-tor', 'leaf-tor' :param interface_type: type of interface :param second_port: boolean if second port id needs to be returned :return: destination port ids """ if direction == 'leaf-tor': if second_port: return [setup_info[interface_type]['inner_port_id'][0] + 1] return setup_info[interface_type]['inner_port_id'] return setup_info[interface_type]['outer_port_id'] def get_src_ip(setup_info, direction, interface_type, nat_type=None, second_port=False): """ return source IP based on test case direction and interface_type :param setup_info: setup info fixture :param direction: 'host-tor', 'leaf-tor' :param interface_type: type of interface :param second_port: boolean if second port's IP settings need to be returned :param nat_type: string nat type :return: source IP """ if direction == 'host-tor' or nat_type == "static_napt": if second_port: return setup_info[interface_type]["second_src_ip"] return setup_info[interface_type]['src_ip'] return setup_info[interface_type]['dst_ip'] def get_dst_ip(setup_info, direction, interface_type, nat_type=None): """ return destination IP based on test case direction and interface_type :param setup_info: setup info fixture :param direction: 'host-tor', 'leaf-tor' :param interface_type: type of interface :param nat_type: string nat type :return: destination IP """ if direction == 'host-tor' or nat_type == "static_napt": return setup_info[interface_type]['dst_ip'] return setup_info[interface_type]['public_ip'] def get_public_ip(setup_info, interface_type): """ return public IP based on test case interface_type :param setup_info: setup info fixture :param interface_type: type of interface :return: public IP """ return setup_info[interface_type]['public_ip'] def setup_ptf_interfaces(testbed, ptfhost, duthost, setup_info, interface_type, vrf_id, vrf_name, port_id, ip_address, mask, gw_ip, key): """ setup ptf interfaces for tests :param testbed: Testbed object :param ptfhost: PTF host object :param duthost: DUT host object :param 
setup_info: setup info fixture :param interface_type: string interface type :param vrf_id: id of vrf :param vrf_name: vrf name :param port_id: port id of interface :param ip_address: ip address of interface :param mask: vrf mask :param gw_ip: ip address of gateway :param key: dictionary key if vrf configuration """ ptfhost.shell("grep -Fxq '{} {}' /etc/iproute2/rt_tables " "|| echo '{} {}' >> /etc/iproute2/rt_tables".format(vrf_id, vrf_name, vrf_id, vrf_name)) ptfhost.shell("ip link add {} type vrf table {}".format(vrf_name, vrf_id)) ptfhost.shell("ip link set dev {} up".format(vrf_name)) if vrf_name == "red": bond_interface = "bond1" ptfhost.shell("ip link add {} type bond".format(bond_interface)) ptfhost.shell("ip link set {} type bond miimon 100 mode balance-xor".format(bond_interface)) for iface_id in port_id[testbed['topo']['name']]: ptfhost.shell("ip link set eth{} down".format(iface_id)) ptfhost.shell("ip link set eth{} master {}".format(iface_id, bond_interface)) ptfhost.shell("ip link set dev {} up".format(bond_interface)) ptfhost.shell("ifconfig {} hw ether {}".format(bond_interface, BR_MAC[0])) ptfhost.shell("ifconfig {} mtu 9216 up".format(bond_interface)) ptfhost.shell("ip link set {} master {}".format(bond_interface, vrf_name)) ptfhost.shell("ip addr add {}/{} dev {}".format(ip_address, mask, bond_interface)) else: ptfhost.shell("ip link set eth{} master {}".format(port_id, vrf_name)) ptfhost.shell("ip addr add {}/{} dev eth{}".format(ip_address, mask, port_id)) ptfhost.shell("ip rule add iif {} table {}".format(vrf_name, vrf_id)) ptfhost.shell("ip rule add oif {} table {}".format(vrf_name, vrf_id)) ptfhost.shell("ip route add 0.0.0.0/0 via {} table {}".format(gw_ip, vrf_id)) if "dut_iface" in setup_info[interface_type]["vrf_conf"][key].keys(): dut_iface = setup_info[interface_type]["vrf_conf"][key]["dut_iface"] pch_ip = setup_info["pch_ips"][dut_iface] duthost.shell("sudo config interface ip remove {} {}/31".format(dut_iface, pch_ip)) 
duthost.shell("sudo config interface ip add {} {}/{}".format(dut_iface, gw_ip, mask)) def teardown_ptf_interfaces(testbed, ptfhost, gw_ip, vrf_id, ip_address, mask, port_id, vrf_name): """ teardown ptf interfaces after tests :param testbed: Testbed object :param ptfhost: PTF host object :param gw_ip: ip address of gateway :param vrf_id: id of vrf :param ip_address: ip address of interface :param mask: vrf mask :param port_id: port id of interface :param vrf_name: vrf name """ ptfhost.shell("ip route del 0.0.0.0/0 via {} table {}".format(gw_ip, vrf_id)) if vrf_name == "red": bond_interface = "bond1" ptfhost.shell("ip addr del {}/{} dev {}".format(ip_address, mask, bond_interface)) ptfhost.shell("ip rule del iif {} table {}".format(vrf_name, vrf_id)) ptfhost.shell("ip rule del oif {} table {}".format(vrf_name, vrf_id)) ptfhost.shell("ip link set {} nomaster".format(bond_interface)) for iface_id in port_id[testbed['topo']['name']]: ptfhost.shell("ip link set eth{} nomaster".format(iface_id)) ptfhost.shell("ip link set eth{} up".format(iface_id)) ptfhost.shell("ip link del {}".format(bond_interface)) ptfhost.shell("ip link del {} type vrf table {}".format(vrf_name, vrf_id)) else: ptfhost.shell("ip addr del {}/{} dev eth{}".format(ip_address, mask, port_id)) ptfhost.shell("ip rule del iif {} table {}".format(vrf_name, vrf_id)) ptfhost.shell("ip rule del oif {} table {}".format(vrf_name, vrf_id)) ptfhost.shell("ip link set eth{} nomaster".format(port_id)) ptfhost.shell("ip link del {} type vrf table {}".format(vrf_name, vrf_id)) def conf_ptf_interfaces(testbed, ptfhost, duthost, setup_info, interface_type, teardown=False): """ setup testbed's environment for CT run :param testbed: Testbed object :param ptfhost: PTF host object :param duthost: DUT host object :param setup_info: setup info fixture :param interface_type: string interface type :param teardown: Boolean parameter to remove or not PTF's interfaces config """ if not teardown: 
ptfhost.script("./scripts/change_mac.sh") for key in setup_info[interface_type]["vrf_conf"]: vrf_id = setup_info[interface_type]["vrf_conf"][key]["id"] vrf_name = key ip_address = setup_info[interface_type]["vrf_conf"][key]["ip"] gw_ip = setup_info[interface_type]["vrf_conf"][key]["gw"] port_id = setup_info[interface_type]["vrf_conf"][key]["port_id"] mask = setup_info[interface_type]["vrf_conf"][key]["mask"] if teardown: teardown_ptf_interfaces(testbed, ptfhost, gw_ip, vrf_id, ip_address, mask, port_id, vrf_name) else: setup_ptf_interfaces(testbed, ptfhost, duthost, setup_info, interface_type, vrf_id, vrf_name, port_id, ip_address, mask, gw_ip, key) if not teardown: ptfhost.shell('supervisorctl restart ptf_nn_agent') def expected_mask_nated_packet(pkt, protocol_type, ip_dst, ip_src, src_port=None, dst_port=None, icmp_id=None): """ Generate expected packet :param pkt: packet to be sent :param protocol_type: protocol type TCP, UDP or ICMP :param ip_src: expected source IP :param ip_dst: expected destination IP :param src_port: source L4 expected port :param dst_port: destination L4 expected port :param icmp_id: id for specify ICMP dynamic connection :return: expected packet """ # Set up all fields exp_pkt = pkt.copy() exp_pkt['IP'].ttl -= 1 exp_pkt['IP'].dst = ip_dst exp_pkt['IP'].src = ip_src if protocol_type in ["TCP", "UDP"]: exp_pkt[protocol_type].sport = src_port exp_pkt[protocol_type].dport = dst_port if protocol_type == "ICMP": exp_pkt[protocol_type].id = icmp_id exp_pkt = mask.Mask(exp_pkt) exp_pkt.set_do_not_care_scapy(packet.Ether, 'dst') exp_pkt.set_do_not_care_scapy(packet.Ether, 'src') exp_pkt.set_do_not_care_scapy(packet.IP, 'chksum') exp_pkt.set_do_not_care_scapy(packet.IP, 'id') return exp_pkt def create_packet(eth_dst, eth_src, ip_dst, ip_src, protocol_type, sport=None, dport=None): """ generate packet to send :param eth_dst: destination Ethernet address :param eth_src: source Ethernet address :param ip_dst: destination IP address :param ip_src: 
source IP address :param protocol_type: TCP/UDP/ICMP :param sport: source port for UDP/TCP packet :param dport: destination port for UDP/TCP traffic :return: packet based on protocol type """ if protocol_type == "TCP": return testutils.simple_tcp_packet(eth_dst=eth_dst, eth_src=eth_src, ip_dst=ip_dst, ip_src=ip_src, tcp_sport=sport, tcp_dport=dport, ip_ttl=64) elif protocol_type == "UDP": return testutils.simple_udp_packet(eth_dst=eth_dst, eth_src=eth_src, ip_dst=ip_dst, ip_src=ip_src, udp_sport=sport, udp_dport=dport, ip_ttl=64) return testutils.simple_icmp_packet(eth_dst=eth_dst, eth_src=eth_src, ip_dst=ip_dst, ip_src=ip_src, icmp_type=8, icmp_code=0, ip_ttl=64) def teardown_test_env(testbed, duthost, ptfhost, setup_info, interface_type, reboot=False, before_test=False): """ teardown function cleans DUT's config and PTF's interfaces :param duthost: duthost fixture :param ptfhost: ptfhost fixture :param setup_info: setup_info fixture :param interface_type: string interface type :param reboot: if True perform DUT reboot :param before_test: boolean to not clear/clear PTF configuration """ # reset dut to initial T0 configuration if reboot: duthost.command('reboot') else: config_reload(duthost) # wait for dut become stable time.sleep(180) # remove ptf interfaces configuration if not before_test: conf_ptf_interfaces(testbed, ptfhost, duthost, setup_info, interface_type, teardown=True) def get_network_data(ptfadapter, setup_info, direction, interface_type, nat_type=None, second_port=False): """ Gets network data: MACs, IPs, inner/outer ports ids Args: ptfadapter: ptf adapter fixture setup_info: setup_info fixture direction: string with current flow direction interface_type: string interface type nat_type: string with static napt/nat/dynamic types second_port: boolean if second port id needs to be returned """ # Get outer and inner ports outer_ports = get_dst_port(setup_info, direction, interface_type, second_port=second_port) inner_ports = get_src_port(setup_info, 
direction, interface_type, second_port=second_port) mac_map = {"host-tor": ptfadapter.dataplane.get_mac(0, inner_ports[0]), "leaf-tor": BR_MAC[0]} # Get source and destination IPs for packets to send ip_src = get_src_ip(setup_info, direction, interface_type, nat_type=nat_type, second_port=second_port) ip_dst = get_dst_ip(setup_info, direction, interface_type, nat_type=nat_type) # Define expected source and destination IP based on direction if nat_type == "static_napt" and direction == "leaf-tor": exp_dst_ip = ip_src ip_src = ip_dst ip_dst = setup_info[interface_type]["public_ip"] exp_src_ip = ip_src elif direction == 'host-tor': exp_dst_ip = setup_info[interface_type]["dst_ip"] exp_src_ip = setup_info[interface_type]["public_ip"] else: exp_dst_ip = setup_info[interface_type]["src_ip"] exp_src_ip = setup_info[interface_type]["dst_ip"] if second_port: exp_dst_ip = setup_info[interface_type]["second_src_ip"] # Get MAC addresses for packets to send eth_dst = setup_info['router_mac'] eth_src = mac_map[direction] # Get public and private IPs for NAT configuration public_ip = get_public_ip(setup_info, interface_type) private_ip = get_src_ip(setup_info, direction, interface_type, nat_type, second_port) return PTF_NETWORK_DATA(outer_ports, inner_ports, eth_dst, eth_src, ip_src, ip_dst, public_ip, private_ip, exp_src_ip, exp_dst_ip) def perform_handshake(ptfhost, setup_info, protocol_type, direction, ip_dst, dest_l4_port, ip_src, source_l4_port, public_ip, second_port=False): """ Performs TCP handshake to initiate NAT translation Args: ptfhost: ptf host fixture setup_info: setup_info fixture protocol_type: sting with TCP/UDP values direction: string with current flow direction ip_dst: IP destination dest_l4_port: destination L4 port ip_src: IP source source_l4_port: source L4 port public_ip: Public IP second_port: boolean if second port id needs to be returned n_perf: int specifing number of connection for performance test """ src_vrf = setup_info["inner_vrf"][0] dst_vrf = 
setup_info["outer_vrf"][0] if second_port: src_vrf = setup_info["inner_vrf"][1] dst_vrf = setup_info["outer_vrf"][1] if direction == "host-tor": echo_cmd = "python /tmp/nat_ptf_echo.py {} {} {} {} {} {} {} None &".format(protocol_type.lower(), ip_dst, dest_l4_port, ip_src, source_l4_port, dst_vrf, src_vrf) else: echo_cmd = "python /tmp/nat_ptf_echo.py {} {} {} {} {} {} {} {} &".format(protocol_type.lower(), ip_src, source_l4_port, ip_dst, dest_l4_port, dst_vrf, src_vrf, public_ip) ptfhost.copy(src="./scripts/nat_ptf_echo.py", dest="/tmp") ptfhost.command(echo_cmd) def generate_and_verify_traffic(duthost, ptfadapter, setup_info, interface_type, direction, protocol_type, nat_type, second_port=False, src_port=None, dst_port=None, exp_src_port=None, exp_dst_port=None): """ Generates TCP/UDP traffic and checks that traffic is translated due to NAT types/rules Args: duthost: duthost fixture ptfadapter: ptf adapter fixture setup_info: setup_info fixture interface_type: string interface type direction: string with current flow direction protocol_type: sting with TCP/UDP values nat_type: string with static napt/nat/dynamic types second_port: boolean if second port id needs to be returned src_port: L4 source port in packet to send dst_port: L4 destination port in packet to send exp_src_port: L4 source port in expected packet exp_dst_port: L4 destination port in expected packet """ # Define network data and L4 ports network_data = get_network_data(ptfadapter, setup_info, direction, interface_type, nat_type=nat_type, second_port=second_port) if nat_type != 'dynamic': l4_ports = get_static_l4_ports(protocol_type, direction, nat_type) else: l4_ports = get_dynamic_l4_ports(duthost, protocol_type, direction, network_data.public_ip) if src_port is None: src_port = l4_ports.src_port if dst_port is None: dst_port = l4_ports.dst_port if exp_src_port is None: exp_src_port = l4_ports.exp_src_port if exp_dst_port is None: exp_dst_port = l4_ports.exp_dst_port # Create packet to send pkt = 
create_packet(network_data.eth_dst, network_data.eth_src, network_data.ip_dst, network_data.ip_src, protocol_type, sport=src_port, dport=dst_port) # Define expected packet exp_pkt = expected_mask_nated_packet(pkt, protocol_type, network_data.exp_dst_ip, network_data.exp_src_ip, src_port=exp_src_port, dst_port=exp_dst_port) # clear buffer ptfadapter.dataplane.flush() # Send packet for port in network_data.inner_ports: testutils.send(ptfadapter, port, pkt, count=5) # Verify that expected packets arrive on outer ports testutils.verify_packet_any_port(ptfadapter, exp_pkt, ports=network_data.outer_ports) def generate_and_verify_not_translated_traffic(ptfadapter, setup_info, interface_type, direction, protocol_type, nat_type, second_port=False, ip_src=None, ip_dst=None, exp_ip_src=None, exp_ip_dst=None): """ Generates TCP/UDP traffic and checks that traffic is not translated due to NAT types/rules Args: ptfadapter: ptf adapter fixture setup_info: setup_info fixture interface_type: string interface type direction: string with current flow direction protocol_type: sting with TCP/UDP values nat_type: string with static napt/nat/dynamic types second_port: boolean if second port id needs to be returned ip_src: IP source in packet to send ip_dst: IP destination in packet to send exp_ip_src: IP source in expected packet exp_ip_dst: IP destination in expected packet """ # Define network data and L4 ports network_data = get_network_data(ptfadapter, setup_info, direction, interface_type, nat_type=nat_type, second_port=second_port) src_port, dst_port = get_l4_default_ports(protocol_type) if ip_src is None: ip_src = network_data.ip_src if ip_dst is None: ip_dst = network_data.ip_dst if exp_ip_src is None: exp_ip_src = network_data.ip_src if exp_ip_dst is None: exp_ip_dst = network_data.ip_dst # Create packet to send pkt = create_packet(network_data.eth_dst, network_data.eth_src, ip_dst, ip_src, protocol_type, sport=src_port, dport=dst_port) # Define expected packet exp_pkt = 
expected_mask_nated_packet(pkt, protocol_type, exp_ip_dst, exp_ip_src, src_port=src_port, dst_port=dst_port) # clear buffer ptfadapter.dataplane.flush() # Send packet for port in network_data.inner_ports: testutils.send(ptfadapter, port, pkt, count=5) # Verify that expected packets arrive on outer ports testutils.verify_packet_any_port(ptfadapter, exp_pkt, ports=network_data.outer_ports) def generate_and_verify_traffic_dropped(ptfadapter, setup_info, interface_type, direction, protocol_type, nat_type, src_port, dst_port, exp_src_port, exp_dst_port, second_port=False): """ Generates TCP/UDP traffic and checks that traffic is dropped Args: ptfadapter: ptf adapter fixture setup_info: setup_info fixture interface_type: string interface type direction: string with current flow direction protocol_type: sting with TCP/UDP values nat_type: string with static napt/nat/dynamic types src_port: L4 source port in packet to send dst_port: L4 destination port in packet to send exp_src_port: L4 source port in expected packet exp_dst_port: L4 destination port in expected packet second_port: boolean if second port id needs to be returned """ # Define network data and L4 ports network_data = get_network_data(ptfadapter, setup_info, direction, interface_type, nat_type=nat_type, second_port=second_port) # Create packet to send pkt = create_packet(network_data.eth_dst, network_data.eth_src, network_data.ip_dst, network_data.ip_src, protocol_type, sport=src_port, dport=dst_port) # Define expected packet exp_pkt = expected_mask_nated_packet(pkt, protocol_type, network_data.exp_dst_ip, network_data.exp_src_ip, src_port=exp_src_port, dst_port=exp_dst_port) # clear buffer ptfadapter.dataplane.flush() # Send packet for port in network_data.inner_ports: testutils.send(ptfadapter, port, pkt, count=5) # Verify that expected packets arrive on outer ports testutils.verify_no_packet_any(ptfadapter, exp_pkt, ports=network_data.outer_ports) def generate_and_verify_icmp_traffic(ptfadapter, setup_info, 
interface_type, direction, nat_type, second_port=False, icmp_id=None): """ Generates ICMP traffic and checks that traffic is translated due to NAT types/rules. Args: ptfadapter: ptf adapter fixture setup_info: setup_info fixture interface_type: string interface type direction: string with current flow direction nat_type: string with static napt/nat/dynamic types second_port: boolean if second port id needs to be returned icmp_id: id for specify ICMP dynamic connection """ protocol_type = 'ICMP' # Define network data network_data = get_network_data(ptfadapter, setup_info, direction, interface_type, nat_type=nat_type, second_port=second_port) # Create packet to send pkt = create_packet(network_data.eth_dst, network_data.eth_src, network_data.ip_dst, network_data.ip_src, protocol_type) # Define expected packet(ICMP request) exp_pkt_request = expected_mask_nated_packet(pkt, protocol_type, network_data.exp_dst_ip, network_data.exp_src_ip, icmp_id=icmp_id) # Reverse source and destination IPs for reply exp_dst_ip = get_src_ip(setup_info, direction, interface_type, nat_type=nat_type, second_port=second_port) exp_src_ip = get_dst_ip(setup_info, direction, interface_type, nat_type=nat_type) # Define expected packet(ICMP reply) exp_pkt_reply = expected_mask_nated_packet(pkt, protocol_type, exp_dst_ip, exp_src_ip, icmp_id=0) exp_pkt_reply.exp_pkt[protocol_type].type = 0 # clear buffer ptfadapter.dataplane.flush() # Send packet for port in network_data.inner_ports: testutils.send(ptfadapter, port, pkt, count=5) # Verify ICMP request packets arrive on outer ports testutils.verify_packet_any_port(ptfadapter, exp_pkt_request, ports=network_data.outer_ports) # Verify ICMP peply packets arrive on inner ports testutils.verify_packet_any_port(ptfadapter, exp_pkt_reply, ports=network_data.inner_ports) def generate_and_verify_not_translated_icmp_traffic(ptfadapter, setup_info, interface_type, direction, nat_type, second_port=False, ip_src=None, ip_dst=None, check_reply=True): """ 
Generates ICMP traffic and checks that traffic is not translated due to NAT types/rules. Args: ptfadapter: ptf adapter fixture setup_info: setup_info fixture interface_type: string interface type direction: string with current flow direction nat_type: string with static napt/nat/dynamic types second_port: boolean if second port id needs to be returned ip_src: IP source in packet to send ip_dst: IP destination in packet to send check_reply: boolean if requires to verify ICMP reply """ protocol_type = 'ICMP' # Define network data network_data = get_network_data(ptfadapter, setup_info, direction, interface_type, nat_type=nat_type, second_port=second_port) if ip_src is None: ip_src = network_data.ip_src if ip_dst is None: ip_dst = network_data.ip_dst # Create packet to send pkt = create_packet(network_data.eth_dst, network_data.eth_src, ip_dst, ip_src, protocol_type) # Define expected packet(ICMP request) exp_pkt_request = expected_mask_nated_packet(pkt, protocol_type, ip_dst, ip_src) # Define expected packet(ICMP reply) exp_pkt_reply = expected_mask_nated_packet(pkt, protocol_type, ip_src, ip_dst) exp_pkt_reply.exp_pkt[protocol_type].type = 0 # clear buffer ptfadapter.dataplane.flush() # Send packet for port in network_data.inner_ports: testutils.send(ptfadapter, port, pkt, count=5) # Verify ICMP request packets arrive on outer ports testutils.verify_packet_any_port(ptfadapter, exp_pkt_request, ports=network_data.outer_ports) if check_reply: # Verify ICMP peply packets arrive on inner ports testutils.verify_packet_any_port(ptfadapter, exp_pkt_reply, ports=network_data.inner_ports) def get_l4_default_ports(protocol_type): """ Get default L4 ports :param protocol_type: type of protocol TCP/UDP :return source_l4_port, dest_l4_port """ source_l4_port = TCP_LOCAL_PORT dest_l4_port = TCP_GLOBAL_PORT if protocol_type == "UDP": source_l4_port = UDP_LOCAL_PORT dest_l4_port = UDP_GLOBAL_PORT return source_l4_port, dest_l4_port def get_dynamic_l4_ports(duthost, proto, direction, 
public_ip): """ Get l4 ports for dynamic NAT test cases :param proto: sting with TCP/UDP values :param direction: string with current flow direction :return named tuple with values src_port, dst_port, exp_src_port, exp_dst_por """ time.sleep(5) # Get expected source port output = exec_command(duthost, ["show nat translation"])['stdout'] # Find expected source port pattern = r"{}.+{}:(\d+)".format(proto.lower(), public_ip) ports = re.findall(pattern, output) if not ports: raise Exception("Dynamic NAT translation was not created") dynamic_global_port = int(sorted(ports)[-1]) src_port, dst_port = get_l4_default_ports(proto) if direction == "leaf-tor": exp_src_port = dynamic_global_port exp_dst_port = src_port src_port = dynamic_global_port dst_port = dynamic_global_port else: exp_src_port = dynamic_global_port exp_dst_port = dynamic_global_port dst_port = dynamic_global_port return L4_PORTS_DATA(src_port, dst_port, exp_src_port, exp_dst_port) def configure_dynamic_nat_rule(duthost, ptfadapter, ptfhost, setup_info, interface_type, protocol_type, pool_name=DYNAMIC_POOL_NAME, public_ip=None, acl_table=ACL_TABLE_GLOBAL_NAME, ports_assigned=None, acl_rules=None, binding_name=DYNAMIC_BINDING_NAME, port_range=None, default=False, remove_bindings=False, handshake=False): """ method configure Dynamic NAT rules :param duthost: duthost fixture :param setup_info: setup_info fixture :param interface_type: interface_type Loopback, Portchannel etc :param pool_name: name of the pool to apply :param public_ip: IP of Public L3 interface :param acl_table: acl table name to create :param ports_assigned: assigned ports to ACL table :param acl_rules: ALC rules to apply :param binding_name: NAT binding name :param port_range: range of L4 port to apply :param remove_bindings: if True remove applied bindings from NAT rules :param default: use default ports :param handshake: if True perform handshake """ if default: # Set private IP for dynamic NAT configuration public_ip = 
get_public_ip(setup_info, interface_type) if not public_ip else public_ip acl_subnet = setup_info[interface_type]["acl_subnet"] acl_rules = [{"priority": "10", "src_ip": acl_subnet, "action": "forward"}] if not acl_rules else acl_rules port_range = "{0}-{1}".format(POOL_RANGE_START_PORT, POOL_RANGE_END_PORT) if not port_range else port_range ports_assigned = setup_info['indices_to_ports_config'][setup_info[interface_type]['inner_port_id'][0]] if not \ ports_assigned else ports_assigned # Set NAT configuration for test duthost.command("sudo config nat add pool {0} {1} {2}".format(pool_name, public_ip, port_range)) # Check that pool configuration was applied show_nat_pool = get_cli_show_nat_config_output(duthost, "pool") pytest_assert(pool_name == show_nat_pool[0]['pool name'], "Pool name was not set to {}".format(pool_name)) pytest_assert(public_ip == show_nat_pool[0]['global ip range'], "Global IP Range was not set to {}".format(public_ip)) pytest_assert(port_range == show_nat_pool[0]['global port range'], "Global Port Range was not set to {}".format(port_range)) # Add bindings duthost.command("sudo config nat add binding {0} {1} {2}".format(binding_name, pool_name, acl_table)) # Check that binding configuration was applied show_nat_binding = get_cli_show_nat_config_output(duthost, "bindings") pytest_assert(binding_name == show_nat_binding[0]['binding name'], "Binding Name was not set to {}".format(binding_name)) pytest_assert(pool_name == show_nat_binding[0]['pool name'], "Pool Name was not set to {}".format(pool_name)) pytest_assert(acl_table == show_nat_binding[0]['access-list'], "Access-List was not set to {}".format(acl_table)) # Apply acl table and rule duthost.command("mkdir -p {}".format(DUT_TMP_DIR)) # Initialize variables for NAT global table acl_rule_vars = { 'acl_table_name': acl_table, 'stage': "INGRESS", 'ports_assigned': ports_assigned, 'acl_rules': acl_rules } duthost.host.options['variable_manager'].extra_vars.update(acl_rule_vars) acl_config = 
'acl_table.json' acl_config_path = os.path.join(DUT_TMP_DIR, acl_config) duthost.template(src=os.path.join(TEMPLATE_DIR, ACL_TEMPLATE), dest=acl_config_path) # Apply config file duthost.command('sonic-cfggen -j {} --write-to-db'.format(acl_config_path)) # Remove temporary folders duthost.command('rm -rf {}'.format(DUT_TMP_DIR)) if remove_bindings: duthost.command("config nat remove bindings") # Apply NAT zones nat_zones_config(duthost, setup_info, interface_type) # set_arp entries check_peers_by_ping(duthost) if handshake: # Perform handshake direction = 'host-tor' # Define network data and L4 ports network_data = get_network_data(ptfadapter, setup_info, direction, interface_type, nat_type='dynamic') src_port, dst_port = get_l4_default_ports(protocol_type) # Perform TCP handshake (host-tor -> leaf-tor) perform_handshake(ptfhost, setup_info, protocol_type, direction, network_data.ip_dst, dst_port, network_data.ip_src, src_port, network_data.public_ip) def wait_timeout(protocol_type, wait_time=None, default=True): """ method for wait until NAT entry expired or some time to check that they ware not expired :param protocol_type: type of protocol :param wait_time: time to wait :param default: wait default NAT timeout """ if default: if protocol_type == "UDP": # Wait until UDP entry expires time.sleep(GLOBAL_UDP_NAPT_TIMEOUT + 80) elif protocol_type == "TCP": time.sleep(GLOBAL_TCP_NAPT_TIMEOUT + 80) else: time.sleep(60) else: time.sleep(wait_time) def get_static_l4_ports(proto, direction, nat_type): """ Get l4 ports for static NAT/NAPT test cases :param proto: sting with TCP/UDP values :param direction: string with current flow direction :param nat_type: string with static napt/nat types :return named tuple with values src_port, dst_port, exp_src_port, exp_dst_por """ src_port, dst_port = get_l4_default_ports(proto) if nat_type == 'static_napt' and direction == "host-tor": exp_src_port = dst_port exp_dst_port = dst_port elif nat_type == "static_napt" and direction == 
"leaf-tor": exp_src_port, exp_dst_port = dst_port, src_port src_port = dst_port elif direction == "leaf-tor": exp_src_port, exp_dst_port = dst_port, src_port src_port, dst_port = dst_port, src_port elif direction == "host-tor": exp_src_port = src_port exp_dst_port = dst_port return L4_PORTS_DATA(src_port, dst_port, exp_src_port, exp_dst_port) def conf_dut_routes(duthost, setup_info, subnet, interface_type, teardown=False): """ method for add/delete routes on DUT :param duthost: DUT host object :param setup_info: dict with interfaces parameters to configure :param subnet: subnet to configure :param interface_type: string interface type :param teardown: Boolean parameter to remove or not DUT routes """ gw = setup_info[interface_type]["vrf_conf"]["red"]["ip"][:-1] + "{}". \ format(int(setup_info[interface_type]["vrf_conf"]["red"]["ip"][-1:]) + 1) if teardown: try: duthost.command("ip route del {} via {}".format(subnet, gw)) except RunAnsibleModuleFail: logger.debug("Route '%s via %s' was not deleted/existed", subnet, gw) else: duthost.command("ip route add {} via {}".format(subnet, gw)) def get_redis_val(duthost, db, key): """ Returns dictionary of value for redis key. :param duthost: DUT host object :param db: database to be selected :param key: key to be selected """ try: output = exec_command(duthost, ["redis-dump -d {} --pretty -k *{}*".format(db, key)]) if output["rc"]: raise Exception('Return code is {} not 0'.format(output_cli["rc"])) redis_dict = json.loads(output['stdout']) return redis_dict except Exception as e: return e.__str__() def get_db_rules(duthost, ptfadapter, setup_test_env, protocol_type, db_type, private_ip=None, public_ip=None, private_port=None, public_port=None, start_port=POOL_RANGE_START_PORT, end_port=POOL_RANGE_END_PORT, access_list=ACL_TABLE_GLOBAL_NAME, nat_pool=DYNAMIC_POOL_NAME, post_flag=False): """ Returns dictionary of database rules. 
:param duthost: DUT host object :param ptfadapter: ptf adapter fixture :param setup_test_env: fixture used to gather setup_info fixture and interface_type (Loopback, Portchannel etc) :param protocol_type: type of protocol TCP/UDP :param db_type: databyte type used to select which redis dump should be checked :param private_ip: IP variable used to confirm proper configuration :param public_ip: IP variable used to confirm proper configuration :param private_port: port variable used to confirm proper configuration :param public_port: port variable used to confirm proper configuration :param start_port: port variable used to confirm proper configuration :param end_port: port variable used to confirm proper configuration :param access_list: ACL variable used to confirm proper configuration :param nat_pool: pool variable used to confirm proper configuration :param post_flag: boolean flag used to determine which redis dump template should be used (pre or post configuration) """ interface_type, setup_info = setup_test_env setup_data = copy.deepcopy(setup_info) nat_type = 'static_napt' direction = 'host-tor' network_data = get_network_data(ptfadapter, setup_data, direction, interface_type, nat_type=nat_type) secondary_protocol = {"TCP": "UDP", "UDP": "TCP"}[protocol_type] global_port = {"TCP": TCP_GLOBAL_PORT, "UDP": UDP_GLOBAL_PORT}[protocol_type] local_port = {"TCP": TCP_LOCAL_PORT, "UDP": UDP_LOCAL_PORT}[protocol_type] db_rules = {} # APP_DB timeout if db_type == 'APP_DB timeout': offset = {True: 200, False: 0}[post_flag] db_rules = {"nat_timeout" : "{}".format(GLOBAL_NAT_TIMEOUT + offset), "admin_mode" : "enabled", "nat_udp_timeout" : "{}".format(GLOBAL_UDP_NAPT_TIMEOUT + offset), "nat_tcp_timeout" : "{}".format(GLOBAL_TCP_NAPT_TIMEOUT + offset * 25) } # Pool CONFIG_DB elif db_type == 'Pool CONFIG_DB': db_rules = {"nat_ip": "{}".format(public_ip), "nat_port": "{}-{}".format(start_port, end_port) } # Pool APP_DB elif db_type == 'Pool APP_DB': db_rules = {"port_range": 
"{}-{}".format(start_port, end_port)} # Binding CONFIG_DB elif db_type == 'Binding CONFIG_DB': db_rules = {"access_list": access_list, "nat_pool": nat_pool, "nat_type": "snat", "twice_nat_id": "NULL" } # NAPT APP_DB elif db_type == 'NAPT APP_DB': db_rules = { "NAPT_TABLE:{}:{}:{}".format(protocol_type, network_data.public_ip, global_port): { "type": "hash", "value": { "entry_type": "static", "nat_type": "dnat", "translated_ip": "{}".format(network_data.private_ip), "translated_l4_port": "{}".format(local_port) } }, "NAPT_TABLE:{}:{}:{}".format(protocol_type, network_data.private_ip, local_port): { "type": "hash", "value": { "entry_type": "static", "nat_type": "snat", "translated_ip": "{}".format(network_data.public_ip), "translated_l4_port": "{}".format(global_port) } } } # NAPT CONFIG_DB elif db_type == 'NAPT CONFIG_DB': db_rules = { "STATIC_NAPT|{}|{}|{}".format(network_data.public_ip, protocol_type, global_port): { "type": "hash", "value": { "local_ip": "{}".format(network_data.private_ip), "local_port": "{}".format(local_port), "nat_type": "dnat" } } } # NAPT APP_DB POST elif db_type == 'NAPT APP_DB POST': db_rules = { "NAPT_TABLE:{}:{}:{}".format(protocol_type, public_ip, public_port): { "type": "hash", "value": { "entry_type": "static", "nat_type": "dnat", "translated_ip": "{}".format(private_ip), "translated_l4_port": "{}".format(private_port) } }, "NAPT_TABLE:{}:{}:{}".format(protocol_type, private_ip, private_port): { "type": "hash", "value": { "entry_type": "static", "nat_type": "snat", "translated_ip": "{}".format(public_ip), "translated_l4_port": "{}".format(public_port) } }, "NAPT_TABLE:{}:{}:{}".format(protocol_type, network_data.public_ip, global_port): { "type": "hash", "value": { "entry_type": "static", "nat_type": "dnat", "translated_ip": "{}".format(network_data.private_ip), "translated_l4_port": "{}".format(local_port) } }, "NAPT_TABLE:{}:{}:{}".format(protocol_type, network_data.private_ip, local_port): { "type": "hash", "value": { 
"entry_type": "static", "nat_type": "snat", "translated_ip": "{}".format(network_data.public_ip), "translated_l4_port": "{}".format(global_port) } }, "NAPT_TABLE:{}:{}:{}".format(secondary_protocol, public_ip, public_port): { "type": "hash", "value": { "entry_type": "static", "nat_type": "dnat", "translated_ip": "{}".format(private_ip), "translated_l4_port": "{}".format(private_port) } }, "NAPT_TABLE:{}:{}:{}".format(secondary_protocol, private_ip, private_port): { "type": "hash", "value": { "entry_type": "static", "nat_type": "snat", "translated_ip": "{}".format(public_ip), "translated_l4_port": "{}".format(public_port) } } } # NAPT CONFIG_DB POST elif db_type == 'NAPT CONFIG_DB POST': db_rules = { "STATIC_NAPT|{}|{}|{}".format(public_ip, protocol_type, public_port): { "type": "hash", "value": { "local_ip": "{}".format(private_ip), "local_port": "{}".format(private_port), "nat_type": "dnat" } }, "STATIC_NAPT|{}|{}|{}".format(public_ip, secondary_protocol, public_port): { "type": "hash", "value": { "local_ip": "{}".format(private_ip), "local_port": "{}".format(private_port), "nat_type": "dnat" } }, "STATIC_NAPT|{}|{}|{}".format(network_data.public_ip, protocol_type, global_port): { "type": "hash", "value": { "local_ip": "{}".format(network_data.private_ip), "local_port": "{}".format(local_port), "nat_type": "dnat" } } } # ASIC_DB SRC status elif db_type == 'ASIC_DB SRC': db_rules = { "SAI_NAT_ENTRY_ATTR_SRC_IP": "{}".format(network_data.public_ip), "SAI_NAT_ENTRY_ATTR_L4_SRC_PORT": "{}".format(global_port) } # ASIC_DB DST status elif db_type == 'ASIC_DB DST': db_rules = { "SAI_NAT_ENTRY_ATTR_DST_IP": "{}".format(network_data.private_ip), "SAI_NAT_ENTRY_ATTR_L4_DST_PORT": "{}".format(local_port) } else: raise Exception('Improper db_type selected') return db_rules def write_json(duthost, json_dict, feature): """ Write NAT config json to dut :param DUT host name :param json dictionary with variables used by templates :param feature used to select which template should 
be used """ TEMP_FILE = "{}.json".format(feature) curr_dir = os.path.dirname(os.path.abspath(__file__)) j2_template = Environment(loader=FileSystemLoader(curr_dir), trim_blocks=True) if feature == "dynamic_binding": j2_temp = j2_template.get_template(NAT_CONF_J2_TEMPLATE).render(nat=json_dict) else: raise AttributeError("Unexpected feature {}".format(feature)) exec_command(duthost, ["mkdir -p {}".format(DUT_TMP_DIR)]) exec_command(duthost, ["echo '{j2_temp}' > {dir}/{file}". format(j2_temp=j2_temp, dir=DUT_TMP_DIR, file=TEMP_FILE)]) exec_command(duthost, ["sudo config load {} -y".format(DUT_TMP_DIR+"/"+TEMP_FILE)]) exec_command(duthost, ["rm -rf {}".format(DUT_TMP_DIR)])
crud_operations_basic
0005_add_surveys_permissions_to_groups.py
# -*- coding: utf-8 -*- # Generated by Django 1.9.9 on 2016-10-23 12:29 from __future__ import unicode_literals from django.db import migrations from django.core.management.sql import emit_post_migrate_signal class
(migrations.Migration): def add_surveys_permissions_to_groups(apps, schema_editor): db_alias = schema_editor.connection.alias try: # Django 1.9 emit_post_migrate_signal(2, False, db_alias) except TypeError: # Django < 1.9 try: # Django 1.8 emit_post_migrate_signal(2, False, 'default', db_alias) except TypeError: # Django < 1.8 emit_post_migrate_signal([], 2, False, 'default', db_alias) Group = apps.get_model('auth.Group') GroupPagePermission = apps.get_model('wagtailcore.GroupPagePermission') SurveysIndexPage = apps.get_model('surveys.SurveysIndexPage') # Create groups # <- Editors -> editor_group = Group.objects.get(name='Editors') surveys = SurveysIndexPage.objects.first() GroupPagePermission.objects.get_or_create( group=editor_group, page=surveys, permission_type='add', ) GroupPagePermission.objects.get_or_create( group=editor_group, page=surveys, permission_type='edit', ) # <- Moderator -> moderator_group = Group.objects.get(name='Moderators') surveys = SurveysIndexPage.objects.first() GroupPagePermission.objects.get_or_create( group=moderator_group, page=surveys, permission_type='add', ) GroupPagePermission.objects.get_or_create( group=moderator_group, page=surveys, permission_type='edit', ) GroupPagePermission.objects.get_or_create( group=moderator_group, page=surveys, permission_type='publish', ) dependencies = [ ('surveys', '0004_create_surveys_index_page'), ('core', '0047_add_core_permissions_to_groups'), ('contenttypes', '__latest__'), ('sites', '__latest__'), ] operations = [ migrations.RunPython(add_surveys_permissions_to_groups), ]
Migration
svm.py
""" Linear SVM ========== This script fits a linear support vector machine classifier to random data. It illustrates how a function defined purely by NumPy operations can be minimized directly with a gradient-based solver. """ import numpy as np from autodiff.optimize import fmin_l_bfgs_b def test_svm(): rng = np.random.RandomState(1) # -- create some fake data x = rng.rand(10, 5) y = 2 * (rng.rand(10) > 0.5) - 1 l2_regularization = 1e-4 # -- loss function def loss_fn(weights, bias): margin = y * (np.dot(x, weights) + bias) loss = np.maximum(0, 1 - margin) ** 2 l2_cost = 0.5 * l2_regularization * np.dot(weights, weights) loss = np.mean(loss) + l2_cost print('ran loss_fn(), returning {}'.format(loss)) return loss # -- call optimizer w_0, b_0 = np.zeros(5), np.zeros(())
final_loss = loss_fn(w, b) assert np.allclose(final_loss, 0.7229) print('optimization successful!') if __name__ == '__main__': test_svm()
w, b = fmin_l_bfgs_b(loss_fn, init_args=(w_0, b_0))
traverse.go
package htmlutil import ( "golang.org/x/net/html" ) // FilterCallback is used to filter out nodes by Traverse(). type FilterCallback func(*Doc) bool // Traverse traverses the nodes of a tree // using DFS and calls `callback' on every node. func (root *Doc) Traverse(filter FilterCallback, recurse bool) []*Doc { var rv []*Doc if root.Node == nil { return nil }
return rv } } for c := root.FirstChild; c != nil; c = c.NextSibling { rv = append(rv, (&Doc{c}).Traverse(filter, recurse)...) if rv != nil && !recurse { return rv } } return rv } func makeFinder(node string, argattr []html.Attribute) func(n *Doc) bool { return func(n *Doc) bool { if n.Type == html.ElementNode { if (node == "" || n.Data == node) && attrMatch(argattr, n.Attr) { return true } } return false } } //Find searches and returns the first occurance of `node'. func (root *Doc) Find(node string, argattr []html.Attribute) *Doc { if root == nil || (node == "" && argattr == nil) { return nil } if nodes := root.Traverse(makeFinder(node, argattr), false); nodes != nil { return nodes[0] } return nil } // FindAll returns all the matching elements. func (root *Doc) FindAll(node string, argattr []html.Attribute) []*Doc { if root == nil || (node == "" && argattr == nil) { return nil } return root.Traverse(makeFinder(node, argattr), true) }
if filter(root) { rv = append(rv, root) if !recurse {
keywords.rs
//! Keywords are whole words that are lexed as a single unit and reserved
//! by the language.
//!
//! Keywords are reserved to help the parser interpret tokens and resolve
//! ambiguities. Some of the keywords here are reserved but not used. They are
//! reserved to avoid their use as symbols in source files so that they can
//! potentially be used in the future without breaking existing code.

use std::collections::HashMap;

use crate::common::multiphase::{Accessibility, PseudoIdentifier};
use crate::lexing::tokens::{
    Binding, BranchingAndJumping, DeclarationHead, Macros, Modifier, ModuleDefinitions, Token,
};

/// Build the keyword lookup table mapping each reserved word to its token.
///
/// Entries fall into three groups: pseudoidentifiers, keywords currently used
/// by the language, and words reserved for potential future use (all mapped
/// to `Token::ReservedKeyword`).
pub fn new() -> HashMap<&'static str, Token> {
    let mut map = HashMap::new();
    map.extend(vec![
        //
        // Pseudoidentifiers
        //
        (
            "_",
            Token::PseudoIdentifier(PseudoIdentifier::PlaceholderIdentifier),
        ),
        (
            "continue",
            Token::PseudoIdentifier(PseudoIdentifier::Continue),
        ),
        ("this", Token::PseudoIdentifier(PseudoIdentifier::This)),
        ("This", Token::PseudoIdentifier(PseudoIdentifier::ThisType)),
        ("it", Token::PseudoIdentifier(PseudoIdentifier::It)),
        ("super", Token::PseudoIdentifier(PseudoIdentifier::Super)),
        //
        // Used
        //
        ("as", Token::Binding(Binding::As)),
        ("class", Token::DeclarationHead(DeclarationHead::Class)),
        (
            "else",
            Token::BranchingAndJumping(BranchingAndJumping::Else),
        ),
        ("extend", Token::DeclarationHead(DeclarationHead::Extend)),
        ("extends", Token::Extends),
        (
            "exports",
            Token::ModuleDefinitions(ModuleDefinitions::Exports),
        ),
        ("extern", Token::DeclarationHead(DeclarationHead::Extern)),
        ("final", Token::Binding(Binding::Final)),
        ("for", Token::BranchingAndJumping(BranchingAndJumping::For)),
        ("fun", Token::DeclarationHead(DeclarationHead::Fun)),
        ("global", Token::Global),
        ("if", Token::BranchingAndJumping(BranchingAndJumping::If)),
        ("ignorable", Token::Modifier(Modifier::Ignorable)),
        (
            "implements",
            Token::DeclarationHead(DeclarationHead::Implements),
        ),
        (
            "internal",
            Token::Modifier(Modifier::Accessibility(Accessibility::Internal)),
        ),
        (
            "interface",
            Token::DeclarationHead(DeclarationHead::Interface),
        ),
        ("module", Token::DeclarationHead(DeclarationHead::Module)),
        ("operator", Token::Modifier(Modifier::Operator)),
        ("override", Token::Modifier(Modifier::Override)),
        ("package", Token::DeclarationHead(DeclarationHead::Package)),
        (
            "public",
            Token::Modifier(Modifier::Accessibility(Accessibility::Public)),
        ),
        ("quote", Token::Macros(Macros::Quote)),
        ("reader", Token::ReservedKeyword),
        (
            "reject",
            Token::ModuleDefinitions(ModuleDefinitions::Reject),
        ),
        (
            "requires",
            Token::ModuleDefinitions(ModuleDefinitions::Requires),
        ),
        (
            "select",
            Token::BranchingAndJumping(BranchingAndJumping::Select),
        ),
        (
            "switch",
            Token::BranchingAndJumping(BranchingAndJumping::Switch),
        ),
        ("syntax", Token::Macros(Macros::Syntax)),
        ("throw", Token::Throw),
        ("timeout", Token::Timeout),
        ("unquote", Token::Macros(Macros::Unquote)),
        ("use", Token::Use),
        ("var", Token::Binding(Binding::Var)),
        ("with", Token::With),
        (
            "while",
            Token::BranchingAndJumping(BranchingAndJumping::While),
        ),
        //
        // Reserved, but not yet used
        //
        ("asm", Token::ReservedKeyword),
        ("ast", Token::ReservedKeyword),
        ("alias", Token::ReservedKeyword),
        ("align", Token::ReservedKeyword),
        ("alignto", Token::ReservedKeyword),
        ("arena", Token::ReservedKeyword),
        ("atom", Token::ReservedKeyword),
        ("bind", Token::ReservedKeyword),
        ("blittable", Token::ReservedKeyword),
        ("case", Token::ReservedKeyword),
        ("catch", Token::ReservedKeyword),
        ("co", Token::ReservedKeyword),
        ("constexpr", Token::ReservedKeyword),
        ("comptime", Token::ReservedKeyword),
        ("constructor", Token::ReservedKeyword),
        ("checked", Token::ReservedKeyword),
        ("derives", Token::ReservedKeyword),
        ("diverging", Token::ReservedKeyword),
        ("disasm", Token::ReservedKeyword),
        ("do", Token::ReservedKeyword),
        ("dyn", Token::ReservedKeyword),
        ("dynamic", Token::ReservedKeyword),
        ("embed", Token::ReservedKeyword),
        ("fexpr", Token::ReservedKeyword),
        ("fixed", Token::ReservedKeyword),
        ("fn", Token::ReservedKeyword),
        ("func", Token::ReservedKeyword),
        ("forall", Token::ReservedKeyword),
        ("gc", Token::ReservedKeyword),
        ("gen", Token::ReservedKeyword),
        ("get", Token::ReservedKeyword),
        ("infix", Token::ReservedKeyword),
        ("in", Token::ReservedKeyword),
        ("lexemes", Token::ReservedKeyword),
        ("link", Token::ReservedKeyword),
        ("llvm", Token::ReservedKeyword),
        ("macro", Token::ReservedKeyword),
        ("mut", Token::ReservedKeyword),
        ("mutating", Token::ReservedKeyword),
        ("never", Token::ReservedKeyword),
        ("nogc", Token::ReservedKeyword),
        ("noyield", Token::ReservedKeyword),
        ("offset", Token::ReservedKeyword),
        ("offsetof", Token::ReservedKeyword),
        ("pack", Token::ReservedKeyword),
        ("pin", Token::ReservedKeyword),
        ("platform", Token::ReservedKeyword),
        ("prefix", Token::ReservedKeyword),
        ("pragma", Token::ReservedKeyword),
        ("pure", Token::ReservedKeyword),
        ("quasiquote", Token::ReservedKeyword),
        ("raw", Token::ReservedKeyword),
        ("read", Token::ReservedKeyword),
        ("ref", Token::ReservedKeyword),
        ("restrict", Token::ReservedKeyword),
        ("stackalloc", Token::ReservedKeyword),
        ("seq", Token::ReservedKeyword),
        ("struct", Token::ReservedKeyword),
        ("source", Token::ReservedKeyword),
        ("sync", Token::ReservedKeyword),
        ("throws", Token::ReservedKeyword),
        ("tokens", Token::ReservedKeyword),
        ("total", Token::ReservedKeyword),
        ("transient", Token::ReservedKeyword),
        ("try", Token::ReservedKeyword),
        ("unary", Token::ReservedKeyword),
        ("unchecked", Token::ReservedKeyword),
        ("unsafe", Token::ReservedKeyword),
        ("unllvm", Token::ReservedKeyword),
        ("yield", Token::ReservedKeyword),
        ("value", Token::ReservedKeyword),
        ("virtual", Token::ReservedKeyword),
        ("where", Token::ReservedKeyword),
    ]);
    map
}
rename_collection_not_blocked_by_txn.js
/**
 * Test that rename collection only takes database IX lock and will not be blocked by transactions.
 *
 * @tags: [uses_transactions, requires_db_locking, assumes_unsharded_collection]
 */
(function() {
    "use strict";

    let dbName = 'rename_collection_not_blocked_by_txn';
    let mydb = db.getSiblingDB(dbName);

    // Start from a clean slate: drop every collection this test touches.
    mydb.t.drop({writeConcern: {w: "majority"}});
    mydb.a.drop({writeConcern: {w: "majority"}});
    mydb.b.drop({writeConcern: {w: "majority"}});
    mydb.c.drop({writeConcern: {w: "majority"}});

    // Seed the collections (must happen before the transaction is opened).
    assert.commandWorked(mydb.runCommand({insert: "t", documents: [{x: 1}]}));
    assert.commandWorked(mydb.runCommand({insert: "a", documents: [{x: 1}]}));
    assert.commandWorked(mydb.runCommand({insert: "b", documents: [{x: 1}]}));

    const session = mydb.getMongo().startSession();
    const sessionDb = session.getDatabase(dbName);
    session.startTransaction();
    // This holds a database IX lock and a collection IX lock on "test.t".
    sessionDb.t.insert({y: 1});

    // This only requires database IX lock.
    assert.commandWorked(
        mydb.adminCommand({renameCollection: dbName + ".a", to: dbName + ".b", dropTarget: true}));
    assert.commandWorked(mydb.adminCommand({renameCollection: dbName + ".b", to: dbName + ".c"}));

    assert.commandWorked(session.commitTransaction_forTesting());
})();
my.js
$(function () {
  // Default DataTable on the first example table.
  $('#example1').DataTable()
  // Second example: sortable only, with paging and info but no search box.
  $('#example2').DataTable({
    'paging'      : true,
    'lengthChange': false,
    'searching'   : false,
    'ordering'    : true,
    'info'        : true,
    'autoWidth'   : false
  })
})
test_dropship.py
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from odoo.tests import common, Form
from odoo.tools import mute_logger


class TestDropship(common.TransactionCase):
    """Integration tests for the dropshipping route (SO -> PO propagation)."""

    def test_change_qty(self):
        """Quantity changes on a dropship SO line must propagate to the PO line."""
        # enable the dropship and MTO route on the product
        prod = self.env.ref('product.product_product_8')
        dropshipping_route = self.env.ref('stock_dropshipping.route_drop_shipping')
        mto_route = self.env.ref('stock.route_warehouse0_mto')
        prod.write({'route_ids': [(6, 0, [dropshipping_route.id, mto_route.id])]})

        # add a vendor
        vendor1 = self.env['res.partner'].create({'name': 'vendor1'})
        seller1 = self.env['product.supplierinfo'].create({
            'name': vendor1.id,
            'price': 8,
        })
        prod.write({'seller_ids': [(6, 0, [seller1.id])]})

        # sell one unit of this product
        cust = self.env['res.partner'].create({'name': 'customer1'})
        so = self.env['sale.order'].create({
            'partner_id': cust.id,
            'partner_invoice_id': cust.id,
            'partner_shipping_id': cust.id,
            'order_line': [(0, 0, {
                'name': prod.name,
                'product_id': prod.id,
                'product_uom_qty': 1.00,
                'product_uom': prod.uom_id.id,
                'price_unit': 12,
            })],
            'pricelist_id': self.env.ref('product.list0').id,
            'picking_policy': 'direct',
        })
        so.action_confirm()
        po = self.env['purchase.order'].search([('group_id', '=', so.procurement_group_id.id)])
        po_line = po.order_line

        # Check the qty on the P0
        self.assertAlmostEqual(po_line.product_qty, 1.00)

        # Update qty on SO and check PO
        so.order_line.product_uom_qty = 2.00
        self.assertAlmostEqual(po_line.product_qty, 2.00)

        # Create a new so line
        sol2 = self.env['sale.order.line'].create({
            'order_id': so.id,
            'name': prod.name,
            'product_id': prod.id,
            'product_uom_qty': 3.00,
            'product_uom': prod.uom_id.id,
            'price_unit': 12,
        })
        # there is a new line
        pol2 = po.order_line - po_line
        # the first line is unchanged
        self.assertAlmostEqual(po_line.product_qty, 2.00)
        # the new line matches the new line on the so
        self.assertAlmostEqual(pol2.product_qty, sol2.product_uom_qty)

    def test_00_dropship(self):
        """End-to-end dropship flow: SO confirm -> RFQ -> PO confirm -> delivery to customer."""
        # Create a vendor
        supplier_dropship = self.env['res.partner'].create({'name': 'Vendor of Dropshipping test'})

        # Create new product without any routes
        drop_shop_product = self.env['product.product'].create({
            'name': "Pen drive",
            'type': "product",
            'categ_id': self.env.ref('product.product_category_1').id,
            'lst_price': 100.0,
            'standard_price': 0.0,
            'uom_id': self.env.ref('uom.product_uom_unit').id,
            'uom_po_id': self.env.ref('uom.product_uom_unit').id,
            'seller_ids': [(0, 0, {
                'delay': 1,
                'name': supplier_dropship.id,
                'min_qty': 2.0
            })]
        })

        # Create a sales order with a line of 200 PCE incoming shipment, with route_id drop shipping
        so_form = Form(self.env['sale.order'])
        so_form.partner_id = self.env.ref('base.res_partner_2')
        so_form.payment_term_id = self.env.ref('account.account_payment_term')
        with mute_logger('odoo.tests.common.onchange'):
            # otherwise complains that there's not enough inventory and
            # apparently that's normal according to @jco and @sle
            with so_form.order_line.new() as line:
                line.product_id = drop_shop_product
                line.product_uom_qty = 200
                line.price_unit = 1.00
                line.route_id = self.env.ref('stock_dropshipping.route_drop_shipping')
        sale_order_drp_shpng = so_form.save()

        # Confirm sales order
        sale_order_drp_shpng.action_confirm()

        # Check the sales order created a procurement group which has a procurement of 200 pieces
        self.assertTrue(sale_order_drp_shpng.procurement_group_id, 'SO should have procurement group')

        # Check a quotation was created to a certain vendor and confirm so it becomes a confirmed purchase order
        purchase = self.env['purchase.order'].search([('partner_id', '=', supplier_dropship.id)])
        self.assertTrue(purchase, "an RFQ should have been created by the scheduler")
        purchase.button_confirm()
        # NOTE: assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(purchase.state, 'purchase', 'Purchase order should be in the approved state')
        self.assertEqual(len(purchase.ids), 1, 'There should be one picking')

        # Send the 200 pieces
        purchase.picking_ids.move_lines.quantity_done = purchase.picking_ids.move_lines.product_qty
        purchase.picking_ids.button_validate()

        # Check one move line was created in Customers location with 200 pieces
        move_line = self.env['stock.move.line'].search([
            ('location_dest_id', '=', self.env.ref('stock.stock_location_customers').id),
            ('product_id', '=', drop_shop_product.id)])
        self.assertEqual(len(move_line.ids), 1, 'There should be exactly one move line')
navigation_target.rs
//! FIXME: write short doc here use hir::{AssocItem, Either, FieldSource, HasSource, ModuleSource}; use ra_db::{FileId, SourceDatabase}; use ra_syntax::{ ast::{self, DocCommentsOwner, NameOwner}, match_ast, AstNode, SmolStr, SyntaxKind::{self, BIND_PAT}, SyntaxNode, TextRange, }; use super::short_label::ShortLabel; use crate::{db::RootDatabase, FileSymbol}; /// `NavigationTarget` represents and element in the editor's UI which you can /// click on to navigate to a particular piece of code. /// /// Typically, a `NavigationTarget` corresponds to some element in the source /// code, like a function or a struct, but this is not strictly required. #[derive(Debug, Clone)] pub struct NavigationTarget { file_id: FileId, name: SmolStr, kind: SyntaxKind, full_range: TextRange, focus_range: Option<TextRange>, container_name: Option<SmolStr>, description: Option<String>, docs: Option<String>, } pub(crate) trait ToNav { fn to_nav(&self, db: &RootDatabase) -> NavigationTarget; } impl NavigationTarget { /// When `focus_range` is specified, returns it. otherwise /// returns `full_range` pub fn range(&self) -> TextRange { self.focus_range.unwrap_or(self.full_range) } pub fn name(&self) -> &SmolStr { &self.name } pub fn container_name(&self) -> Option<&SmolStr> { self.container_name.as_ref() } pub fn kind(&self) -> SyntaxKind { self.kind } pub fn file_id(&self) -> FileId { self.file_id } pub fn full_range(&self) -> TextRange { self.full_range } pub fn docs(&self) -> Option<&str> { self.docs.as_ref().map(String::as_str) } pub fn description(&self) -> Option<&str> { self.description.as_ref().map(String::as_str) } /// A "most interesting" range withing the `full_range`. /// /// Typically, `full_range` is the whole syntax node, /// including doc comments, and `focus_range` is the range of the identifier. 
pub fn focus_range(&self) -> Option<TextRange> { self.focus_range } pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget { let name = module.name(db).map(|it| it.to_string().into()).unwrap_or_default(); if let Some(src) = module.declaration_source(db) { let (file_id, text_range) = find_range_from_node(db, src.file_id, src.ast.syntax()); return NavigationTarget::from_syntax( file_id, name, None, text_range, src.ast.syntax(), src.ast.doc_comment_text(), src.ast.short_label(), ); } module.to_nav(db) } pub(crate) fn from_def( db: &RootDatabase, module_def: hir::ModuleDef, ) -> Option<NavigationTarget> { let nav = match module_def { hir::ModuleDef::Module(module) => module.to_nav(db), hir::ModuleDef::Function(it) => it.to_nav(db), hir::ModuleDef::Adt(it) => it.to_nav(db), hir::ModuleDef::Const(it) => it.to_nav(db), hir::ModuleDef::Static(it) => it.to_nav(db), hir::ModuleDef::EnumVariant(it) => it.to_nav(db), hir::ModuleDef::Trait(it) => it.to_nav(db), hir::ModuleDef::TypeAlias(it) => it.to_nav(db), hir::ModuleDef::BuiltinType(..) 
=> { return None; } }; Some(nav) } #[cfg(test)] pub(crate) fn assert_match(&self, expected: &str) { let actual = self.debug_render(); test_utils::assert_eq_text!(expected.trim(), actual.trim(),); } #[cfg(test)] pub(crate) fn debug_render(&self) -> String { let mut buf = format!( "{} {:?} {:?} {:?}", self.name(), self.kind(), self.file_id(), self.full_range() ); if let Some(focus_range) = self.focus_range() { buf.push_str(&format!(" {:?}", focus_range)) } if let Some(container_name) = self.container_name() { buf.push_str(&format!(" {}", container_name)) } buf } /// Allows `NavigationTarget` to be created from a `NameOwner` pub(crate) fn from_named( db: &RootDatabase, file_id: hir::HirFileId, node: &impl ast::NameOwner, docs: Option<String>, description: Option<String>, ) -> NavigationTarget { //FIXME: use `_` instead of empty string let name = node.name().map(|it| it.text().clone()).unwrap_or_default(); let focus_range = node.name().map(|it| find_range_from_node(db, file_id, it.syntax()).1); let (file_id, full_range) = find_range_from_node(db, file_id, node.syntax()); NavigationTarget::from_syntax( file_id, name, focus_range, full_range, node.syntax(), docs, description, ) } fn from_syntax( file_id: FileId, name: SmolStr, focus_range: Option<TextRange>, full_range: TextRange, node: &SyntaxNode, docs: Option<String>, description: Option<String>, ) -> NavigationTarget { NavigationTarget { file_id, name, kind: node.kind(), full_range, focus_range, container_name: None, description, docs, } } } impl ToNav for FileSymbol { fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { NavigationTarget { file_id: self.file_id, name: self.name.clone(), kind: self.ptr.kind(), full_range: self.ptr.range(), focus_range: self.name_range, container_name: self.container_name.clone(), description: description_from_symbol(db, self), docs: docs_from_symbol(db, self), } } } pub(crate) trait ToNavFromAst {} impl ToNavFromAst for hir::Function {} impl ToNavFromAst for hir::Const {} impl 
ToNavFromAst for hir::Static {} impl ToNavFromAst for hir::Struct {} impl ToNavFromAst for hir::Enum {} impl ToNavFromAst for hir::EnumVariant {} impl ToNavFromAst for hir::Union {} impl ToNavFromAst for hir::TypeAlias {} impl ToNavFromAst for hir::Trait {} impl<D> ToNav for D where D: HasSource + ToNavFromAst + Copy, D::Ast: ast::DocCommentsOwner + ast::NameOwner + ShortLabel, { fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { let src = self.source(db); NavigationTarget::from_named( db, src.file_id, &src.ast, src.ast.doc_comment_text(), src.ast.short_label(), ) } } impl ToNav for hir::Module { fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { let src = self.definition_source(db); let name = self.name(db).map(|it| it.to_string().into()).unwrap_or_default(); match src.ast { ModuleSource::SourceFile(node) => { let (file_id, text_range) = find_range_from_node(db, src.file_id, node.syntax()); NavigationTarget::from_syntax( file_id, name, None, text_range, node.syntax(), None, None, ) } ModuleSource::Module(node) => { let (file_id, text_range) = find_range_from_node(db, src.file_id, node.syntax()); NavigationTarget::from_syntax( file_id, name, None, text_range, node.syntax(), node.doc_comment_text(), node.short_label(), ) } } } } impl ToNav for hir::ImplBlock { fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { let src = self.source(db); let (file_id, text_range) = find_range_from_node(db, src.file_id, src.ast.syntax()); NavigationTarget::from_syntax( file_id, "impl".into(), None, text_range, src.ast.syntax(), None, None, ) } } impl ToNav for hir::StructField { fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { let src = self.source(db); match src.ast { FieldSource::Named(it) => NavigationTarget::from_named( db, src.file_id, &it, it.doc_comment_text(), it.short_label(), ), FieldSource::Pos(it) => { let (file_id, text_range) = find_range_from_node(db, src.file_id, it.syntax()); NavigationTarget::from_syntax( file_id, "".into(), None, 
text_range, it.syntax(), None, None, ) } } } } impl ToNav for hir::MacroDef { fn
(&self, db: &RootDatabase) -> NavigationTarget { let src = self.source(db); log::debug!("nav target {:#?}", src.ast.syntax()); NavigationTarget::from_named(db, src.file_id, &src.ast, src.ast.doc_comment_text(), None) } } impl ToNav for hir::Adt { fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { match self { hir::Adt::Struct(it) => it.to_nav(db), hir::Adt::Union(it) => it.to_nav(db), hir::Adt::Enum(it) => it.to_nav(db), } } } impl ToNav for hir::AssocItem { fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { match self { AssocItem::Function(it) => it.to_nav(db), AssocItem::Const(it) => it.to_nav(db), AssocItem::TypeAlias(it) => it.to_nav(db), } } } impl ToNav for hir::Local { fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { let src = self.source(db); let (full_range, focus_range) = match src.ast { Either::A(it) => { (it.syntax().text_range(), it.name().map(|it| it.syntax().text_range())) } Either::B(it) => (it.syntax().text_range(), Some(it.self_kw_token().text_range())), }; let name = match self.name(db) { Some(it) => it.to_string().into(), None => "".into(), }; NavigationTarget { file_id: src.file_id.original_file(db), name, kind: BIND_PAT, full_range, focus_range, container_name: None, description: None, docs: None, } } } fn find_range_from_node( db: &RootDatabase, src: hir::HirFileId, node: &SyntaxNode, ) -> (FileId, TextRange) { let text_range = node.text_range(); let (file_id, text_range) = src .expansion_info(db) .and_then(|expansion_info| expansion_info.find_range(text_range)) .unwrap_or((src, text_range)); // FIXME: handle recursive macro generated macro (file_id.original_file(db), text_range) } pub(crate) fn docs_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option<String> { let parse = db.parse(symbol.file_id); let node = symbol.ptr.to_node(parse.tree().syntax()); match_ast! 
{ match node { ast::FnDef(it) => { it.doc_comment_text() }, ast::StructDef(it) => { it.doc_comment_text() }, ast::EnumDef(it) => { it.doc_comment_text() }, ast::TraitDef(it) => { it.doc_comment_text() }, ast::Module(it) => { it.doc_comment_text() }, ast::TypeAliasDef(it) => { it.doc_comment_text() }, ast::ConstDef(it) => { it.doc_comment_text() }, ast::StaticDef(it) => { it.doc_comment_text() }, ast::RecordFieldDef(it) => { it.doc_comment_text() }, ast::EnumVariant(it) => { it.doc_comment_text() }, ast::MacroCall(it) => { it.doc_comment_text() }, _ => None, } } } /// Get a description of a symbol. /// /// e.g. `struct Name`, `enum Name`, `fn Name` pub(crate) fn description_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option<String> { let parse = db.parse(symbol.file_id); let node = symbol.ptr.to_node(parse.tree().syntax()); match_ast! { match node { ast::FnDef(it) => { it.short_label() }, ast::StructDef(it) => { it.short_label() }, ast::EnumDef(it) => { it.short_label() }, ast::TraitDef(it) => { it.short_label() }, ast::Module(it) => { it.short_label() }, ast::TypeAliasDef(it) => { it.short_label() }, ast::ConstDef(it) => { it.short_label() }, ast::StaticDef(it) => { it.short_label() }, ast::RecordFieldDef(it) => { it.short_label() }, ast::EnumVariant(it) => { it.short_label() }, _ => None, } } }
to_nav
test_html_escaping.py
from flask import json

from lms.lmsdb.models import Solution, User
from tests import conftest

USER_COMMENT_BEFORE_ESCAPING = '<html><body><p>Welcome "LMS"</p></body></html>'
USER_COMMENT_AFTER_ESCAPING = (
    '&lt;html&gt;&lt;body&gt;&lt;p&gt;Welcome &quot;LMS&quot;'
    '&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;'
)


class TestHtmlEscaping:
    """Comments submitted with HTML markup must be stored escaped."""

    @staticmethod
    def test_comment_text_escaping(student_user: User, solution: Solution):
        client = conftest.get_logged_user(student_user.username)
        # Post a comment containing raw HTML and quotes.
        comment_response = client.post('/comments', data=json.dumps({
            'fileId': solution.files[0].id,
            'act': 'create',
            'kind': 'text',
            'comment': USER_COMMENT_BEFORE_ESCAPING,
            'line': 1,
        }), content_type='application/json')
        assert comment_response.status_code == 200

        # The stored text must be the HTML-escaped form.
        assert solution.comments[0].comment.text == USER_COMMENT_AFTER_ESCAPING
double.go
// Code generated by go doublegen; DO NOT EDIT. // This file was generated at 2020-02-14T22:11:18+11:00 // Package appsyncdouble provides a TestDouble implementation of appsynciface.AppSyncAPI package appsyncdouble import ( "context" "github.com/aws/aws-sdk-go/aws/awserr" "github.com/aws/aws-sdk-go/aws/request" "github.com/aws/aws-sdk-go/service/appsync" "github.com/aws/aws-sdk-go/service/appsync/appsynciface" "github.com/lwoggardner/awsdouble" "github.com/lwoggardner/godouble/godouble" ) // AppSyncDouble is TestDouble for appsynciface.AppSyncAPI type AppSyncDouble struct { appsynciface.AppSyncAPI *awsdouble.AWSTestDouble } // Constructor for AppSyncDouble // Default configuration will ensure // * API operations not explicitly stubbed will return an empty output struct pointer, and nil error. // To simulate long polling, "Poll" operations will return these values after a random delay of up to 100ms. // * WithContext methods implement a 'Fake' method that returns a cancellation error if the context is cancelled // before the method is called. // * Pages and PagesWithContext methods similarly implement a 'Fake' method that paginates over the underlying method. // // This allows tests to only stub the simple api methods and be generally unconcerned whether the SUT is using the // Context or Pagination forms of the API. // func NewDouble(t godouble.T, configurators ...func(*awsdouble.AWSTestDouble)) *AppSyncDouble
func (d *AppSyncDouble) defaultReturnValues(m godouble.Method) godouble.ReturnValues { return d.DefaultReturnValues(m) } func (d *AppSyncDouble) defaultMethodCall(m godouble.Method) godouble.MethodCall { switch m.Reflect().Name { case "CreateApiCacheWithContext": return m.Fake(d.fakeCreateApiCacheWithContext) case "CreateApiKeyWithContext": return m.Fake(d.fakeCreateApiKeyWithContext) case "CreateDataSourceWithContext": return m.Fake(d.fakeCreateDataSourceWithContext) case "CreateFunctionWithContext": return m.Fake(d.fakeCreateFunctionWithContext) case "CreateGraphqlApiWithContext": return m.Fake(d.fakeCreateGraphqlApiWithContext) case "CreateResolverWithContext": return m.Fake(d.fakeCreateResolverWithContext) case "CreateTypeWithContext": return m.Fake(d.fakeCreateTypeWithContext) case "DeleteApiCacheWithContext": return m.Fake(d.fakeDeleteApiCacheWithContext) case "DeleteApiKeyWithContext": return m.Fake(d.fakeDeleteApiKeyWithContext) case "DeleteDataSourceWithContext": return m.Fake(d.fakeDeleteDataSourceWithContext) case "DeleteFunctionWithContext": return m.Fake(d.fakeDeleteFunctionWithContext) case "DeleteGraphqlApiWithContext": return m.Fake(d.fakeDeleteGraphqlApiWithContext) case "DeleteResolverWithContext": return m.Fake(d.fakeDeleteResolverWithContext) case "DeleteTypeWithContext": return m.Fake(d.fakeDeleteTypeWithContext) case "FlushApiCacheWithContext": return m.Fake(d.fakeFlushApiCacheWithContext) case "GetApiCacheWithContext": return m.Fake(d.fakeGetApiCacheWithContext) case "GetDataSourceWithContext": return m.Fake(d.fakeGetDataSourceWithContext) case "GetFunctionWithContext": return m.Fake(d.fakeGetFunctionWithContext) case "GetGraphqlApiWithContext": return m.Fake(d.fakeGetGraphqlApiWithContext) case "GetIntrospectionSchemaWithContext": return m.Fake(d.fakeGetIntrospectionSchemaWithContext) case "GetResolverWithContext": return m.Fake(d.fakeGetResolverWithContext) case "GetSchemaCreationStatusWithContext": return 
m.Fake(d.fakeGetSchemaCreationStatusWithContext) case "GetTypeWithContext": return m.Fake(d.fakeGetTypeWithContext) case "ListApiKeysWithContext": return m.Fake(d.fakeListApiKeysWithContext) case "ListDataSourcesWithContext": return m.Fake(d.fakeListDataSourcesWithContext) case "ListFunctionsWithContext": return m.Fake(d.fakeListFunctionsWithContext) case "ListGraphqlApisWithContext": return m.Fake(d.fakeListGraphqlApisWithContext) case "ListResolversByFunctionWithContext": return m.Fake(d.fakeListResolversByFunctionWithContext) case "ListResolversWithContext": return m.Fake(d.fakeListResolversWithContext) case "ListTagsForResourceWithContext": return m.Fake(d.fakeListTagsForResourceWithContext) case "ListTypesWithContext": return m.Fake(d.fakeListTypesWithContext) case "StartSchemaCreationWithContext": return m.Fake(d.fakeStartSchemaCreationWithContext) case "TagResourceWithContext": return m.Fake(d.fakeTagResourceWithContext) case "UntagResourceWithContext": return m.Fake(d.fakeUntagResourceWithContext) case "UpdateApiCacheWithContext": return m.Fake(d.fakeUpdateApiCacheWithContext) case "UpdateApiKeyWithContext": return m.Fake(d.fakeUpdateApiKeyWithContext) case "UpdateDataSourceWithContext": return m.Fake(d.fakeUpdateDataSourceWithContext) case "UpdateFunctionWithContext": return m.Fake(d.fakeUpdateFunctionWithContext) case "UpdateGraphqlApiWithContext": return m.Fake(d.fakeUpdateGraphqlApiWithContext) case "UpdateResolverWithContext": return m.Fake(d.fakeUpdateResolverWithContext) case "UpdateTypeWithContext": return m.Fake(d.fakeUpdateTypeWithContext) default: return nil } } func (d *AppSyncDouble) CreateApiCache(i0 *appsync.CreateApiCacheInput) (r0 *appsync.CreateApiCacheOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateApiCache", i0) r0, _ = returns[0].(*appsync.CreateApiCacheOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) CreateApiCacheRequest(i0 *appsync.CreateApiCacheInput) (r0 *request.Request, r1 
*appsync.CreateApiCacheOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateApiCacheRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.CreateApiCacheOutput) return } func (d *AppSyncDouble) CreateApiCacheWithContext(i0 context.Context, i1 *appsync.CreateApiCacheInput, i2 ...request.Option) (r0 *appsync.CreateApiCacheOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateApiCacheWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.CreateApiCacheOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeCreateApiCacheWithContext(ctx context.Context, in *appsync.CreateApiCacheInput, _ ...request.Option) (*appsync.CreateApiCacheOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "CreateApiCache cancelled", ctx.Err()) default: return d.CreateApiCache(in) } } func (d *AppSyncDouble) CreateApiKey(i0 *appsync.CreateApiKeyInput) (r0 *appsync.CreateApiKeyOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateApiKey", i0) r0, _ = returns[0].(*appsync.CreateApiKeyOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) CreateApiKeyRequest(i0 *appsync.CreateApiKeyInput) (r0 *request.Request, r1 *appsync.CreateApiKeyOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateApiKeyRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.CreateApiKeyOutput) return } func (d *AppSyncDouble) CreateApiKeyWithContext(i0 context.Context, i1 *appsync.CreateApiKeyInput, i2 ...request.Option) (r0 *appsync.CreateApiKeyOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateApiKeyWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.CreateApiKeyOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeCreateApiKeyWithContext(ctx context.Context, in *appsync.CreateApiKeyInput, _ ...request.Option) (*appsync.CreateApiKeyOutput, error) { 
select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "CreateApiKey cancelled", ctx.Err()) default: return d.CreateApiKey(in) } } func (d *AppSyncDouble) CreateDataSource(i0 *appsync.CreateDataSourceInput) (r0 *appsync.CreateDataSourceOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateDataSource", i0) r0, _ = returns[0].(*appsync.CreateDataSourceOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) CreateDataSourceRequest(i0 *appsync.CreateDataSourceInput) (r0 *request.Request, r1 *appsync.CreateDataSourceOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateDataSourceRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.CreateDataSourceOutput) return } func (d *AppSyncDouble) CreateDataSourceWithContext(i0 context.Context, i1 *appsync.CreateDataSourceInput, i2 ...request.Option) (r0 *appsync.CreateDataSourceOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateDataSourceWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.CreateDataSourceOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeCreateDataSourceWithContext(ctx context.Context, in *appsync.CreateDataSourceInput, _ ...request.Option) (*appsync.CreateDataSourceOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "CreateDataSource cancelled", ctx.Err()) default: return d.CreateDataSource(in) } } func (d *AppSyncDouble) CreateFunction(i0 *appsync.CreateFunctionInput) (r0 *appsync.CreateFunctionOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateFunction", i0) r0, _ = returns[0].(*appsync.CreateFunctionOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) CreateFunctionRequest(i0 *appsync.CreateFunctionInput) (r0 *request.Request, r1 *appsync.CreateFunctionOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateFunctionRequest", i0) r0, _ 
= returns[0].(*request.Request) r1, _ = returns[1].(*appsync.CreateFunctionOutput) return } func (d *AppSyncDouble) CreateFunctionWithContext(i0 context.Context, i1 *appsync.CreateFunctionInput, i2 ...request.Option) (r0 *appsync.CreateFunctionOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateFunctionWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.CreateFunctionOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeCreateFunctionWithContext(ctx context.Context, in *appsync.CreateFunctionInput, _ ...request.Option) (*appsync.CreateFunctionOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "CreateFunction cancelled", ctx.Err()) default: return d.CreateFunction(in) } } func (d *AppSyncDouble) CreateGraphqlApi(i0 *appsync.CreateGraphqlApiInput) (r0 *appsync.CreateGraphqlApiOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateGraphqlApi", i0) r0, _ = returns[0].(*appsync.CreateGraphqlApiOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) CreateGraphqlApiRequest(i0 *appsync.CreateGraphqlApiInput) (r0 *request.Request, r1 *appsync.CreateGraphqlApiOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateGraphqlApiRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.CreateGraphqlApiOutput) return } func (d *AppSyncDouble) CreateGraphqlApiWithContext(i0 context.Context, i1 *appsync.CreateGraphqlApiInput, i2 ...request.Option) (r0 *appsync.CreateGraphqlApiOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateGraphqlApiWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.CreateGraphqlApiOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeCreateGraphqlApiWithContext(ctx context.Context, in *appsync.CreateGraphqlApiInput, _ ...request.Option) (*appsync.CreateGraphqlApiOutput, error) { select { case <-ctx.Done(): return nil, 
awserr.New(request.CanceledErrorCode, "CreateGraphqlApi cancelled", ctx.Err()) default: return d.CreateGraphqlApi(in) } } func (d *AppSyncDouble) CreateResolver(i0 *appsync.CreateResolverInput) (r0 *appsync.CreateResolverOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateResolver", i0) r0, _ = returns[0].(*appsync.CreateResolverOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) CreateResolverRequest(i0 *appsync.CreateResolverInput) (r0 *request.Request, r1 *appsync.CreateResolverOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateResolverRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.CreateResolverOutput) return } func (d *AppSyncDouble) CreateResolverWithContext(i0 context.Context, i1 *appsync.CreateResolverInput, i2 ...request.Option) (r0 *appsync.CreateResolverOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateResolverWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.CreateResolverOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeCreateResolverWithContext(ctx context.Context, in *appsync.CreateResolverInput, _ ...request.Option) (*appsync.CreateResolverOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "CreateResolver cancelled", ctx.Err()) default: return d.CreateResolver(in) } } func (d *AppSyncDouble) CreateType(i0 *appsync.CreateTypeInput) (r0 *appsync.CreateTypeOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateType", i0) r0, _ = returns[0].(*appsync.CreateTypeOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) CreateTypeRequest(i0 *appsync.CreateTypeInput) (r0 *request.Request, r1 *appsync.CreateTypeOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateTypeRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.CreateTypeOutput) return } func (d 
*AppSyncDouble) CreateTypeWithContext(i0 context.Context, i1 *appsync.CreateTypeInput, i2 ...request.Option) (r0 *appsync.CreateTypeOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("CreateTypeWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.CreateTypeOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeCreateTypeWithContext(ctx context.Context, in *appsync.CreateTypeInput, _ ...request.Option) (*appsync.CreateTypeOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "CreateType cancelled", ctx.Err()) default: return d.CreateType(in) } } func (d *AppSyncDouble) DeleteApiCache(i0 *appsync.DeleteApiCacheInput) (r0 *appsync.DeleteApiCacheOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("DeleteApiCache", i0) r0, _ = returns[0].(*appsync.DeleteApiCacheOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) DeleteApiCacheRequest(i0 *appsync.DeleteApiCacheInput) (r0 *request.Request, r1 *appsync.DeleteApiCacheOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("DeleteApiCacheRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.DeleteApiCacheOutput) return } func (d *AppSyncDouble) DeleteApiCacheWithContext(i0 context.Context, i1 *appsync.DeleteApiCacheInput, i2 ...request.Option) (r0 *appsync.DeleteApiCacheOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("DeleteApiCacheWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.DeleteApiCacheOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeDeleteApiCacheWithContext(ctx context.Context, in *appsync.DeleteApiCacheInput, _ ...request.Option) (*appsync.DeleteApiCacheOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "DeleteApiCache cancelled", ctx.Err()) default: return d.DeleteApiCache(in) } } func (d *AppSyncDouble) DeleteApiKey(i0 *appsync.DeleteApiKeyInput) (r0 
*appsync.DeleteApiKeyOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("DeleteApiKey", i0) r0, _ = returns[0].(*appsync.DeleteApiKeyOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) DeleteApiKeyRequest(i0 *appsync.DeleteApiKeyInput) (r0 *request.Request, r1 *appsync.DeleteApiKeyOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("DeleteApiKeyRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.DeleteApiKeyOutput) return } func (d *AppSyncDouble) DeleteApiKeyWithContext(i0 context.Context, i1 *appsync.DeleteApiKeyInput, i2 ...request.Option) (r0 *appsync.DeleteApiKeyOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("DeleteApiKeyWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.DeleteApiKeyOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeDeleteApiKeyWithContext(ctx context.Context, in *appsync.DeleteApiKeyInput, _ ...request.Option) (*appsync.DeleteApiKeyOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "DeleteApiKey cancelled", ctx.Err()) default: return d.DeleteApiKey(in) } } func (d *AppSyncDouble) DeleteDataSource(i0 *appsync.DeleteDataSourceInput) (r0 *appsync.DeleteDataSourceOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("DeleteDataSource", i0) r0, _ = returns[0].(*appsync.DeleteDataSourceOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) DeleteDataSourceRequest(i0 *appsync.DeleteDataSourceInput) (r0 *request.Request, r1 *appsync.DeleteDataSourceOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("DeleteDataSourceRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.DeleteDataSourceOutput) return } func (d *AppSyncDouble) DeleteDataSourceWithContext(i0 context.Context, i1 *appsync.DeleteDataSourceInput, i2 ...request.Option) (r0 *appsync.DeleteDataSourceOutput, r1 error) { 
d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("DeleteDataSourceWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.DeleteDataSourceOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeDeleteDataSourceWithContext(ctx context.Context, in *appsync.DeleteDataSourceInput, _ ...request.Option) (*appsync.DeleteDataSourceOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "DeleteDataSource cancelled", ctx.Err()) default: return d.DeleteDataSource(in) } } func (d *AppSyncDouble) DeleteFunction(i0 *appsync.DeleteFunctionInput) (r0 *appsync.DeleteFunctionOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("DeleteFunction", i0) r0, _ = returns[0].(*appsync.DeleteFunctionOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) DeleteFunctionRequest(i0 *appsync.DeleteFunctionInput) (r0 *request.Request, r1 *appsync.DeleteFunctionOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("DeleteFunctionRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.DeleteFunctionOutput) return } func (d *AppSyncDouble) DeleteFunctionWithContext(i0 context.Context, i1 *appsync.DeleteFunctionInput, i2 ...request.Option) (r0 *appsync.DeleteFunctionOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("DeleteFunctionWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.DeleteFunctionOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeDeleteFunctionWithContext(ctx context.Context, in *appsync.DeleteFunctionInput, _ ...request.Option) (*appsync.DeleteFunctionOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "DeleteFunction cancelled", ctx.Err()) default: return d.DeleteFunction(in) } } func (d *AppSyncDouble) DeleteGraphqlApi(i0 *appsync.DeleteGraphqlApiInput) (r0 *appsync.DeleteGraphqlApiOutput, r1 error) { d.TestDouble.T().Helper() returns := 
d.TestDouble.Invoke("DeleteGraphqlApi", i0) r0, _ = returns[0].(*appsync.DeleteGraphqlApiOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) DeleteGraphqlApiRequest(i0 *appsync.DeleteGraphqlApiInput) (r0 *request.Request, r1 *appsync.DeleteGraphqlApiOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("DeleteGraphqlApiRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.DeleteGraphqlApiOutput) return } func (d *AppSyncDouble) DeleteGraphqlApiWithContext(i0 context.Context, i1 *appsync.DeleteGraphqlApiInput, i2 ...request.Option) (r0 *appsync.DeleteGraphqlApiOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("DeleteGraphqlApiWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.DeleteGraphqlApiOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeDeleteGraphqlApiWithContext(ctx context.Context, in *appsync.DeleteGraphqlApiInput, _ ...request.Option) (*appsync.DeleteGraphqlApiOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "DeleteGraphqlApi cancelled", ctx.Err()) default: return d.DeleteGraphqlApi(in) } } func (d *AppSyncDouble) DeleteResolver(i0 *appsync.DeleteResolverInput) (r0 *appsync.DeleteResolverOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("DeleteResolver", i0) r0, _ = returns[0].(*appsync.DeleteResolverOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) DeleteResolverRequest(i0 *appsync.DeleteResolverInput) (r0 *request.Request, r1 *appsync.DeleteResolverOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("DeleteResolverRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.DeleteResolverOutput) return } func (d *AppSyncDouble) DeleteResolverWithContext(i0 context.Context, i1 *appsync.DeleteResolverInput, i2 ...request.Option) (r0 *appsync.DeleteResolverOutput, r1 error) { d.TestDouble.T().Helper() returns := 
d.TestDouble.Invoke("DeleteResolverWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.DeleteResolverOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeDeleteResolverWithContext(ctx context.Context, in *appsync.DeleteResolverInput, _ ...request.Option) (*appsync.DeleteResolverOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "DeleteResolver cancelled", ctx.Err()) default: return d.DeleteResolver(in) } } func (d *AppSyncDouble) DeleteType(i0 *appsync.DeleteTypeInput) (r0 *appsync.DeleteTypeOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("DeleteType", i0) r0, _ = returns[0].(*appsync.DeleteTypeOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) DeleteTypeRequest(i0 *appsync.DeleteTypeInput) (r0 *request.Request, r1 *appsync.DeleteTypeOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("DeleteTypeRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.DeleteTypeOutput) return } func (d *AppSyncDouble) DeleteTypeWithContext(i0 context.Context, i1 *appsync.DeleteTypeInput, i2 ...request.Option) (r0 *appsync.DeleteTypeOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("DeleteTypeWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.DeleteTypeOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeDeleteTypeWithContext(ctx context.Context, in *appsync.DeleteTypeInput, _ ...request.Option) (*appsync.DeleteTypeOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "DeleteType cancelled", ctx.Err()) default: return d.DeleteType(in) } } func (d *AppSyncDouble) FlushApiCache(i0 *appsync.FlushApiCacheInput) (r0 *appsync.FlushApiCacheOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("FlushApiCache", i0) r0, _ = returns[0].(*appsync.FlushApiCacheOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) 
FlushApiCacheRequest(i0 *appsync.FlushApiCacheInput) (r0 *request.Request, r1 *appsync.FlushApiCacheOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("FlushApiCacheRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.FlushApiCacheOutput) return } func (d *AppSyncDouble) FlushApiCacheWithContext(i0 context.Context, i1 *appsync.FlushApiCacheInput, i2 ...request.Option) (r0 *appsync.FlushApiCacheOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("FlushApiCacheWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.FlushApiCacheOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeFlushApiCacheWithContext(ctx context.Context, in *appsync.FlushApiCacheInput, _ ...request.Option) (*appsync.FlushApiCacheOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "FlushApiCache cancelled", ctx.Err()) default: return d.FlushApiCache(in) } } func (d *AppSyncDouble) GetApiCache(i0 *appsync.GetApiCacheInput) (r0 *appsync.GetApiCacheOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetApiCache", i0) r0, _ = returns[0].(*appsync.GetApiCacheOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) GetApiCacheRequest(i0 *appsync.GetApiCacheInput) (r0 *request.Request, r1 *appsync.GetApiCacheOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetApiCacheRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.GetApiCacheOutput) return } func (d *AppSyncDouble) GetApiCacheWithContext(i0 context.Context, i1 *appsync.GetApiCacheInput, i2 ...request.Option) (r0 *appsync.GetApiCacheOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetApiCacheWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.GetApiCacheOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeGetApiCacheWithContext(ctx context.Context, in *appsync.GetApiCacheInput, _ 
...request.Option) (*appsync.GetApiCacheOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "GetApiCache cancelled", ctx.Err()) default: return d.GetApiCache(in) } } func (d *AppSyncDouble) GetDataSource(i0 *appsync.GetDataSourceInput) (r0 *appsync.GetDataSourceOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetDataSource", i0) r0, _ = returns[0].(*appsync.GetDataSourceOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) GetDataSourceRequest(i0 *appsync.GetDataSourceInput) (r0 *request.Request, r1 *appsync.GetDataSourceOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetDataSourceRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.GetDataSourceOutput) return } func (d *AppSyncDouble) GetDataSourceWithContext(i0 context.Context, i1 *appsync.GetDataSourceInput, i2 ...request.Option) (r0 *appsync.GetDataSourceOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetDataSourceWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.GetDataSourceOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeGetDataSourceWithContext(ctx context.Context, in *appsync.GetDataSourceInput, _ ...request.Option) (*appsync.GetDataSourceOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "GetDataSource cancelled", ctx.Err()) default: return d.GetDataSource(in) } } func (d *AppSyncDouble) GetFunction(i0 *appsync.GetFunctionInput) (r0 *appsync.GetFunctionOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetFunction", i0) r0, _ = returns[0].(*appsync.GetFunctionOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) GetFunctionRequest(i0 *appsync.GetFunctionInput) (r0 *request.Request, r1 *appsync.GetFunctionOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetFunctionRequest", i0) r0, _ = returns[0].(*request.Request) 
r1, _ = returns[1].(*appsync.GetFunctionOutput) return } func (d *AppSyncDouble) GetFunctionWithContext(i0 context.Context, i1 *appsync.GetFunctionInput, i2 ...request.Option) (r0 *appsync.GetFunctionOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetFunctionWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.GetFunctionOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeGetFunctionWithContext(ctx context.Context, in *appsync.GetFunctionInput, _ ...request.Option) (*appsync.GetFunctionOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "GetFunction cancelled", ctx.Err()) default: return d.GetFunction(in) } } func (d *AppSyncDouble) GetGraphqlApi(i0 *appsync.GetGraphqlApiInput) (r0 *appsync.GetGraphqlApiOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetGraphqlApi", i0) r0, _ = returns[0].(*appsync.GetGraphqlApiOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) GetGraphqlApiRequest(i0 *appsync.GetGraphqlApiInput) (r0 *request.Request, r1 *appsync.GetGraphqlApiOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetGraphqlApiRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.GetGraphqlApiOutput) return } func (d *AppSyncDouble) GetGraphqlApiWithContext(i0 context.Context, i1 *appsync.GetGraphqlApiInput, i2 ...request.Option) (r0 *appsync.GetGraphqlApiOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetGraphqlApiWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.GetGraphqlApiOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeGetGraphqlApiWithContext(ctx context.Context, in *appsync.GetGraphqlApiInput, _ ...request.Option) (*appsync.GetGraphqlApiOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "GetGraphqlApi cancelled", ctx.Err()) default: return d.GetGraphqlApi(in) } } func (d 
*AppSyncDouble) GetIntrospectionSchema(i0 *appsync.GetIntrospectionSchemaInput) (r0 *appsync.GetIntrospectionSchemaOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetIntrospectionSchema", i0) r0, _ = returns[0].(*appsync.GetIntrospectionSchemaOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) GetIntrospectionSchemaRequest(i0 *appsync.GetIntrospectionSchemaInput) (r0 *request.Request, r1 *appsync.GetIntrospectionSchemaOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetIntrospectionSchemaRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.GetIntrospectionSchemaOutput) return } func (d *AppSyncDouble) GetIntrospectionSchemaWithContext(i0 context.Context, i1 *appsync.GetIntrospectionSchemaInput, i2 ...request.Option) (r0 *appsync.GetIntrospectionSchemaOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetIntrospectionSchemaWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.GetIntrospectionSchemaOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeGetIntrospectionSchemaWithContext(ctx context.Context, in *appsync.GetIntrospectionSchemaInput, _ ...request.Option) (*appsync.GetIntrospectionSchemaOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "GetIntrospectionSchema cancelled", ctx.Err()) default: return d.GetIntrospectionSchema(in) } } func (d *AppSyncDouble) GetResolver(i0 *appsync.GetResolverInput) (r0 *appsync.GetResolverOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetResolver", i0) r0, _ = returns[0].(*appsync.GetResolverOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) GetResolverRequest(i0 *appsync.GetResolverInput) (r0 *request.Request, r1 *appsync.GetResolverOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetResolverRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = 
returns[1].(*appsync.GetResolverOutput) return } func (d *AppSyncDouble) GetResolverWithContext(i0 context.Context, i1 *appsync.GetResolverInput, i2 ...request.Option) (r0 *appsync.GetResolverOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetResolverWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.GetResolverOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeGetResolverWithContext(ctx context.Context, in *appsync.GetResolverInput, _ ...request.Option) (*appsync.GetResolverOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "GetResolver cancelled", ctx.Err()) default: return d.GetResolver(in) } } func (d *AppSyncDouble) GetSchemaCreationStatus(i0 *appsync.GetSchemaCreationStatusInput) (r0 *appsync.GetSchemaCreationStatusOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetSchemaCreationStatus", i0) r0, _ = returns[0].(*appsync.GetSchemaCreationStatusOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) GetSchemaCreationStatusRequest(i0 *appsync.GetSchemaCreationStatusInput) (r0 *request.Request, r1 *appsync.GetSchemaCreationStatusOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetSchemaCreationStatusRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.GetSchemaCreationStatusOutput) return } func (d *AppSyncDouble) GetSchemaCreationStatusWithContext(i0 context.Context, i1 *appsync.GetSchemaCreationStatusInput, i2 ...request.Option) (r0 *appsync.GetSchemaCreationStatusOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetSchemaCreationStatusWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.GetSchemaCreationStatusOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeGetSchemaCreationStatusWithContext(ctx context.Context, in *appsync.GetSchemaCreationStatusInput, _ ...request.Option) (*appsync.GetSchemaCreationStatusOutput, error) { 
select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "GetSchemaCreationStatus cancelled", ctx.Err()) default: return d.GetSchemaCreationStatus(in) } } func (d *AppSyncDouble) GetType(i0 *appsync.GetTypeInput) (r0 *appsync.GetTypeOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetType", i0) r0, _ = returns[0].(*appsync.GetTypeOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) GetTypeRequest(i0 *appsync.GetTypeInput) (r0 *request.Request, r1 *appsync.GetTypeOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetTypeRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.GetTypeOutput) return } func (d *AppSyncDouble) GetTypeWithContext(i0 context.Context, i1 *appsync.GetTypeInput, i2 ...request.Option) (r0 *appsync.GetTypeOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("GetTypeWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.GetTypeOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeGetTypeWithContext(ctx context.Context, in *appsync.GetTypeInput, _ ...request.Option) (*appsync.GetTypeOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "GetType cancelled", ctx.Err()) default: return d.GetType(in) } } func (d *AppSyncDouble) ListApiKeys(i0 *appsync.ListApiKeysInput) (r0 *appsync.ListApiKeysOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListApiKeys", i0) r0, _ = returns[0].(*appsync.ListApiKeysOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) ListApiKeysRequest(i0 *appsync.ListApiKeysInput) (r0 *request.Request, r1 *appsync.ListApiKeysOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListApiKeysRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.ListApiKeysOutput) return } func (d *AppSyncDouble) ListApiKeysWithContext(i0 context.Context, i1 *appsync.ListApiKeysInput, 
i2 ...request.Option) (r0 *appsync.ListApiKeysOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListApiKeysWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.ListApiKeysOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeListApiKeysWithContext(ctx context.Context, in *appsync.ListApiKeysInput, _ ...request.Option) (*appsync.ListApiKeysOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "ListApiKeys cancelled", ctx.Err()) default: return d.ListApiKeys(in) } } func (d *AppSyncDouble) ListDataSources(i0 *appsync.ListDataSourcesInput) (r0 *appsync.ListDataSourcesOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListDataSources", i0) r0, _ = returns[0].(*appsync.ListDataSourcesOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) ListDataSourcesRequest(i0 *appsync.ListDataSourcesInput) (r0 *request.Request, r1 *appsync.ListDataSourcesOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListDataSourcesRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.ListDataSourcesOutput) return } func (d *AppSyncDouble) ListDataSourcesWithContext(i0 context.Context, i1 *appsync.ListDataSourcesInput, i2 ...request.Option) (r0 *appsync.ListDataSourcesOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListDataSourcesWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.ListDataSourcesOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeListDataSourcesWithContext(ctx context.Context, in *appsync.ListDataSourcesInput, _ ...request.Option) (*appsync.ListDataSourcesOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "ListDataSources cancelled", ctx.Err()) default: return d.ListDataSources(in) } } func (d *AppSyncDouble) ListFunctions(i0 *appsync.ListFunctionsInput) (r0 *appsync.ListFunctionsOutput, r1 error) { 
d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListFunctions", i0) r0, _ = returns[0].(*appsync.ListFunctionsOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) ListFunctionsRequest(i0 *appsync.ListFunctionsInput) (r0 *request.Request, r1 *appsync.ListFunctionsOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListFunctionsRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.ListFunctionsOutput) return } func (d *AppSyncDouble) ListFunctionsWithContext(i0 context.Context, i1 *appsync.ListFunctionsInput, i2 ...request.Option) (r0 *appsync.ListFunctionsOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListFunctionsWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.ListFunctionsOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeListFunctionsWithContext(ctx context.Context, in *appsync.ListFunctionsInput, _ ...request.Option) (*appsync.ListFunctionsOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "ListFunctions cancelled", ctx.Err()) default: return d.ListFunctions(in) } } func (d *AppSyncDouble) ListGraphqlApis(i0 *appsync.ListGraphqlApisInput) (r0 *appsync.ListGraphqlApisOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListGraphqlApis", i0) r0, _ = returns[0].(*appsync.ListGraphqlApisOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) ListGraphqlApisRequest(i0 *appsync.ListGraphqlApisInput) (r0 *request.Request, r1 *appsync.ListGraphqlApisOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListGraphqlApisRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.ListGraphqlApisOutput) return } func (d *AppSyncDouble) ListGraphqlApisWithContext(i0 context.Context, i1 *appsync.ListGraphqlApisInput, i2 ...request.Option) (r0 *appsync.ListGraphqlApisOutput, r1 error) { d.TestDouble.T().Helper() returns := 
d.TestDouble.Invoke("ListGraphqlApisWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.ListGraphqlApisOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeListGraphqlApisWithContext(ctx context.Context, in *appsync.ListGraphqlApisInput, _ ...request.Option) (*appsync.ListGraphqlApisOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "ListGraphqlApis cancelled", ctx.Err()) default: return d.ListGraphqlApis(in) } } func (d *AppSyncDouble) ListResolvers(i0 *appsync.ListResolversInput) (r0 *appsync.ListResolversOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListResolvers", i0) r0, _ = returns[0].(*appsync.ListResolversOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) ListResolversByFunction(i0 *appsync.ListResolversByFunctionInput) (r0 *appsync.ListResolversByFunctionOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListResolversByFunction", i0) r0, _ = returns[0].(*appsync.ListResolversByFunctionOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) ListResolversByFunctionRequest(i0 *appsync.ListResolversByFunctionInput) (r0 *request.Request, r1 *appsync.ListResolversByFunctionOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListResolversByFunctionRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.ListResolversByFunctionOutput) return } func (d *AppSyncDouble) ListResolversByFunctionWithContext(i0 context.Context, i1 *appsync.ListResolversByFunctionInput, i2 ...request.Option) (r0 *appsync.ListResolversByFunctionOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListResolversByFunctionWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.ListResolversByFunctionOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeListResolversByFunctionWithContext(ctx context.Context, in *appsync.ListResolversByFunctionInput, _ 
...request.Option) (*appsync.ListResolversByFunctionOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "ListResolversByFunction cancelled", ctx.Err()) default: return d.ListResolversByFunction(in) } } func (d *AppSyncDouble) ListResolversRequest(i0 *appsync.ListResolversInput) (r0 *request.Request, r1 *appsync.ListResolversOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListResolversRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.ListResolversOutput) return } func (d *AppSyncDouble) ListResolversWithContext(i0 context.Context, i1 *appsync.ListResolversInput, i2 ...request.Option) (r0 *appsync.ListResolversOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListResolversWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.ListResolversOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeListResolversWithContext(ctx context.Context, in *appsync.ListResolversInput, _ ...request.Option) (*appsync.ListResolversOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "ListResolvers cancelled", ctx.Err()) default: return d.ListResolvers(in) } } func (d *AppSyncDouble) ListTagsForResource(i0 *appsync.ListTagsForResourceInput) (r0 *appsync.ListTagsForResourceOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListTagsForResource", i0) r0, _ = returns[0].(*appsync.ListTagsForResourceOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) ListTagsForResourceRequest(i0 *appsync.ListTagsForResourceInput) (r0 *request.Request, r1 *appsync.ListTagsForResourceOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListTagsForResourceRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.ListTagsForResourceOutput) return } func (d *AppSyncDouble) ListTagsForResourceWithContext(i0 context.Context, i1 
*appsync.ListTagsForResourceInput, i2 ...request.Option) (r0 *appsync.ListTagsForResourceOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListTagsForResourceWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.ListTagsForResourceOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeListTagsForResourceWithContext(ctx context.Context, in *appsync.ListTagsForResourceInput, _ ...request.Option) (*appsync.ListTagsForResourceOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "ListTagsForResource cancelled", ctx.Err()) default: return d.ListTagsForResource(in) } } func (d *AppSyncDouble) ListTypes(i0 *appsync.ListTypesInput) (r0 *appsync.ListTypesOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListTypes", i0) r0, _ = returns[0].(*appsync.ListTypesOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) ListTypesRequest(i0 *appsync.ListTypesInput) (r0 *request.Request, r1 *appsync.ListTypesOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListTypesRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.ListTypesOutput) return } func (d *AppSyncDouble) ListTypesWithContext(i0 context.Context, i1 *appsync.ListTypesInput, i2 ...request.Option) (r0 *appsync.ListTypesOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("ListTypesWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.ListTypesOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeListTypesWithContext(ctx context.Context, in *appsync.ListTypesInput, _ ...request.Option) (*appsync.ListTypesOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "ListTypes cancelled", ctx.Err()) default: return d.ListTypes(in) } } func (d *AppSyncDouble) StartSchemaCreation(i0 *appsync.StartSchemaCreationInput) (r0 *appsync.StartSchemaCreationOutput, r1 error) { 
d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("StartSchemaCreation", i0) r0, _ = returns[0].(*appsync.StartSchemaCreationOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) StartSchemaCreationRequest(i0 *appsync.StartSchemaCreationInput) (r0 *request.Request, r1 *appsync.StartSchemaCreationOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("StartSchemaCreationRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.StartSchemaCreationOutput) return } func (d *AppSyncDouble) StartSchemaCreationWithContext(i0 context.Context, i1 *appsync.StartSchemaCreationInput, i2 ...request.Option) (r0 *appsync.StartSchemaCreationOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("StartSchemaCreationWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.StartSchemaCreationOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeStartSchemaCreationWithContext(ctx context.Context, in *appsync.StartSchemaCreationInput, _ ...request.Option) (*appsync.StartSchemaCreationOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "StartSchemaCreation cancelled", ctx.Err()) default: return d.StartSchemaCreation(in) } } func (d *AppSyncDouble) TagResource(i0 *appsync.TagResourceInput) (r0 *appsync.TagResourceOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("TagResource", i0) r0, _ = returns[0].(*appsync.TagResourceOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) TagResourceRequest(i0 *appsync.TagResourceInput) (r0 *request.Request, r1 *appsync.TagResourceOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("TagResourceRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.TagResourceOutput) return } func (d *AppSyncDouble) TagResourceWithContext(i0 context.Context, i1 *appsync.TagResourceInput, i2 ...request.Option) (r0 *appsync.TagResourceOutput, r1 error) { 
d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("TagResourceWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.TagResourceOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeTagResourceWithContext(ctx context.Context, in *appsync.TagResourceInput, _ ...request.Option) (*appsync.TagResourceOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "TagResource cancelled", ctx.Err()) default: return d.TagResource(in) } } func (d *AppSyncDouble) UntagResource(i0 *appsync.UntagResourceInput) (r0 *appsync.UntagResourceOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UntagResource", i0) r0, _ = returns[0].(*appsync.UntagResourceOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) UntagResourceRequest(i0 *appsync.UntagResourceInput) (r0 *request.Request, r1 *appsync.UntagResourceOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UntagResourceRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.UntagResourceOutput) return } func (d *AppSyncDouble) UntagResourceWithContext(i0 context.Context, i1 *appsync.UntagResourceInput, i2 ...request.Option) (r0 *appsync.UntagResourceOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UntagResourceWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.UntagResourceOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeUntagResourceWithContext(ctx context.Context, in *appsync.UntagResourceInput, _ ...request.Option) (*appsync.UntagResourceOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "UntagResource cancelled", ctx.Err()) default: return d.UntagResource(in) } } func (d *AppSyncDouble) UpdateApiCache(i0 *appsync.UpdateApiCacheInput) (r0 *appsync.UpdateApiCacheOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateApiCache", i0) r0, _ = 
returns[0].(*appsync.UpdateApiCacheOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) UpdateApiCacheRequest(i0 *appsync.UpdateApiCacheInput) (r0 *request.Request, r1 *appsync.UpdateApiCacheOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateApiCacheRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.UpdateApiCacheOutput) return } func (d *AppSyncDouble) UpdateApiCacheWithContext(i0 context.Context, i1 *appsync.UpdateApiCacheInput, i2 ...request.Option) (r0 *appsync.UpdateApiCacheOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateApiCacheWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.UpdateApiCacheOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeUpdateApiCacheWithContext(ctx context.Context, in *appsync.UpdateApiCacheInput, _ ...request.Option) (*appsync.UpdateApiCacheOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "UpdateApiCache cancelled", ctx.Err()) default: return d.UpdateApiCache(in) } } func (d *AppSyncDouble) UpdateApiKey(i0 *appsync.UpdateApiKeyInput) (r0 *appsync.UpdateApiKeyOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateApiKey", i0) r0, _ = returns[0].(*appsync.UpdateApiKeyOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) UpdateApiKeyRequest(i0 *appsync.UpdateApiKeyInput) (r0 *request.Request, r1 *appsync.UpdateApiKeyOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateApiKeyRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.UpdateApiKeyOutput) return } func (d *AppSyncDouble) UpdateApiKeyWithContext(i0 context.Context, i1 *appsync.UpdateApiKeyInput, i2 ...request.Option) (r0 *appsync.UpdateApiKeyOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateApiKeyWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.UpdateApiKeyOutput) r1, _ = 
returns[1].(error) return } func (d *AppSyncDouble) fakeUpdateApiKeyWithContext(ctx context.Context, in *appsync.UpdateApiKeyInput, _ ...request.Option) (*appsync.UpdateApiKeyOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "UpdateApiKey cancelled", ctx.Err()) default: return d.UpdateApiKey(in) } } func (d *AppSyncDouble) UpdateDataSource(i0 *appsync.UpdateDataSourceInput) (r0 *appsync.UpdateDataSourceOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateDataSource", i0) r0, _ = returns[0].(*appsync.UpdateDataSourceOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) UpdateDataSourceRequest(i0 *appsync.UpdateDataSourceInput) (r0 *request.Request, r1 *appsync.UpdateDataSourceOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateDataSourceRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.UpdateDataSourceOutput) return } func (d *AppSyncDouble) UpdateDataSourceWithContext(i0 context.Context, i1 *appsync.UpdateDataSourceInput, i2 ...request.Option) (r0 *appsync.UpdateDataSourceOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateDataSourceWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.UpdateDataSourceOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeUpdateDataSourceWithContext(ctx context.Context, in *appsync.UpdateDataSourceInput, _ ...request.Option) (*appsync.UpdateDataSourceOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "UpdateDataSource cancelled", ctx.Err()) default: return d.UpdateDataSource(in) } } func (d *AppSyncDouble) UpdateFunction(i0 *appsync.UpdateFunctionInput) (r0 *appsync.UpdateFunctionOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateFunction", i0) r0, _ = returns[0].(*appsync.UpdateFunctionOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) 
UpdateFunctionRequest(i0 *appsync.UpdateFunctionInput) (r0 *request.Request, r1 *appsync.UpdateFunctionOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateFunctionRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.UpdateFunctionOutput) return } func (d *AppSyncDouble) UpdateFunctionWithContext(i0 context.Context, i1 *appsync.UpdateFunctionInput, i2 ...request.Option) (r0 *appsync.UpdateFunctionOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateFunctionWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.UpdateFunctionOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeUpdateFunctionWithContext(ctx context.Context, in *appsync.UpdateFunctionInput, _ ...request.Option) (*appsync.UpdateFunctionOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "UpdateFunction cancelled", ctx.Err()) default: return d.UpdateFunction(in) } } func (d *AppSyncDouble) UpdateGraphqlApi(i0 *appsync.UpdateGraphqlApiInput) (r0 *appsync.UpdateGraphqlApiOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateGraphqlApi", i0) r0, _ = returns[0].(*appsync.UpdateGraphqlApiOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) UpdateGraphqlApiRequest(i0 *appsync.UpdateGraphqlApiInput) (r0 *request.Request, r1 *appsync.UpdateGraphqlApiOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateGraphqlApiRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.UpdateGraphqlApiOutput) return } func (d *AppSyncDouble) UpdateGraphqlApiWithContext(i0 context.Context, i1 *appsync.UpdateGraphqlApiInput, i2 ...request.Option) (r0 *appsync.UpdateGraphqlApiOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateGraphqlApiWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.UpdateGraphqlApiOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) 
fakeUpdateGraphqlApiWithContext(ctx context.Context, in *appsync.UpdateGraphqlApiInput, _ ...request.Option) (*appsync.UpdateGraphqlApiOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "UpdateGraphqlApi cancelled", ctx.Err()) default: return d.UpdateGraphqlApi(in) } } func (d *AppSyncDouble) UpdateResolver(i0 *appsync.UpdateResolverInput) (r0 *appsync.UpdateResolverOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateResolver", i0) r0, _ = returns[0].(*appsync.UpdateResolverOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) UpdateResolverRequest(i0 *appsync.UpdateResolverInput) (r0 *request.Request, r1 *appsync.UpdateResolverOutput) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateResolverRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.UpdateResolverOutput) return } func (d *AppSyncDouble) UpdateResolverWithContext(i0 context.Context, i1 *appsync.UpdateResolverInput, i2 ...request.Option) (r0 *appsync.UpdateResolverOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateResolverWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.UpdateResolverOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeUpdateResolverWithContext(ctx context.Context, in *appsync.UpdateResolverInput, _ ...request.Option) (*appsync.UpdateResolverOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "UpdateResolver cancelled", ctx.Err()) default: return d.UpdateResolver(in) } } func (d *AppSyncDouble) UpdateType(i0 *appsync.UpdateTypeInput) (r0 *appsync.UpdateTypeOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateType", i0) r0, _ = returns[0].(*appsync.UpdateTypeOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) UpdateTypeRequest(i0 *appsync.UpdateTypeInput) (r0 *request.Request, r1 *appsync.UpdateTypeOutput) { 
d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateTypeRequest", i0) r0, _ = returns[0].(*request.Request) r1, _ = returns[1].(*appsync.UpdateTypeOutput) return } func (d *AppSyncDouble) UpdateTypeWithContext(i0 context.Context, i1 *appsync.UpdateTypeInput, i2 ...request.Option) (r0 *appsync.UpdateTypeOutput, r1 error) { d.TestDouble.T().Helper() returns := d.TestDouble.Invoke("UpdateTypeWithContext", i0, i1, i2) r0, _ = returns[0].(*appsync.UpdateTypeOutput) r1, _ = returns[1].(error) return } func (d *AppSyncDouble) fakeUpdateTypeWithContext(ctx context.Context, in *appsync.UpdateTypeInput, _ ...request.Option) (*appsync.UpdateTypeOutput, error) { select { case <-ctx.Done(): return nil, awserr.New(request.CanceledErrorCode, "UpdateType cancelled", ctx.Err()) default: return d.UpdateType(in) } }
{ result := &AppSyncDouble{} configurators = append([]func(configurator *awsdouble.AWSTestDouble){func(d *awsdouble.AWSTestDouble) { d.SetDefaultCall(result.defaultMethodCall) d.SetDefaultReturnValues(result.defaultReturnValues) }}, configurators...) result.AWSTestDouble = awsdouble.NewDouble(t, (*appsynciface.AppSyncAPI)(nil), configurators...) return result }
resolve_dependencies.go
package pkg import ( birelpkg "github.com/cloudfoundry/bosh-init/release/pkg" ) func ResolveDependencies(pkg *birelpkg.Package) []*birelpkg.Package { return resolveInner(pkg, []*birelpkg.Package{}) } func resolveInner(pkg *birelpkg.Package, noFollow []*birelpkg.Package) []*birelpkg.Package { all := []*birelpkg.Package{} for _, depPkg := range pkg.Dependencies { if !contains(all, depPkg) && !contains(noFollow, depPkg) { all = append(all, depPkg) tDeps := resolveInner(depPkg, joinUnique(all, noFollow)) for _, tDepPkg := range tDeps { all = append(all, tDepPkg) } } } for i, el := range all { if el == pkg { all = append(all[:i], all[i+1:]...) } } return all } func contains(list []*birelpkg.Package, element *birelpkg.Package) bool { for _, pkg := range list { if element == pkg
} return false } func joinUnique(a []*birelpkg.Package, b []*birelpkg.Package) []*birelpkg.Package { joined := []*birelpkg.Package{} joined = append(joined, a...) for _, pkg := range b { if !contains(a, pkg) { joined = append(joined, pkg) } } return joined }
{ return true }
main.rs
use futures::executor::block_on; use perseus::internal::i18n::TranslationsManager; use perseus::internal::serve::{ServerOptions, ServerProps}; use perseus::plugins::PluginAction; use perseus::stores::MutableStore; use perseus::SsrNode; use perseus_engine::app::{ get_app_root, get_error_pages_contained, get_immutable_store, get_locales, get_mutable_store, get_plugins, get_static_aliases, get_templates_map_atomic_contained, get_translations_manager, }; use std::env; use std::fs; // This server executable can be run in two modes: // dev: inside `.perseus/server/src/main.rs`, works with that file structure // prod: as a standalone executable with a `dist/` directory as a sibling // Integration: Actix Web #[cfg(feature = "integration-actix-web")] #[actix_web::main] async fn main() -> std::io::Result<()> { println!("WARNING: The Actix Web integration uses a beta version of Actix Web, and is considered unstable. It is not recommended for production usage."); use actix_web::{App, HttpServer}; use perseus_actix_web::configurer; let is_standalone = get_standalone_and_act(); let (host, port) = get_host_and_port(); HttpServer::new(move || App::new().configure(block_on(configurer(get_props(is_standalone))))) .bind((host, port))? .run() .await } // Integration: Warp #[cfg(feature = "integration-warp")] #[tokio::main] async fn main() { use perseus_warp::perseus_routes; use std::net::SocketAddr; let is_standalone = get_standalone_and_act(); let props = get_props(is_standalone);
let (host, port) = get_host_and_port(); let addr: SocketAddr = format!("{}:{}", host, port) .parse() .expect("Invalid address provided to bind to."); let routes = block_on(perseus_routes(props)); warp::serve(routes).run(addr).await; } /// Determines whether or not we're operating in standalone mode, and acts accordingly. This MUST be executed in the parent thread, as it switches the current directory. fn get_standalone_and_act() -> bool { // So we don't have to define a different `FsConfigManager` just for the server, we shift the execution context to the same level as everything else // The server has to be a separate crate because otherwise the dependencies don't work with Wasm bundling // If we're not running as a standalone binary, assume we're running in dev mode under `.perseus/` if !cfg!(feature = "standalone") { env::set_current_dir("../").unwrap(); false } else { // If we are running as a standalone binary, we have no idea where we're being executed from (#63), so we should set the working directory to be the same as the binary location let binary_loc = env::current_exe().unwrap(); let binary_dir = binary_loc.parent().unwrap(); // It's a file, there's going to be a parent if we're working on anything close to sanity env::set_current_dir(binary_dir).unwrap(); true } } /// Gets the host and port to serve on. fn get_host_and_port() -> (String, u16) { // We have to use two sets of environment variables until v0.4.0 // TODO Remove the old environment variables in v0.4.0 let host_old = env::var("HOST"); let port_old = env::var("PORT"); let host = env::var("PERSEUS_HOST"); let port = env::var("PERSEUS_PORT"); let host = host.unwrap_or_else(|_| host_old.unwrap_or_else(|_| "127.0.0.1".to_string())); let port = port .unwrap_or_else(|_| port_old.unwrap_or_else(|_| "8080".to_string())) .parse::<u16>() .expect("Port must be a number."); (host, port) } /// Gets the properties to pass to the server. 
fn get_props(is_standalone: bool) -> ServerProps<impl MutableStore, impl TranslationsManager> { let plugins = get_plugins::<SsrNode>(); plugins .functional_actions .server_actions .before_serve .run((), plugins.get_plugin_data()); // This allows us to operate inside `.perseus/` and as a standalone binary in production let (html_shell_path, static_dir_path) = if is_standalone { ("./index.html", "./static") } else { ("../index.html", "../static") }; let immutable_store = get_immutable_store(&plugins); let locales = get_locales(&plugins); let app_root = get_app_root(&plugins); let static_aliases = get_static_aliases(&plugins); let opts = ServerOptions { // We don't support setting some attributes from `wasm-pack` through plugins/`define_app!` because that would require CLI changes as well (a job for an alternative engine) index: html_shell_path.to_string(), // The user must define their own `index.html` file js_bundle: "dist/pkg/perseus_engine.js".to_string(), // Our crate has the same name, so this will be predictable wasm_bundle: "dist/pkg/perseus_engine_bg.wasm".to_string(), // It's a nightmare to get the templates map to take plugins, so we use a self-contained version // TODO reduce allocations here templates_map: get_templates_map_atomic_contained(), locales, root_id: app_root, snippets: "dist/pkg/snippets".to_string(), error_pages: get_error_pages_contained(), // The CLI supports static content in `../static` by default if it exists // This will be available directly at `/.perseus/static` static_dir: if fs::metadata(&static_dir_path).is_ok() { Some(static_dir_path.to_string()) } else { None }, static_aliases, }; ServerProps { opts, immutable_store, mutable_store: get_mutable_store(), translations_manager: block_on(get_translations_manager()), } }
numpy2png.py
import numpy as np from PIL import Image import sys def
(source, dest): image = Image.fromarray(np.load(source)) image.save(dest,"PNG") if __name__ == "__main__": source = sys.argv[1] dest = source.split('.npy')[0] + '.png' print(source, " to ", dest) numpy_to_png(source, dest)
numpy_to_png
backend.py
############################################################################### ## ## Copyright (C) Tavendo GmbH and/or collaborators. All rights reserved. ## ## Redistribution and use in source and binary forms, with or without ## modification, are permitted provided that the following conditions are met: ## ## 1. Redistributions of source code must retain the above copyright notice, ## this list of conditions and the following disclaimer. ## ## 2. Redistributions in binary form must reproduce the above copyright notice, ## this list of conditions and the following disclaimer in the documentation ## and/or other materials provided with the distribution. ## ## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" ## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE ## IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ## ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE ## LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR ## CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF ## SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS ## INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN ## CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ## ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ## POSSIBILITY OF SUCH DAMAGE. ## ############################################################################### from twisted.internet.defer import inlineCallbacks from autobahn.twisted.wamp import ApplicationSession class BackendSession(ApplicationSession): @inlineCallbacks def onJoin(self, details): print("Backend session joined: {}".format(details)) def onhello(msg=None):
## SUBSCRIBE to a few topics we are allowed to subscribe to. ## for topic in [ 'com.example.topic1', 'com.foobar.topic1', 'com.foobar.topic2']: try: sub = yield self.subscribe(onhello, topic) print("ok, subscribed to topic {}".format(topic)) except Exception as e: print("could not subscribe to {}: {}".format(topic, e)) ## (try to) SUBSCRIBE to a topic we are not allowed to subscribe to (so this should fail). ## try: sub = yield self.subscribe(onhello, 'com.example.topic2') except Exception as e: print("subscription failed (this is expected!) {}".format(e)) ## REGISTER a procedure for remote calling ## def add2(x, y): print("add2() called with {} and {}".format(x, y)) return x + y try: reg = yield self.register(add2, 'com.example.add2') print("procedure add2() registered") except Exception as e: print("could not register procedure: {}".format(e))
print("event received on {}: {}".format(topic, msg))
private_variant_1.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. mod super_sekrit { pub enum sooper_sekrit { quux, priv baz } }
// http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
miners.go
package genesis import ( "bytes" "context" "fmt" "math/rand" power4 "github.com/filecoin-project/specs-actors/v4/actors/builtin/power" builtin6 "github.com/filecoin-project/specs-actors/v6/actors/builtin" reward4 "github.com/filecoin-project/specs-actors/v4/actors/builtin/reward" market4 "github.com/filecoin-project/specs-actors/v4/actors/builtin/market" "github.com/filecoin-project/venus/pkg/types/specactors" "github.com/filecoin-project/venus/pkg/types/specactors/builtin" "github.com/filecoin-project/venus/pkg/types/specactors/policy" "github.com/filecoin-project/venus/pkg/types/specactors/adt" "github.com/filecoin-project/go-state-types/network" market0 "github.com/filecoin-project/specs-actors/actors/builtin/market" "github.com/filecoin-project/venus/pkg/types/specactors/builtin/power" "github.com/filecoin-project/venus/pkg/types/specactors/builtin/reward" "github.com/filecoin-project/venus/pkg/types/specactors/builtin/market" "github.com/filecoin-project/venus/pkg/types/specactors/builtin/miner" "github.com/ipfs/go-cid" cbor "github.com/ipfs/go-ipld-cbor" cbg "github.com/whyrusleeping/cbor-gen" "golang.org/x/xerrors" "github.com/filecoin-project/go-address" "github.com/filecoin-project/go-state-types/abi" "github.com/filecoin-project/go-state-types/big" "github.com/filecoin-project/go-state-types/crypto" builtin0 "github.com/filecoin-project/specs-actors/actors/builtin" miner0 "github.com/filecoin-project/specs-actors/actors/builtin/miner" power0 "github.com/filecoin-project/specs-actors/actors/builtin/power" reward0 "github.com/filecoin-project/specs-actors/actors/builtin/reward" "github.com/filecoin-project/venus/pkg/chain" "github.com/filecoin-project/venus/pkg/config" "github.com/filecoin-project/venus/pkg/consensusfault" "github.com/filecoin-project/venus/pkg/fork" "github.com/filecoin-project/venus/pkg/state/tree" "github.com/filecoin-project/venus/pkg/types" "github.com/filecoin-project/venus/pkg/util/ffiwrapper/impl" 
"github.com/filecoin-project/venus/pkg/vm" "github.com/filecoin-project/venus/pkg/vm/gas" "github.com/filecoin-project/venus/pkg/vm/vmcontext" "github.com/filecoin-project/venus/pkg/vmsupport" ) func MinerAddress(genesisIndex uint64) address.Address { maddr, err := address.NewIDAddress(MinerStart + genesisIndex) if err != nil { panic(err) } return maddr } type fakedSigSyscalls struct { vmcontext.SyscallsImpl } func (fss *fakedSigSyscalls) VerifySignature(ctx context.Context, view vmcontext.SyscallsStateView, signature crypto.Signature, signer address.Address, plaintext []byte) error { return nil } func mkFakedSigSyscalls(sys vmcontext.SyscallsImpl) vmcontext.SyscallsImpl { return &fakedSigSyscalls{ sys, } } // Note: Much of this is brittle, if the methodNum / param / return changes, it will break things func SetupStorageMiners(ctx context.Context, cs *chain.Store, sroot cid.Cid, miners []Miner, nv network.Version, para *config.ForkUpgradeConfig) (cid.Cid, error) { cst := cbor.NewCborStore(cs.Blockstore()) av, err := specactors.VersionForNetwork(nv) if err != nil { return cid.Undef, xerrors.Errorf("get actor version: %w", err) } csc := func(context.Context, abi.ChainEpoch, tree.Tree) (abi.TokenAmount, error) { return big.Zero(), nil } genesisNetworkVersion := func(context.Context, abi.ChainEpoch) network.Version { return nv } faultChecker := consensusfault.NewFaultChecker(cs, fork.NewMockFork()) syscalls := vmsupport.NewSyscalls(faultChecker, impl.ProofVerifier) gasPirceSchedule := gas.NewPricesSchedule(para) vmopt := vm.VmOption{ CircSupplyCalculator: csc, NtwkVersionGetter: genesisNetworkVersion, Rnd: &fakeRand{}, BaseFee: big.NewInt(0), Epoch: 0, PRoot: sroot, Bsstore: cs.Blockstore(), SysCallsImpl: mkFakedSigSyscalls(syscalls), GasPriceSchedule: gasPirceSchedule, } vmi, err := vm.NewVM(vmopt) if err != nil { return cid.Undef, xerrors.Errorf("failed to create NewVM: %w", err) } if len(miners) == 0 { return cid.Undef, xerrors.New("no genesis miners") } minerInfos 
:= make([]struct { maddr address.Address presealExp abi.ChainEpoch dealIDs []abi.DealID }, len(miners)) maxPeriods := policy.GetMaxSectorExpirationExtension() / miner.WPoStProvingPeriod for i, m := range miners { // Create miner through power actor i := i m := m spt, err := miner.SealProofTypeFromSectorSize(m.SectorSize, nv) if err != nil { return cid.Undef, err } { constructorParams := &power0.CreateMinerParams{ Owner: m.Worker, Worker: m.Worker, Peer: []byte(m.PeerID), SealProofType: spt, } params := mustEnc(constructorParams) rval, err := doExecValue(ctx, vmi, power.Address, m.Owner, m.PowerBalance, power.Methods.CreateMiner, params) if err != nil { return cid.Undef, xerrors.Errorf("failed to create genesis miner: %w", err) } var ma power0.CreateMinerReturn if err := ma.UnmarshalCBOR(bytes.NewReader(rval)); err != nil { return cid.Undef, xerrors.Errorf("unmarshaling CreateMinerReturn: %w", err) } expma := MinerAddress(uint64(i)) if ma.IDAddress != expma { return cid.Undef, xerrors.Errorf("miner assigned wrong address: %s != %s", ma.IDAddress, expma) } minerInfos[i].maddr = ma.IDAddress _, err = vmi.Flush() if err != nil { return cid.Undef, xerrors.Errorf("flushing vm: %w", err) } mact, find, err := vmi.StateTree().GetActor(ctx, minerInfos[i].maddr) if err != nil { return cid.Undef, xerrors.Errorf("getting newly created miner actor: %w", err) } if !find { return cid.Undef, xerrors.New("actor not found") } mst, err := miner.Load(adt.WrapStore(ctx, cst), mact) if err != nil { return cid.Undef, xerrors.Errorf("getting newly created miner state: %w", err) } pps, err := mst.GetProvingPeriodStart() if err != nil { return cid.Undef, xerrors.Errorf("getting newly created miner proving period start: %w", err) } minerInfos[i].presealExp = (maxPeriods-1)*miner0.WPoStProvingPeriod + pps - 1 } // Add market funds if m.MarketBalance.GreaterThan(big.Zero()) { params := mustEnc(&minerInfos[i].maddr) _, err := doExecValue(ctx, vmi, market.Address, m.Worker, m.MarketBalance, 
market.Methods.AddBalance, params) if err != nil { return cid.Undef, xerrors.Errorf("failed to create genesis miner (add balance): %w", err) } } // Publish preseal deals { publish := func(params *market.PublishStorageDealsParams) error { fmt.Printf("publishing %d storage deals on miner %s with worker %s\n", len(params.Deals), params.Deals[0].Proposal.Provider, m.Worker) ret, err := doExecValue(ctx, vmi, market.Address, m.Worker, big.Zero(), builtin0.MethodsMarket.PublishStorageDeals, mustEnc(params)) if err != nil { return xerrors.Errorf("failed to create genesis miner (publish deals): %w", err) } retval, err := market.DecodePublishStorageDealsReturn(ret, nv) if err != nil
ids, err := retval.DealIDs() if err != nil { return xerrors.Errorf("failed to create genesis miner (getting published dealIDs): %w", err) } if len(ids) != len(params.Deals) { return xerrors.Errorf("failed to create genesis miner (at least one deal was invalid on publication") } minerInfos[i].dealIDs = append(minerInfos[i].dealIDs, ids...) return nil } params := &market.PublishStorageDealsParams{} for _, preseal := range m.Sectors { preseal.Deal.VerifiedDeal = true preseal.Deal.EndEpoch = minerInfos[i].presealExp params.Deals = append(params.Deals, market.ClientDealProposal{ Proposal: preseal.Deal, ClientSignature: crypto.Signature{Type: crypto.SigTypeBLS}, // TODO: do we want to sign these? Or do we want to fake signatures for genesis setup? }) if len(params.Deals) == cbg.MaxLength { if err := publish(params); err != nil { return cid.Undef, err } params = &market.PublishStorageDealsParams{} } } if len(params.Deals) > 0 { if err := publish(params); err != nil { return cid.Undef, err } } } } // adjust total network power for equal pledge per sector rawPow, qaPow := big.NewInt(0), big.NewInt(0) { for i, m := range miners { for pi := range m.Sectors { rawPow = types.BigAdd(rawPow, types.NewInt(uint64(m.SectorSize))) dweight, vdweight, err := dealWeight(ctx, vmi, minerInfos[i].maddr, []abi.DealID{minerInfos[i].dealIDs[pi]}, 0, minerInfos[i].presealExp, av) if err != nil { return cid.Undef, xerrors.Errorf("getting deal weight: %w", err) } sectorWeight := builtin.QAPowerForWeight(m.SectorSize, minerInfos[i].presealExp, dweight, vdweight) qaPow = types.BigAdd(qaPow, sectorWeight) } } _, err = vmi.Flush() if err != nil { return cid.Undef, xerrors.Errorf("flushing vm: %w", err) } pact, find, err := vmi.StateTree().GetActor(ctx, power.Address) if err != nil { return cid.Undef, xerrors.Errorf("getting power actor: %w", err) } if !find { return cid.Undef, xerrors.New("power actor not exist") } pst, err := power.Load(adt.WrapStore(ctx, cst), pact) if err != nil { return 
cid.Undef, xerrors.Errorf("getting power state: %w", err) } if err = pst.SetTotalQualityAdjPower(qaPow); err != nil { return cid.Undef, xerrors.Errorf("setting TotalQualityAdjPower in power state: %w", err) } if err = pst.SetTotalRawBytePower(rawPow); err != nil { return cid.Undef, xerrors.Errorf("setting TotalRawBytePower in power state: %w", err) } if err = pst.SetThisEpochQualityAdjPower(qaPow); err != nil { return cid.Undef, xerrors.Errorf("setting ThisEpochQualityAdjPower in power state: %w", err) } if err = pst.SetThisEpochRawBytePower(rawPow); err != nil { return cid.Undef, xerrors.Errorf("setting ThisEpochRawBytePower in power state: %w", err) } pcid, err := cst.Put(ctx, pst.GetState()) if err != nil { return cid.Undef, xerrors.Errorf("putting power state: %w", err) } pact.Head = pcid if err = vmi.StateTree().SetActor(ctx, power.Address, pact); err != nil { return cid.Undef, xerrors.Errorf("setting power state: %w", err) } ver, err := specactors.VersionForNetwork(nv) if err != nil { return cid.Undef, xerrors.Errorf("get actor version: %w", err) } rewact, err := SetupRewardActor(ctx, cs.Blockstore(), big.Zero(), ver) if err != nil { return cid.Undef, xerrors.Errorf("setup reward actor: %w", err) } if err = vmi.StateTree().SetActor(ctx, reward.Address, rewact); err != nil { return cid.Undef, xerrors.Errorf("set reward actor: %w", err) } } for i, m := range miners { // Commit sectors { for pi, preseal := range m.Sectors { params := &miner.SectorPreCommitInfo{ SealProof: preseal.ProofType, SectorNumber: preseal.SectorID, SealedCID: preseal.CommR, SealRandEpoch: -1, DealIDs: []abi.DealID{minerInfos[i].dealIDs[pi]}, Expiration: minerInfos[i].presealExp, // TODO: Allow setting externally! 
} dweight, vdweight, err := dealWeight(ctx, vmi, minerInfos[i].maddr, params.DealIDs, 0, minerInfos[i].presealExp, av) if err != nil { return cid.Undef, xerrors.Errorf("getting deal weight: %w", err) } sectorWeight := builtin.QAPowerForWeight(m.SectorSize, minerInfos[i].presealExp, dweight, vdweight) // we've added fake power for this sector above, remove it now _, err = vmi.Flush() if err != nil { return cid.Undef, xerrors.Errorf("flushing vm: %w", err) } pact, find, err := vmi.StateTree().GetActor(ctx, power.Address) if err != nil { return cid.Undef, xerrors.Errorf("getting power actor: %w", err) } if !find { return cid.Undef, xerrors.New("power actor not exist") } pst, err := power.Load(adt.WrapStore(ctx, cst), pact) if err != nil { return cid.Undef, xerrors.Errorf("getting power state: %w", err) } pc, err := pst.TotalPower() if err != nil { return cid.Undef, xerrors.Errorf("getting total power: %w", err) } if err = pst.SetTotalRawBytePower(types.BigSub(pc.RawBytePower, types.NewInt(uint64(m.SectorSize)))); err != nil { return cid.Undef, xerrors.Errorf("setting TotalRawBytePower in power state: %w", err) } if err = pst.SetTotalQualityAdjPower(types.BigSub(pc.QualityAdjPower, sectorWeight)); err != nil { return cid.Undef, xerrors.Errorf("setting TotalQualityAdjPower in power state: %w", err) } pcid, err := cst.Put(ctx, pst.GetState()) if err != nil { return cid.Undef, xerrors.Errorf("putting power state: %w", err) } pact.Head = pcid if err = vmi.StateTree().SetActor(ctx, power.Address, pact); err != nil { return cid.Undef, xerrors.Errorf("setting power state: %w", err) } baselinePower, rewardSmoothed, err := currentEpochBlockReward(ctx, vmi, minerInfos[i].maddr, av) if err != nil { return cid.Undef, xerrors.Errorf("getting current epoch reward: %w", err) } tpow, err := currentTotalPower(ctx, vmi, minerInfos[i].maddr) if err != nil { return cid.Undef, xerrors.Errorf("getting current total power: %w", err) } pcd := miner0.PreCommitDepositForPower(&rewardSmoothed, 
tpow.QualityAdjPowerSmoothed, sectorWeight) pledge := miner0.InitialPledgeForPower( sectorWeight, baselinePower, tpow.PledgeCollateral, &rewardSmoothed, tpow.QualityAdjPowerSmoothed, circSupply(ctx, vmi, minerInfos[i].maddr), ) pledge = big.Add(pcd, pledge) fmt.Println(types.FIL(pledge)) _, err = doExecValue(ctx, vmi, minerInfos[i].maddr, m.Worker, pledge, miner.Methods.PreCommitSector, mustEnc(params)) if err != nil { return cid.Undef, xerrors.Errorf("failed to confirm presealed sectors: %w", err) } // Commit one-by-one, otherwise pledge math tends to explode var paramBytes []byte if av >= specactors.Version6 { // TODO: fixup confirmParams := &builtin6.ConfirmSectorProofsParams{ Sectors: []abi.SectorNumber{preseal.SectorID}, } paramBytes = mustEnc(confirmParams) } else { confirmParams := &builtin0.ConfirmSectorProofsParams{ Sectors: []abi.SectorNumber{preseal.SectorID}, } paramBytes = mustEnc(confirmParams) } _, err = doExecValue(ctx, vmi, minerInfos[i].maddr, power.Address, big.Zero(), miner.Methods.ConfirmSectorProofsValid, paramBytes) if err != nil { return cid.Undef, xerrors.Errorf("failed to confirm presealed sectors: %w", err) } if av > specactors.Version2 { // post v2, we need to explicitly Claim this power since ConfirmSectorProofsValid doesn't do it anymore claimParams := &power4.UpdateClaimedPowerParams{ RawByteDelta: types.NewInt(uint64(m.SectorSize)), QualityAdjustedDelta: sectorWeight, } _, err = doExecValue(ctx, vmi, power.Address, minerInfos[i].maddr, big.Zero(), power.Methods.UpdateClaimedPower, mustEnc(claimParams)) if err != nil { return cid.Undef, xerrors.Errorf("failed to confirm presealed sectors: %w", err) } _, err = vmi.Flush() if err != nil { return cid.Undef, xerrors.Errorf("flushing vm: %w", err) } mact, find, err := vmi.StateTree().GetActor(ctx, minerInfos[i].maddr) if err != nil { return cid.Undef, xerrors.Errorf("getting miner actor: %w", err) } if !find { return cid.Undef, xerrors.New("actor not found") } mst, err := 
miner.Load(adt.WrapStore(ctx, cst), mact) if err != nil { return cid.Undef, xerrors.Errorf("getting miner state: %w", err) } if err = mst.EraseAllUnproven(); err != nil { return cid.Undef, xerrors.Errorf("failed to erase unproven sectors: %w", err) } mcid, err := cst.Put(ctx, mst.GetState()) if err != nil { return cid.Undef, xerrors.Errorf("putting miner state: %w", err) } mact.Head = mcid if err = vmi.StateTree().SetActor(ctx, minerInfos[i].maddr, mact); err != nil { return cid.Undef, xerrors.Errorf("setting miner state: %w", err) } } } } } // Sanity-check total network power _, err = vmi.Flush() if err != nil { return cid.Undef, xerrors.Errorf("flushing vm: %w", err) } pact, find, err := vmi.StateTree().GetActor(ctx, power.Address) if err != nil { return cid.Undef, xerrors.Errorf("getting power actor: %w", err) } if !find { return cid.Undef, xerrors.New("actor not found") } pst, err := power.Load(adt.WrapStore(ctx, cst), pact) if err != nil { return cid.Undef, xerrors.Errorf("getting power state: %w", err) } pc, err := pst.TotalPower() if err != nil { return cid.Undef, xerrors.Errorf("getting total power: %w", err) } if !pc.RawBytePower.Equals(rawPow) { return cid.Undef, xerrors.Errorf("TotalRawBytePower (%s) doesn't match previously calculated rawPow (%s)", pc.RawBytePower, rawPow) } if !pc.QualityAdjPower.Equals(qaPow) { return cid.Undef, xerrors.Errorf("QualityAdjPower (%s) doesn't match previously calculated qaPow (%s)", pc.QualityAdjPower, qaPow) } // TODO: Should we re-ConstructState for the reward actor using rawPow as currRealizedPower here? 
c, err := vmi.Flush() if err != nil { return cid.Undef, xerrors.Errorf("flushing vm: %w", err) } return c, nil } // TODO: copied from actors test harness, deduplicate or remove from here type fakeRand struct{} func (fr *fakeRand) ChainGetRandomnessFromBeacon(ctx context.Context, personalization crypto.DomainSeparationTag, randEpoch abi.ChainEpoch, entropy []byte) (abi.Randomness, error) { out := make([]byte, 32) _, _ = rand.New(rand.NewSource(int64(randEpoch * 1000))).Read(out) //nolint return out, nil } func (fr *fakeRand) ChainGetRandomnessFromTickets(ctx context.Context, personalization crypto.DomainSeparationTag, randEpoch abi.ChainEpoch, entropy []byte) (abi.Randomness, error) { out := make([]byte, 32) _, _ = rand.New(rand.NewSource(int64(randEpoch * 1000))).Read(out) //nolint return out, nil } func currentTotalPower(ctx context.Context, vmi vm.Interpreter, maddr address.Address) (*power0.CurrentTotalPowerReturn, error) { pwret, err := doExecValue(ctx, vmi, power.Address, maddr, big.Zero(), builtin0.MethodsPower.CurrentTotalPower, nil) if err != nil { return nil, err } var pwr power0.CurrentTotalPowerReturn if err := pwr.UnmarshalCBOR(bytes.NewReader(pwret)); err != nil { return nil, err } return &pwr, nil } func dealWeight(ctx context.Context, vmi vm.Interpreter, maddr address.Address, dealIDs []abi.DealID, sectorStart, sectorExpiry abi.ChainEpoch, av specactors.Version) (abi.DealWeight, abi.DealWeight, error) { // TODO: This hack should move to market actor wrapper if av <= specactors.Version2 { params := &market0.VerifyDealsForActivationParams{ DealIDs: dealIDs, SectorStart: sectorStart, SectorExpiry: sectorExpiry, } var dealWeights market0.VerifyDealsForActivationReturn ret, err := doExecValue(ctx, vmi, market.Address, maddr, abi.NewTokenAmount(0), builtin0.MethodsMarket.VerifyDealsForActivation, mustEnc(params), ) if err != nil { return big.Zero(), big.Zero(), err } if err := dealWeights.UnmarshalCBOR(bytes.NewReader(ret)); err != nil { return big.Zero(), 
big.Zero(), err } return dealWeights.DealWeight, dealWeights.VerifiedDealWeight, nil } params := &market4.VerifyDealsForActivationParams{Sectors: []market4.SectorDeals{{ SectorExpiry: sectorExpiry, DealIDs: dealIDs, }}} var dealWeights market4.VerifyDealsForActivationReturn ret, err := doExecValue(ctx, vmi, market.Address, maddr, abi.NewTokenAmount(0), market.Methods.VerifyDealsForActivation, mustEnc(params), ) if err != nil { return big.Zero(), big.Zero(), err } if err := dealWeights.UnmarshalCBOR(bytes.NewReader(ret)); err != nil { return big.Zero(), big.Zero(), err } return dealWeights.Sectors[0].DealWeight, dealWeights.Sectors[0].VerifiedDealWeight, nil } func currentEpochBlockReward(ctx context.Context, vmi vm.Interpreter, maddr address.Address, av specactors.Version) (abi.StoragePower, builtin.FilterEstimate, error) { rwret, err := doExecValue(ctx, vmi, reward.Address, maddr, big.Zero(), reward.Methods.ThisEpochReward, nil) if err != nil { return big.Zero(), builtin.FilterEstimate{}, err } // TODO: This hack should move to reward actor wrapper if av <= specactors.Version2 { var epochReward reward0.ThisEpochRewardReturn if err := epochReward.UnmarshalCBOR(bytes.NewReader(rwret)); err != nil { return big.Zero(), builtin.FilterEstimate{}, err } return epochReward.ThisEpochBaselinePower, *epochReward.ThisEpochRewardSmoothed, nil } var epochReward reward4.ThisEpochRewardReturn if err := epochReward.UnmarshalCBOR(bytes.NewReader(rwret)); err != nil { return big.Zero(), builtin.FilterEstimate{}, err } return epochReward.ThisEpochBaselinePower, builtin.FilterEstimate(epochReward.ThisEpochRewardSmoothed), nil } // todo what is actually called here is vmi.vmOption.CircSupplyCalculator(context.TODO(), height, st) -> L55, So there is no structure UnsafeVM ??? 
func circSupply(ctx context.Context, vmi vm.Interpreter, maddr address.Address) abi.TokenAmount { //unsafeVM := &vm.UnsafeVM{VM: vmi.} //rt := unsafeVM.MakeRuntime(ctx, &types.Message{ // GasLimit: 1_000_000_000, // From: maddr, //}) // //return rt.TotalFilCircSupply() return big.Zero() }
{ return xerrors.Errorf("failed to create genesis miner (decoding published deals): %w", err) }
Filler.js
'use strict'; import cx from 'classnames'; import styles from './progress-bar.scss'; const Filler = props => { const width = `${Math.round(props.share * props.progress * 100)}%`; return ( <div className={cx(styles.fillerContainer)} style={{ width }}> <div className={cx(styles.filler)} /> <div className={styles.children}> {props.children} </div>
); }; export default Filler;
</div>
client.rs
#[cfg(any(feature = "native-tls", feature = "__rustls",))] use std::any::Any; use std::net::IpAddr; use std::sync::Arc; use std::time::Duration; use std::{collections::HashMap, convert::TryInto, net::SocketAddr}; use std::{fmt, str}; use bytes::Bytes; use http::header::{ Entry, HeaderMap, HeaderValue, ACCEPT, ACCEPT_ENCODING, CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TYPE, LOCATION, PROXY_AUTHORIZATION, RANGE, REFERER, TRANSFER_ENCODING, USER_AGENT, }; use http::uri::Scheme; use http::Uri; use hyper::client::ResponseFuture; #[cfg(feature = "native-tls-crate")] use native_tls_crate::TlsConnector; use pin_project_lite::pin_project; #[cfg(feature = "rustls-tls-native-roots")] use rustls::RootCertStore; use std::future::Future; use std::pin::Pin; use std::task::{Context, Poll}; use tokio::time::Sleep; use log::debug; use super::decoder::Accepts; use super::request::{Request, RequestBuilder}; use super::response::Response; use super::Body; use crate::connect::{Connector, HttpConnector}; #[cfg(feature = "cookies")] use crate::cookie; use crate::error; use crate::into_url::{expect_uri, try_uri}; use crate::redirect::{self, remove_sensitive_headers}; #[cfg(feature = "__tls")] use crate::tls::TlsBackend; #[cfg(feature = "__tls")] use crate::Certificate; #[cfg(any(feature = "native-tls", feature = "__rustls"))] use crate::Identity; use crate::{IntoUrl, Method, Proxy, StatusCode, Url}; /// An asynchronous `Client` to make Requests with. /// /// The Client has various configuration values to tweak, but the defaults /// are set to what is usually the most commonly desired value. To configure a /// `Client`, use `Client::builder()`. /// /// The `Client` holds a connection pool internally, so it is advised that /// you create one and **reuse** it. /// /// You do **not** have to wrap the `Client` in an [`Rc`] or [`Arc`] to **reuse** it, /// because it already uses an [`Arc`] internally. 
/// /// [`Rc`]: std::rc::Rc #[derive(Clone)] pub struct Client { inner: Arc<ClientRef>, } /// A `ClientBuilder` can be used to create a `Client` with custom configuration. #[must_use] pub struct ClientBuilder { config: Config, } struct Config { // NOTE: When adding a new field, update `fmt::Debug for ClientBuilder` accepts: Accepts, headers: HeaderMap, #[cfg(feature = "native-tls")] hostname_verification: bool, #[cfg(feature = "__tls")] certs_verification: bool, connect_timeout: Option<Duration>, connection_verbose: bool, pool_idle_timeout: Option<Duration>, pool_max_idle_per_host: usize, tcp_keepalive: Option<Duration>, #[cfg(any(feature = "native-tls", feature = "__rustls"))] identity: Option<Identity>, proxies: Vec<Proxy>, auto_sys_proxy: bool, redirect_policy: redirect::Policy, referer: bool, timeout: Option<Duration>, #[cfg(feature = "__tls")] root_certs: Vec<Certificate>, #[cfg(feature = "__tls")] tls_built_in_root_certs: bool, #[cfg(feature = "__tls")] tls: TlsBackend, http2_only: bool, http1_title_case_headers: bool, http2_initial_stream_window_size: Option<u32>, http2_initial_connection_window_size: Option<u32>, http2_adaptive_window: bool, http2_max_frame_size: Option<u32>, local_address: Option<IpAddr>, nodelay: bool, #[cfg(feature = "cookies")] cookie_store: Option<Arc<dyn cookie::CookieStore>>, trust_dns: bool, error: Option<crate::Error>, https_only: bool, dns_overrides: HashMap<String, SocketAddr>, } impl Default for ClientBuilder { fn default() -> Self { Self::new() } } impl ClientBuilder { /// Constructs a new `ClientBuilder`. /// /// This is the same as `Client::builder()`. 
pub fn new() -> ClientBuilder { let mut headers: HeaderMap<HeaderValue> = HeaderMap::with_capacity(2); headers.insert(ACCEPT, HeaderValue::from_static("*/*")); ClientBuilder { config: Config { error: None, accepts: Accepts::default(), headers, #[cfg(feature = "native-tls")] hostname_verification: true, #[cfg(feature = "__tls")] certs_verification: true, connect_timeout: None, connection_verbose: false, pool_idle_timeout: Some(Duration::from_secs(90)), pool_max_idle_per_host: std::usize::MAX, // TODO: Re-enable default duration once hyper's HttpConnector is fixed // to no longer error when an option fails. tcp_keepalive: None, //Some(Duration::from_secs(60)), proxies: Vec::new(), auto_sys_proxy: true, redirect_policy: redirect::Policy::default(), referer: true, timeout: None, #[cfg(feature = "__tls")] root_certs: Vec::new(), #[cfg(feature = "__tls")] tls_built_in_root_certs: true, #[cfg(any(feature = "native-tls", feature = "__rustls"))] identity: None, #[cfg(feature = "__tls")] tls: TlsBackend::default(), http2_only: false, http1_title_case_headers: false, http2_initial_stream_window_size: None, http2_initial_connection_window_size: None, http2_adaptive_window: false, http2_max_frame_size: None, local_address: None, nodelay: true, trust_dns: cfg!(feature = "trust-dns"), #[cfg(feature = "cookies")] cookie_store: None, https_only: false, dns_overrides: HashMap::new(), }, } } /// Returns a `Client` that uses this `ClientBuilder` configuration. /// /// # Errors /// /// This method fails if a TLS backend cannot be initialized, or the resolver /// cannot load the system configuration. 
pub fn build(self) -> crate::Result<Client> {
    let config = self.config;

    // Surface any error staged by a setter (e.g. a bad header value).
    if let Some(err) = config.error {
        return Err(err);
    }

    let mut proxies = config.proxies;
    if config.auto_sys_proxy {
        proxies.push(Proxy::system());
    }
    let proxies = Arc::new(proxies);

    let mut connector = {
        #[cfg(feature = "__tls")]
        fn user_agent(headers: &HeaderMap) -> Option<HeaderValue> {
            headers.get(USER_AGENT).cloned()
        }

        // Pick the DNS resolver: getaddrinfo by default, trust-dns when the
        // feature and flag are both enabled.
        let http = match config.trust_dns {
            false => {
                if config.dns_overrides.is_empty() {
                    HttpConnector::new_gai()
                } else {
                    HttpConnector::new_gai_with_overrides(config.dns_overrides)
                }
            }
            #[cfg(feature = "trust-dns")]
            true => {
                if config.dns_overrides.is_empty() {
                    HttpConnector::new_trust_dns()?
                } else {
                    HttpConnector::new_trust_dns_with_overrides(config.dns_overrides)?
                }
            }
            #[cfg(not(feature = "trust-dns"))]
            true => unreachable!("trust-dns shouldn't be enabled unless the feature is"),
        };

        // Layer the feature-selected TLS backend over the HTTP connector.
        #[cfg(feature = "__tls")]
        match config.tls {
            #[cfg(feature = "default-tls")]
            TlsBackend::Default => {
                let mut tls = TlsConnector::builder();

                #[cfg(feature = "native-tls-alpn")]
                {
                    if config.http2_only {
                        tls.request_alpns(&["h2"]);
                    } else {
                        tls.request_alpns(&["h2", "http/1.1"]);
                    }
                }

                #[cfg(feature = "native-tls")]
                {
                    tls.danger_accept_invalid_hostnames(!config.hostname_verification);
                }

                tls.danger_accept_invalid_certs(!config.certs_verification);

                tls.disable_built_in_roots(!config.tls_built_in_root_certs);

                for cert in config.root_certs {
                    cert.add_to_native_tls(&mut tls);
                }

                #[cfg(feature = "native-tls")]
                {
                    if let Some(id) = config.identity {
                        id.add_to_native_tls(&mut tls)?;
                    }
                }

                Connector::new_default_tls(
                    http,
                    tls,
                    proxies.clone(),
                    user_agent(&config.headers),
                    config.local_address,
                    config.nodelay,
                )?
            }
            #[cfg(feature = "native-tls")]
            TlsBackend::BuiltNativeTls(conn) => Connector::from_built_default_tls(
                http,
                conn,
                proxies.clone(),
                user_agent(&config.headers),
                config.local_address,
                config.nodelay,
            ),
            #[cfg(feature = "__rustls")]
            TlsBackend::BuiltRustls(conn) => Connector::new_rustls_tls(
                http,
                conn,
                proxies.clone(),
                user_agent(&config.headers),
                config.local_address,
                config.nodelay,
            ),
            #[cfg(feature = "__rustls")]
            TlsBackend::Rustls => {
                use crate::tls::NoVerifier;

                let mut tls = rustls::ClientConfig::new();
                if config.http2_only {
                    tls.set_protocols(&["h2".into()]);
                } else {
                    tls.set_protocols(&["h2".into(), "http/1.1".into()]);
                }
                #[cfg(feature = "rustls-tls-webpki-roots")]
                if config.tls_built_in_root_certs {
                    tls.root_store
                        .add_server_trust_anchors(&webpki_roots::TLS_SERVER_ROOTS);
                }
                #[cfg(feature = "rustls-tls-native-roots")]
                if config.tls_built_in_root_certs {
                    let roots_slice = NATIVE_ROOTS.as_ref().unwrap().roots.as_slice();
                    tls.root_store.roots.extend_from_slice(roots_slice);
                }
                if !config.certs_verification {
                    tls.dangerous()
                        .set_certificate_verifier(Arc::new(NoVerifier));
                }
                for cert in config.root_certs {
                    cert.add_to_rustls(&mut tls)?;
                }
                if let Some(id) = config.identity {
                    id.add_to_rustls(&mut tls)?;
                }

                Connector::new_rustls_tls(
                    http,
                    tls,
                    proxies.clone(),
                    user_agent(&config.headers),
                    config.local_address,
                    config.nodelay,
                )
            }
            #[cfg(any(feature = "native-tls", feature = "__rustls",))]
            TlsBackend::UnknownPreconfigured => {
                return Err(crate::error::builder(
                    "Unknown TLS backend passed to `use_preconfigured_tls`",
                ));
            }
        }

        #[cfg(not(feature = "__tls"))]
        Connector::new(http, proxies.clone(), config.local_address, config.nodelay)
    };

    connector.set_timeout(config.connect_timeout);
    connector.set_verbose(config.connection_verbose);

    // Forward the HTTP/2 and pool tuning knobs to the hyper client builder.
    let mut builder = hyper::Client::builder();
    if config.http2_only {
        builder.http2_only(true);
    }

    if let Some(http2_initial_stream_window_size) = config.http2_initial_stream_window_size {
        builder.http2_initial_stream_window_size(http2_initial_stream_window_size);
    }
    if let Some(http2_initial_connection_window_size) =
        config.http2_initial_connection_window_size
    {
        builder.http2_initial_connection_window_size(http2_initial_connection_window_size);
    }
    if config.http2_adaptive_window {
        builder.http2_adaptive_window(true);
    }
    if let Some(http2_max_frame_size) = config.http2_max_frame_size {
        builder.http2_max_frame_size(http2_max_frame_size);
    }
    builder.pool_idle_timeout(config.pool_idle_timeout);
    builder.pool_max_idle_per_host(config.pool_max_idle_per_host);
    connector.set_keepalive(config.tcp_keepalive);

    if config.http1_title_case_headers {
        builder.http1_title_case_headers(true);
    }

    let hyper_client = builder.build(connector);

    // Precompute whether any proxy may require HTTP auth so the hot request
    // path can skip the scan.
    let proxies_maybe_http_auth = proxies.iter().any(|p| p.maybe_has_http_auth());

    Ok(Client {
        inner: Arc::new(ClientRef {
            accepts: config.accepts,
            #[cfg(feature = "cookies")]
            cookie_store: config.cookie_store,
            hyper: hyper_client,
            headers: config.headers,
            redirect_policy: config.redirect_policy,
            referer: config.referer,
            request_timeout: config.timeout,
            proxies,
            proxies_maybe_http_auth,
            https_only: config.https_only,
        }),
    })
}

// Higher-level options

/// Sets the `User-Agent` header to be used by this client.
///
/// # Example
///
/// ```rust
/// # async fn doc() -> Result<(), reqwest::Error> {
/// // Name your user agent after your app?
/// static APP_USER_AGENT: &str = concat!( /// env!("CARGO_PKG_NAME"), /// "/", /// env!("CARGO_PKG_VERSION"), /// ); /// /// let client = reqwest::Client::builder() /// .user_agent(APP_USER_AGENT) /// .build()?; /// let res = client.get("https://www.rust-lang.org").send().await?; /// # Ok(()) /// # } /// ``` pub fn user_agent<V>(mut self, value: V) -> ClientBuilder where V: TryInto<HeaderValue>, V::Error: Into<http::Error>, { match value.try_into() { Ok(value) => { self.config.headers.insert(USER_AGENT, value); } Err(e) => { self.config.error = Some(crate::error::builder(e.into())); } }; self } /// Sets the default headers for every request. /// /// # Example /// /// ```rust /// use reqwest::header; /// # async fn doc() -> Result<(), reqwest::Error> { /// let mut headers = header::HeaderMap::new(); /// headers.insert("X-MY-HEADER", header::HeaderValue::from_static("value")); /// /// // Consider marking security-sensitive headers with `set_sensitive`. /// let mut auth_value = header::HeaderValue::from_static("secret"); /// auth_value.set_sensitive(true); /// headers.insert(header::AUTHORIZATION, auth_value); /// /// // get a client builder /// let client = reqwest::Client::builder() /// .default_headers(headers) /// .build()?; /// let res = client.get("https://www.rust-lang.org").send().await?; /// # Ok(()) /// # } /// ``` /// /// Override the default headers: /// /// ```rust /// use reqwest::header; /// # async fn doc() -> Result<(), reqwest::Error> { /// let mut headers = header::HeaderMap::new(); /// headers.insert("X-MY-HEADER", header::HeaderValue::from_static("value")); /// /// // get a client builder /// let client = reqwest::Client::builder() /// .default_headers(headers) /// .build()?; /// let res = client /// .get("https://www.rust-lang.org") /// .header("X-MY-HEADER", "new_value") /// .send() /// .await?; /// # Ok(()) /// # } /// ``` pub fn
(mut self, headers: HeaderMap) -> ClientBuilder { for (key, value) in headers.iter() { self.config.headers.insert(key, value.clone()); } self } /// Enable a persistent cookie store for the client. /// /// Cookies received in responses will be preserved and included in /// additional requests. /// /// By default, no cookie store is used. /// /// # Optional /// /// This requires the optional `cookies` feature to be enabled. #[cfg(feature = "cookies")] #[cfg_attr(docsrs, doc(cfg(feature = "cookies")))] pub fn cookie_store(mut self, enable: bool) -> ClientBuilder { if enable { self.cookie_provider(Arc::new(cookie::Jar::default())) } else { self.config.cookie_store = None; self } } /// Set the persistent cookie store for the client. /// /// Cookies received in responses will be passed to this store, and /// additional requests will query this store for cookies. /// /// By default, no cookie store is used. /// /// # Optional /// /// This requires the optional `cookies` feature to be enabled. #[cfg(feature = "cookies")] #[cfg_attr(docsrs, doc(cfg(feature = "cookies")))] pub fn cookie_provider<C: cookie::CookieStore + 'static>( mut self, cookie_store: Arc<C>, ) -> ClientBuilder { self.config.cookie_store = Some(cookie_store as _); self } /// Enable auto gzip decompression by checking the `Content-Encoding` response header. /// /// If auto gzip decompression is turned on: /// /// - When sending a request and if the request's headers do not already contain /// an `Accept-Encoding` **and** `Range` values, the `Accept-Encoding` header is set to `gzip`. /// The request body is **not** automatically compressed. /// - When receiving a response, if its headers contain a `Content-Encoding` value of /// `gzip`, both `Content-Encoding` and `Content-Length` are removed from the /// headers' set. The response body is automatically decompressed. /// /// If the `gzip` feature is turned on, the default option is enabled. 
    ///
    /// # Optional
    ///
    /// This requires the optional `gzip` feature to be enabled
    #[cfg(feature = "gzip")]
    #[cfg_attr(docsrs, doc(cfg(feature = "gzip")))]
    pub fn gzip(mut self, enable: bool) -> ClientBuilder {
        self.config.accepts.gzip = enable;
        self
    }

    /// Enable auto brotli decompression by checking the `Content-Encoding` response header.
    ///
    /// If auto brotli decompression is turned on:
    ///
    /// - When sending a request and if the request's headers do not already contain
    ///   an `Accept-Encoding` **and** `Range` values, the `Accept-Encoding` header is set to `br`.
    ///   The request body is **not** automatically compressed.
    /// - When receiving a response, if its headers contain a `Content-Encoding` value of
    ///   `br`, both `Content-Encoding` and `Content-Length` are removed from the
    ///   headers' set. The response body is automatically decompressed.
    ///
    /// If the `brotli` feature is turned on, the default option is enabled.
    ///
    /// # Optional
    ///
    /// This requires the optional `brotli` feature to be enabled
    #[cfg(feature = "brotli")]
    #[cfg_attr(docsrs, doc(cfg(feature = "brotli")))]
    pub fn brotli(mut self, enable: bool) -> ClientBuilder {
        self.config.accepts.brotli = enable;
        self
    }

    /// Enable auto deflate decompression by checking the `Content-Encoding` response header.
    ///
    /// If auto deflate decompression is turned on:
    ///
    /// - When sending a request and if the request's headers do not already contain
    ///   an `Accept-Encoding` **and** `Range` values, the `Accept-Encoding` header is set to `deflate`.
    ///   The request body is **not** automatically compressed.
    /// - When receiving a response, if its headers contain a `Content-Encoding` value that
    ///   equals to `deflate`, both values `Content-Encoding` and `Content-Length` are removed from the
    ///   headers' set. The response body is automatically decompressed.
    ///
    /// If the `deflate` feature is turned on, the default option is enabled.
    ///
    /// # Optional
    ///
    /// This requires the optional `deflate` feature to be enabled
    #[cfg(feature = "deflate")]
    #[cfg_attr(docsrs, doc(cfg(feature = "deflate")))]
    pub fn deflate(mut self, enable: bool) -> ClientBuilder {
        self.config.accepts.deflate = enable;
        self
    }

    /// Disable auto response body gzip decompression.
    ///
    /// This method exists even if the optional `gzip` feature is not enabled.
    /// This can be used to ensure a `Client` doesn't use gzip decompression
    /// even if another dependency were to enable the optional `gzip` feature.
    pub fn no_gzip(self) -> ClientBuilder {
        // Compiled only when the feature exists; otherwise there is nothing
        // to turn off and the builder passes through unchanged.
        #[cfg(feature = "gzip")]
        {
            self.gzip(false)
        }

        #[cfg(not(feature = "gzip"))]
        {
            self
        }
    }

    /// Disable auto response body brotli decompression.
    ///
    /// This method exists even if the optional `brotli` feature is not enabled.
    /// This can be used to ensure a `Client` doesn't use brotli decompression
    /// even if another dependency were to enable the optional `brotli` feature.
    pub fn no_brotli(self) -> ClientBuilder {
        #[cfg(feature = "brotli")]
        {
            self.brotli(false)
        }

        #[cfg(not(feature = "brotli"))]
        {
            self
        }
    }

    /// Disable auto response body deflate decompression.
    ///
    /// This method exists even if the optional `deflate` feature is not enabled.
    /// This can be used to ensure a `Client` doesn't use deflate decompression
    /// even if another dependency were to enable the optional `deflate` feature.
    pub fn no_deflate(self) -> ClientBuilder {
        #[cfg(feature = "deflate")]
        {
            self.deflate(false)
        }

        #[cfg(not(feature = "deflate"))]
        {
            self
        }
    }

    // Redirect options

    /// Set a `RedirectPolicy` for this client.
    ///
    /// Default will follow redirects up to a maximum of 10.
    pub fn redirect(mut self, policy: redirect::Policy) -> ClientBuilder {
        self.config.redirect_policy = policy;
        self
    }

    /// Enable or disable automatic setting of the `Referer` header.
    ///
    /// Default is `true`.
    pub fn referer(mut self, enable: bool) -> ClientBuilder {
        self.config.referer = enable;
        self
    }

    // Proxy options

    /// Add a `Proxy` to the list of proxies the `Client` will use.
    ///
    /// # Note
    ///
    /// Adding a proxy will disable the automatic usage of the "system" proxy.
    pub fn proxy(mut self, proxy: Proxy) -> ClientBuilder {
        self.config.proxies.push(proxy);
        // Any explicit proxy opts out of system-proxy auto-detection.
        self.config.auto_sys_proxy = false;
        self
    }

    /// Clear all `Proxies`, so `Client` will use no proxy anymore.
    ///
    /// This also disables the automatic usage of the "system" proxy.
    pub fn no_proxy(mut self) -> ClientBuilder {
        self.config.proxies.clear();
        self.config.auto_sys_proxy = false;
        self
    }

    // Timeout options

    /// Enables a request timeout.
    ///
    /// The timeout is applied from when the request starts connecting until the
    /// response body has finished.
    ///
    /// Default is no timeout.
    pub fn timeout(mut self, timeout: Duration) -> ClientBuilder {
        self.config.timeout = Some(timeout);
        self
    }

    /// Set a timeout for only the connect phase of a `Client`.
    ///
    /// Default is `None`.
    ///
    /// # Note
    ///
    /// This **requires** the futures be executed in a tokio runtime with
    /// a tokio timer enabled.
    pub fn connect_timeout(mut self, timeout: Duration) -> ClientBuilder {
        self.config.connect_timeout = Some(timeout);
        self
    }

    /// Set whether connections should emit verbose logs.
    ///
    /// Enabling this option will emit [log][] messages at the `TRACE` level
    /// for read and write operations on connections.
    ///
    /// [log]: https://crates.io/crates/log
    pub fn connection_verbose(mut self, verbose: bool) -> ClientBuilder {
        self.config.connection_verbose = verbose;
        self
    }

    // HTTP options

    /// Set an optional timeout for idle sockets being kept-alive.
    ///
    /// Pass `None` to disable timeout.
    ///
    /// Default is 90 seconds.
    pub fn pool_idle_timeout<D>(mut self, val: D) -> ClientBuilder
    where
        D: Into<Option<Duration>>,
    {
        self.config.pool_idle_timeout = val.into();
        self
    }

    /// Sets the maximum idle connection per host allowed in the pool.
    pub fn pool_max_idle_per_host(mut self, max: usize) -> ClientBuilder {
        self.config.pool_max_idle_per_host = max;
        self
    }

    /// Enable case sensitive headers.
    pub fn http1_title_case_headers(mut self) -> ClientBuilder {
        self.config.http1_title_case_headers = true;
        self
    }

    /// Only use HTTP/2.
    pub fn http2_prior_knowledge(mut self) -> ClientBuilder {
        self.config.http2_only = true;
        self
    }

    /// Sets the `SETTINGS_INITIAL_WINDOW_SIZE` option for HTTP2 stream-level flow control.
    ///
    /// Default is currently 65,535 but may change internally to optimize for common uses.
    pub fn http2_initial_stream_window_size(mut self, sz: impl Into<Option<u32>>) -> ClientBuilder {
        self.config.http2_initial_stream_window_size = sz.into();
        self
    }

    /// Sets the max connection-level flow control for HTTP2
    ///
    /// Default is currently 65,535 but may change internally to optimize for common uses.
    pub fn http2_initial_connection_window_size(
        mut self,
        sz: impl Into<Option<u32>>,
    ) -> ClientBuilder {
        self.config.http2_initial_connection_window_size = sz.into();
        self
    }

    /// Sets whether to use an adaptive flow control.
    ///
    /// Enabling this will override the limits set in `http2_initial_stream_window_size` and
    /// `http2_initial_connection_window_size`.
    pub fn http2_adaptive_window(mut self, enabled: bool) -> ClientBuilder {
        self.config.http2_adaptive_window = enabled;
        self
    }

    /// Sets the maximum frame size to use for HTTP2.
    ///
    /// Default is currently 16,384 but may change internally to optimize for common uses.
    pub fn http2_max_frame_size(mut self, sz: impl Into<Option<u32>>) -> ClientBuilder {
        self.config.http2_max_frame_size = sz.into();
        self
    }

    // TCP options

    /// Set whether sockets have `SO_NODELAY` enabled.
    ///
    /// Default is `true`.
    pub fn tcp_nodelay(mut self, enabled: bool) -> ClientBuilder {
        self.config.nodelay = enabled;
        self
    }

    /// Bind to a local IP Address.
    ///
    /// # Example
    ///
    /// ```
    /// use std::net::IpAddr;
    /// let local_addr = IpAddr::from([12, 4, 1, 8]);
    /// let client = reqwest::Client::builder()
    ///     .local_address(local_addr)
    ///     .build().unwrap();
    /// ```
    pub fn local_address<T>(mut self, addr: T) -> ClientBuilder
    where
        T: Into<Option<IpAddr>>,
    {
        self.config.local_address = addr.into();
        self
    }

    /// Set that all sockets have `SO_KEEPALIVE` set with the supplied duration.
    ///
    /// If `None`, the option will not be set.
    pub fn tcp_keepalive<D>(mut self, val: D) -> ClientBuilder
    where
        D: Into<Option<Duration>>,
    {
        self.config.tcp_keepalive = val.into();
        self
    }

    // TLS options

    /// Add a custom root certificate.
    ///
    /// This can be used to connect to a server that has a self-signed
    /// certificate for example.
    ///
    /// # Optional
    ///
    /// This requires the optional `default-tls`, `native-tls`, or `rustls-tls(-...)`
    /// feature to be enabled.
    #[cfg(feature = "__tls")]
    #[cfg_attr(
        docsrs,
        doc(cfg(any(
            feature = "default-tls",
            feature = "native-tls",
            feature = "rustls-tls"
        )))
    )]
    pub fn add_root_certificate(mut self, cert: Certificate) -> ClientBuilder {
        self.config.root_certs.push(cert);
        self
    }

    /// Controls the use of built-in/preloaded certificates during certificate validation.
    ///
    /// Defaults to `true` -- built-in system certs will be used.
    ///
    /// # Optional
    ///
    /// This requires the optional `default-tls`, `native-tls`, or `rustls-tls(-...)`
    /// feature to be enabled.
    #[cfg(feature = "__tls")]
    #[cfg_attr(
        docsrs,
        doc(cfg(any(
            feature = "default-tls",
            feature = "native-tls",
            feature = "rustls-tls"
        )))
    )]
    pub fn tls_built_in_root_certs(mut self, tls_built_in_root_certs: bool) -> ClientBuilder {
        self.config.tls_built_in_root_certs = tls_built_in_root_certs;
        self
    }

    /// Sets the identity to be used for client certificate authentication.
    ///
    /// # Optional
    ///
    /// This requires the optional `native-tls` or `rustls-tls(-...)` feature to be
    /// enabled.
    #[cfg(any(feature = "native-tls", feature = "__rustls"))]
    #[cfg_attr(docsrs, doc(cfg(any(feature = "native-tls", feature = "rustls-tls"))))]
    pub fn identity(mut self, identity: Identity) -> ClientBuilder {
        self.config.identity = Some(identity);
        self
    }

    /// Controls the use of hostname verification.
    ///
    /// Defaults to `false`.
    ///
    /// # Warning
    ///
    /// You should think very carefully before you use this method. If
    /// hostname verification is not used, any valid certificate for any
    /// site will be trusted for use from any other. This introduces a
    /// significant vulnerability to man-in-the-middle attacks.
    ///
    /// # Optional
    ///
    /// This requires the optional `native-tls` feature to be enabled.
    #[cfg(feature = "native-tls")]
    #[cfg_attr(docsrs, doc(cfg(feature = "native-tls")))]
    pub fn danger_accept_invalid_hostnames(
        mut self,
        accept_invalid_hostname: bool,
    ) -> ClientBuilder {
        // Stored inverted: the config flag records whether verification is ON.
        self.config.hostname_verification = !accept_invalid_hostname;
        self
    }

    /// Controls the use of certificate validation.
    ///
    /// Defaults to `false`.
    ///
    /// # Warning
    ///
    /// You should think very carefully before using this method. If
    /// invalid certificates are trusted, *any* certificate for *any* site
    /// will be trusted for use. This includes expired certificates. This
    /// introduces significant vulnerabilities, and should only be used
    /// as a last resort.
    ///
    /// # Optional
    ///
    /// This requires the optional `default-tls`, `native-tls`, or `rustls-tls(-...)`
    /// feature to be enabled.
    #[cfg(feature = "__tls")]
    #[cfg_attr(
        docsrs,
        doc(cfg(any(
            feature = "default-tls",
            feature = "native-tls",
            feature = "rustls-tls"
        )))
    )]
    pub fn danger_accept_invalid_certs(mut self, accept_invalid_certs: bool) -> ClientBuilder {
        // Stored inverted, same convention as hostname_verification above.
        self.config.certs_verification = !accept_invalid_certs;
        self
    }

    /// Force using the native TLS backend.
    ///
    /// Since multiple TLS backends can be optionally enabled, this option will
    /// force the `native-tls` backend to be used for this `Client`.
    ///
    /// # Optional
    ///
    /// This requires the optional `native-tls` feature to be enabled.
    #[cfg(feature = "native-tls")]
    #[cfg_attr(docsrs, doc(cfg(feature = "native-tls")))]
    pub fn use_native_tls(mut self) -> ClientBuilder {
        self.config.tls = TlsBackend::Default;
        self
    }

    /// Force using the Rustls TLS backend.
    ///
    /// Since multiple TLS backends can be optionally enabled, this option will
    /// force the `rustls` backend to be used for this `Client`.
    ///
    /// # Optional
    ///
    /// This requires the optional `rustls-tls(-...)` feature to be enabled.
    #[cfg(feature = "__rustls")]
    #[cfg_attr(docsrs, doc(cfg(feature = "rustls-tls")))]
    pub fn use_rustls_tls(mut self) -> ClientBuilder {
        self.config.tls = TlsBackend::Rustls;
        self
    }

    /// Use a preconfigured TLS backend.
    ///
    /// If the passed `Any` argument is not a TLS backend that reqwest
    /// understands, the `ClientBuilder` will error when calling `build`.
    ///
    /// # Advanced
    ///
    /// This is an advanced option, and can be somewhat brittle. Usage requires
    /// keeping the preconfigured TLS argument version in sync with reqwest,
    /// since version mismatches will result in an "unknown" TLS backend.
    ///
    /// If possible, it's preferable to use the methods on `ClientBuilder`
    /// to configure reqwest's TLS.
    ///
    /// # Optional
    ///
    /// This requires one of the optional features `native-tls` or
    /// `rustls-tls(-...)` to be enabled.
    #[cfg(any(feature = "native-tls", feature = "__rustls",))]
    #[cfg_attr(docsrs, doc(cfg(any(feature = "native-tls", feature = "rustls-tls"))))]
    pub fn use_preconfigured_tls(mut self, tls: impl Any) -> ClientBuilder {
        // Wrap in Option so the value can be moved out via `downcast_mut` +
        // `take` — `dyn Any` alone only yields references.
        let mut tls = Some(tls);

        #[cfg(feature = "native-tls")]
        {
            if let Some(conn) =
                (&mut tls as &mut dyn Any).downcast_mut::<Option<native_tls_crate::TlsConnector>>()
            {
                let tls = conn.take().expect("is definitely Some");
                let tls = crate::tls::TlsBackend::BuiltNativeTls(tls);
                self.config.tls = tls;
                return self;
            }
        }

        #[cfg(feature = "__rustls")]
        {
            if let Some(conn) =
                (&mut tls as &mut dyn Any).downcast_mut::<Option<rustls::ClientConfig>>()
            {
                let tls = conn.take().expect("is definitely Some");
                let tls = crate::tls::TlsBackend::BuiltRustls(tls);
                self.config.tls = tls;
                return self;
            }
        }

        // Otherwise, we don't recognize the TLS backend!
        // Recorded as a sentinel; `build()` reports the error.
        self.config.tls = crate::tls::TlsBackend::UnknownPreconfigured;
        self
    }

    /// Enables the [trust-dns](trust_dns_resolver) async resolver instead of a default threadpool using `getaddrinfo`.
    ///
    /// If the `trust-dns` feature is turned on, the default option is enabled.
    ///
    /// # Optional
    ///
    /// This requires the optional `trust-dns` feature to be enabled
    #[cfg(feature = "trust-dns")]
    #[cfg_attr(docsrs, doc(cfg(feature = "trust-dns")))]
    pub fn trust_dns(mut self, enable: bool) -> ClientBuilder {
        self.config.trust_dns = enable;
        self
    }

    /// Disables the trust-dns async resolver.
    ///
    /// This method exists even if the optional `trust-dns` feature is not enabled.
    /// This can be used to ensure a `Client` doesn't use the trust-dns async resolver
    /// even if another dependency were to enable the optional `trust-dns` feature.
    pub fn no_trust_dns(self) -> ClientBuilder {
        #[cfg(feature = "trust-dns")]
        {
            self.trust_dns(false)
        }

        #[cfg(not(feature = "trust-dns"))]
        {
            self
        }
    }

    /// Restrict the Client to be used with HTTPS only requests.
    ///
    /// Defaults to false.
    pub fn https_only(mut self, enabled: bool) -> ClientBuilder {
        self.config.https_only = enabled;
        self
    }

    /// Override DNS resolution for specific domains to particular IP addresses.
    ///
    /// # Warning
    ///
    /// Since the DNS protocol has no notion of ports, if you wish to send
    /// traffic to a particular port you must include this port in the URL
    /// itself, any port in the overridden addr will be ignored and traffic sent
    /// to the conventional port for the given scheme (e.g. 80 for http).
    pub fn resolve(mut self, domain: &str, addr: SocketAddr) -> ClientBuilder {
        self.config.dns_overrides.insert(domain.to_string(), addr);
        self
    }
}

type HyperClient = hyper::Client<Connector, super::body::ImplStream>;

impl Default for Client {
    fn default() -> Self {
        Self::new()
    }
}

impl Client {
    /// Constructs a new `Client`.
    ///
    /// # Panics
    ///
    /// This method panics if a TLS backend cannot be initialized, or the resolver
    /// cannot load the system configuration.
    ///
    /// Use `Client::builder()` if you wish to handle the failure as an `Error`
    /// instead of panicking.
    pub fn new() -> Client {
        ClientBuilder::new().build().expect("Client::new()")
    }

    /// Creates a `ClientBuilder` to configure a `Client`.
    ///
    /// This is the same as `ClientBuilder::new()`.
    pub fn builder() -> ClientBuilder {
        ClientBuilder::new()
    }

    /// Convenience method to make a `GET` request to a URL.
    ///
    /// # Errors
    ///
    /// This method fails whenever the supplied `Url` cannot be parsed.
    pub fn get<U: IntoUrl>(&self, url: U) -> RequestBuilder {
        self.request(Method::GET, url)
    }

    /// Convenience method to make a `POST` request to a URL.
    ///
    /// # Errors
    ///
    /// This method fails whenever the supplied `Url` cannot be parsed.
    pub fn post<U: IntoUrl>(&self, url: U) -> RequestBuilder {
        self.request(Method::POST, url)
    }

    /// Convenience method to make a `PUT` request to a URL.
    ///
    /// # Errors
    ///
    /// This method fails whenever the supplied `Url` cannot be parsed.
    pub fn put<U: IntoUrl>(&self, url: U) -> RequestBuilder {
        self.request(Method::PUT, url)
    }

    /// Convenience method to make a `PATCH` request to a URL.
    ///
    /// # Errors
    ///
    /// This method fails whenever the supplied `Url` cannot be parsed.
    pub fn patch<U: IntoUrl>(&self, url: U) -> RequestBuilder {
        self.request(Method::PATCH, url)
    }

    /// Convenience method to make a `DELETE` request to a URL.
    ///
    /// # Errors
    ///
    /// This method fails whenever the supplied `Url` cannot be parsed.
    pub fn delete<U: IntoUrl>(&self, url: U) -> RequestBuilder {
        self.request(Method::DELETE, url)
    }

    /// Convenience method to make a `HEAD` request to a URL.
    ///
    /// # Errors
    ///
    /// This method fails whenever the supplied `Url` cannot be parsed.
    pub fn head<U: IntoUrl>(&self, url: U) -> RequestBuilder {
        self.request(Method::HEAD, url)
    }

    /// Start building a `Request` with the `Method` and `Url`.
    ///
    /// Returns a `RequestBuilder`, which will allow setting headers and
    /// the request body before sending.
    ///
    /// # Errors
    ///
    /// This method fails whenever the supplied `Url` cannot be parsed.
    pub fn request<U: IntoUrl>(&self, method: Method, url: U) -> RequestBuilder {
        // A URL parse failure is carried inside the builder and surfaced on send.
        let req = url.into_url().map(move |url| Request::new(method, url));
        RequestBuilder::new(self.clone(), req)
    }

    /// Executes a `Request`.
    ///
    /// A `Request` can be built manually with `Request::new()` or obtained
    /// from a RequestBuilder with `RequestBuilder::build()`.
    ///
    /// You should prefer to use the `RequestBuilder` and
    /// `RequestBuilder::send()`.
    ///
    /// # Errors
    ///
    /// This method fails if there was an error while sending request,
    /// redirect loop was detected or redirect limit was exhausted.
    pub fn execute(
        &self,
        request: Request,
    ) -> impl Future<Output = Result<Response, crate::Error>> {
        self.execute_request(request)
    }

    /// Builds the hyper request and returns the `Pending` future that drives it
    /// (including redirect handling in its `poll`).
    pub(super) fn execute_request(&self, req: Request) -> Pending {
        let (method, url, mut headers, body, timeout, version) = req.pieces();
        if url.scheme() != "http" && url.scheme() != "https" {
            return Pending::new_err(error::url_bad_scheme(url));
        }

        // check if we're in https_only mode and check the scheme of the current URL
        if self.inner.https_only && url.scheme() != "https" {
            return Pending::new_err(error::url_bad_scheme(url));
        }

        // insert default headers in the request headers
        // without overwriting already appended headers.
        for (key, value) in &self.inner.headers {
            if let Entry::Vacant(entry) = headers.entry(key) {
                entry.insert(value.clone());
            }
        }

        // Add cookies from the cookie store.
        #[cfg(feature = "cookies")]
        {
            if let Some(cookie_store) = self.inner.cookie_store.as_ref() {
                // An explicitly set Cookie header takes precedence over the store.
                if headers.get(crate::header::COOKIE).is_none() {
                    add_cookie_header(&mut headers, &**cookie_store, &url);
                }
            }
        }

        let accept_encoding = self.inner.accepts.as_str();

        if let Some(accept_encoding) = accept_encoding {
            // Don't set Accept-Encoding when the caller set it, or when a Range
            // request is being made (decompression would break byte offsets).
            if !headers.contains_key(ACCEPT_ENCODING) && !headers.contains_key(RANGE) {
                headers.insert(ACCEPT_ENCODING, HeaderValue::from_static(accept_encoding));
            }
        }

        let uri = expect_uri(&url);

        // `reusable` keeps a clonable copy of the body (when possible) so it
        // can be resent after a 307/308 redirect.
        let (reusable, body) = match body {
            Some(body) => {
                let (reusable, body) = body.try_reuse();
                (Some(reusable), body)
            }
            None => (None, Body::empty()),
        };

        self.proxy_auth(&uri, &mut headers);

        let mut req = hyper::Request::builder()
            .method(method.clone())
            .uri(uri)
            .version(version)
            .body(body.into_stream())
            .expect("valid request parts");

        // Per-request timeout wins over the client-wide default.
        let timeout = timeout
            .or(self.inner.request_timeout)
            .map(tokio::time::sleep)
            .map(Box::pin);

        *req.headers_mut() = headers.clone();

        let in_flight = self.inner.hyper.request(req);

        Pending {
            inner: PendingInner::Request(PendingRequest {
                method,
                url,
                headers,
                body: reusable,
                urls: Vec::new(),
                client: self.inner.clone(),
                in_flight,
                timeout,
            }),
        }
    }

    /// Inserts `Proxy-Authorization` for plain-http destinations when a
    /// matching proxy provides basic-auth credentials.
    fn proxy_auth(&self, dst: &Uri, headers: &mut HeaderMap) {
        if !self.inner.proxies_maybe_http_auth {
            return;
        }

        // Only set the header here if the destination scheme is 'http',
        // since otherwise, the header will be included in the CONNECT tunnel
        // request instead.
        if dst.scheme() != Some(&Scheme::HTTP) {
            return;
        }

        if headers.contains_key(PROXY_AUTHORIZATION) {
            return;
        }

        for proxy in self.inner.proxies.iter() {
            if proxy.is_match(dst) {
                if let Some(header) = proxy.http_basic_auth(dst) {
                    headers.insert(PROXY_AUTHORIZATION, header);
                }

                // First matching proxy wins.
                break;
            }
        }
    }
}

impl fmt::Debug for Client {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut builder = f.debug_struct("Client");
        self.inner.fmt_fields(&mut builder);
        builder.finish()
    }
}

impl fmt::Debug for ClientBuilder {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut builder = f.debug_struct("ClientBuilder");
        self.config.fmt_fields(&mut builder);
        builder.finish()
    }
}

impl Config {
    fn fmt_fields(&self, f: &mut fmt::DebugStruct<'_, '_>) {
        // Instead of deriving Debug, only print fields when their output
        // would provide relevant or interesting data.
        #[cfg(feature = "cookies")]
        {
            // NOTE(review): `if self.cookie_store.is_some()` would be the
            // idiomatic form here (clippy: redundant_pattern_matching).
            if let Some(_) = self.cookie_store {
                f.field("cookie_store", &true);
            }
        }

        f.field("accepts", &self.accepts);

        if !self.proxies.is_empty() {
            f.field("proxies", &self.proxies);
        }

        if !self.redirect_policy.is_default() {
            f.field("redirect_policy", &self.redirect_policy);
        }

        if self.referer {
            f.field("referer", &true);
        }

        f.field("default_headers", &self.headers);

        if self.http1_title_case_headers {
            f.field("http1_title_case_headers", &true);
        }

        if self.http2_only {
            f.field("http2_prior_knowledge", &true);
        }

        if let Some(ref d) = self.connect_timeout {
            f.field("connect_timeout", d);
        }

        if let Some(ref d) = self.timeout {
            f.field("timeout", d);
        }

        if let Some(ref v) = self.local_address {
            f.field("local_address", v);
        }

        if self.nodelay {
            f.field("tcp_nodelay", &true);
        }

        #[cfg(feature = "native-tls")]
        {
            if !self.hostname_verification {
                f.field("danger_accept_invalid_hostnames", &true);
            }
        }

        #[cfg(feature = "__tls")]
        {
            if !self.certs_verification {
                f.field("danger_accept_invalid_certs", &true);
            }
        }

        #[cfg(all(feature = "native-tls-crate", feature = "__rustls"))]
        {
            f.field("tls_backend", &self.tls);
        }

        if !self.dns_overrides.is_empty() {
            f.field("dns_overrides", &self.dns_overrides);
        }
    }
}

// Shared, immutable state behind the `Arc` inside every `Client` clone.
struct ClientRef {
    accepts: Accepts,
    #[cfg(feature = "cookies")]
    cookie_store: Option<Arc<dyn cookie::CookieStore>>,
    headers: HeaderMap,
    hyper: HyperClient,
    redirect_policy: redirect::Policy,
    referer: bool,
    request_timeout: Option<Duration>,
    proxies: Arc<Vec<Proxy>>,
    proxies_maybe_http_auth: bool,
    https_only: bool,
}

impl ClientRef {
    fn fmt_fields(&self, f: &mut fmt::DebugStruct<'_, '_>) {
        // Instead of deriving Debug, only print fields when their output
        // would provide relevant or interesting data.
        #[cfg(feature = "cookies")]
        {
            if let Some(_) = self.cookie_store {
                f.field("cookie_store", &true);
            }
        }

        f.field("accepts", &self.accepts);

        if !self.proxies.is_empty() {
            f.field("proxies", &self.proxies);
        }

        if !self.redirect_policy.is_default() {
            f.field("redirect_policy", &self.redirect_policy);
        }

        if self.referer {
            f.field("referer", &true);
        }

        f.field("default_headers", &self.headers);

        if let Some(ref d) = self.request_timeout {
            f.field("timeout", d);
        }
    }
}

pin_project! {
    pub(super) struct Pending {
        #[pin]
        inner: PendingInner,
    }
}

enum PendingInner {
    Request(PendingRequest),
    // Option so the error can be taken by value in `poll`.
    Error(Option<crate::Error>),
}

pin_project! {
    struct PendingRequest {
        method: Method,
        url: Url,
        headers: HeaderMap,
        // Outer None: body not reusable; inner Option: reusable bytes, if any.
        body: Option<Option<Bytes>>,
        // History of URLs already visited, fed to the redirect policy.
        urls: Vec<Url>,
        client: Arc<ClientRef>,
        #[pin]
        in_flight: ResponseFuture,
        #[pin]
        timeout: Option<Pin<Box<Sleep>>>,
    }
}

impl PendingRequest {
    // Pin-projection accessors for the fields `poll` needs.
    fn in_flight(self: Pin<&mut Self>) -> Pin<&mut ResponseFuture> {
        self.project().in_flight
    }

    fn timeout(self: Pin<&mut Self>) -> Pin<&mut Option<Pin<Box<Sleep>>>> {
        self.project().timeout
    }

    fn urls(self: Pin<&mut Self>) -> &mut Vec<Url> {
        self.project().urls
    }

    fn headers(self: Pin<&mut Self>) -> &mut HeaderMap {
        self.project().headers
    }
}

impl Pending {
    pub(super) fn new_err(err: crate::Error) -> Pending {
        Pending {
            inner: PendingInner::Error(Some(err)),
        }
    }

    fn inner(self: Pin<&mut Self>) -> Pin<&mut PendingInner> {
        self.project().inner
    }
}

impl Future for Pending {
    type Output = Result<Response, crate::Error>;

    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let inner = self.inner();
        match inner.get_mut() {
            PendingInner::Request(ref mut req) => Pin::new(req).poll(cx),
            PendingInner::Error(ref mut err) => Poll::Ready(Err(err
                .take()
                .expect("Pending error polled more than once"))),
        }
    }
}

impl Future for PendingRequest {
    type Output = Result<Response, crate::Error>;

    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        // Check the total-request timeout before polling the in-flight request.
        if let Some(delay) =
            self.as_mut().timeout().as_mut().as_pin_mut() {
            if let Poll::Ready(()) = delay.poll(cx) {
                return Poll::Ready(Err(
                    crate::error::request(crate::error::TimedOut).with_url(self.url.clone())
                ));
            }
        }

        // Loop so a followed redirect re-polls the new in-flight request
        // immediately instead of returning Pending.
        loop {
            let res = match self.as_mut().in_flight().as_mut().poll(cx) {
                Poll::Ready(Err(e)) => {
                    return Poll::Ready(Err(crate::error::request(e).with_url(self.url.clone())));
                }
                Poll::Ready(Ok(res)) => res,
                Poll::Pending => return Poll::Pending,
            };

            #[cfg(feature = "cookies")]
            {
                if let Some(ref cookie_store) = self.client.cookie_store {
                    let mut cookies =
                        cookie::extract_response_cookie_headers(&res.headers()).peekable();
                    if cookies.peek().is_some() {
                        cookie_store.set_cookies(&mut cookies, &self.url);
                    }
                }
            }

            let should_redirect = match res.status() {
                // 301/302/303: redirect with the body dropped and the method
                // downgraded to GET (except GET/HEAD, which are kept).
                StatusCode::MOVED_PERMANENTLY | StatusCode::FOUND | StatusCode::SEE_OTHER => {
                    self.body = None;
                    for header in &[
                        TRANSFER_ENCODING,
                        CONTENT_ENCODING,
                        CONTENT_TYPE,
                        CONTENT_LENGTH,
                    ] {
                        self.headers.remove(header);
                    }

                    match self.method {
                        Method::GET | Method::HEAD => {}
                        _ => {
                            self.method = Method::GET;
                        }
                    }
                    true
                }
                // 307/308: method and body preserved, so only follow when the
                // body is absent or was captured as reusable bytes.
                StatusCode::TEMPORARY_REDIRECT | StatusCode::PERMANENT_REDIRECT => {
                    match self.body {
                        Some(Some(_)) | None => true,
                        Some(None) => false,
                    }
                }
                _ => false,
            };

            if should_redirect {
                let loc = res.headers().get(LOCATION).and_then(|val| {
                    let loc = (|| -> Option<Url> {
                        // Some sites may send a utf-8 Location header,
                        // even though we're supposed to treat those bytes
                        // as opaque, we'll check specifically for utf8.
                        self.url.join(str::from_utf8(val.as_bytes()).ok()?).ok()
                    })();

                    // Check that the `url` is also a valid `http::Uri`.
                    //
                    // If not, just log it and skip the redirect.
                    let loc = loc.and_then(|url| {
                        if try_uri(&url).is_some() {
                            Some(url)
                        } else {
                            None
                        }
                    });

                    if loc.is_none() {
                        debug!("Location header had invalid URI: {:?}", val);
                    }
                    loc
                });
                if let Some(loc) = loc {
                    if self.client.referer {
                        if let Some(referer) = make_referer(&loc, &self.url) {
                            self.headers.insert(REFERER, referer);
                        }
                    }
                    let url = self.url.clone();
                    self.as_mut().urls().push(url);
                    let action = self
                        .client
                        .redirect_policy
                        .check(res.status(), &loc, &self.urls);

                    match action {
                        redirect::ActionKind::Follow => {
                            debug!("redirecting '{}' to '{}'", self.url, loc);
                            self.url = loc;
                            let mut headers =
                                std::mem::replace(self.as_mut().headers(), HeaderMap::new());

                            // Strip auth/cookie headers when crossing hosts.
                            remove_sensitive_headers(&mut headers, &self.url, &self.urls);
                            let uri = expect_uri(&self.url);
                            let body = match self.body {
                                Some(Some(ref body)) => Body::reusable(body.clone()),
                                _ => Body::empty(),
                            };
                            let mut req = hyper::Request::builder()
                                .method(self.method.clone())
                                .uri(uri.clone())
                                .body(body.into_stream())
                                .expect("valid request parts");

                            // Add cookies from the cookie store.
                            #[cfg(feature = "cookies")]
                            {
                                if let Some(ref cookie_store) = self.client.cookie_store {
                                    add_cookie_header(&mut headers, &**cookie_store, &self.url);
                                }
                            }

                            *req.headers_mut() = headers.clone();

                            std::mem::swap(self.as_mut().headers(), &mut headers);

                            *self.as_mut().in_flight().get_mut() = self.client.hyper.request(req);
                            continue;
                        }
                        redirect::ActionKind::Stop => {
                            debug!("redirect policy disallowed redirection to '{}'", loc);
                        }
                        redirect::ActionKind::Error(err) => {
                            return Poll::Ready(Err(crate::error::redirect(err, self.url.clone())));
                        }
                    }
                }
            }

            debug!("response '{}' for {}", res.status(), self.url);
            let res = Response::new(
                res,
                self.url.clone(),
                self.client.accepts,
                // Hand the remaining timeout to the Response so body reads
                // stay covered by the same deadline.
                self.timeout.take(),
            );
            return Poll::Ready(Ok(res));
        }
    }
}

impl fmt::Debug for Pending {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.inner {
            PendingInner::Request(ref req) => f
                .debug_struct("Pending")
                .field("method", &req.method)
                .field("url", &req.url)
                .finish(),
            PendingInner::Error(ref err) => f.debug_struct("Pending").field("error", err).finish(),
        }
    }
}

/// Builds the `Referer` value for a redirect, or `None` when downgrading
/// from https to http (leaking the referrer would be unsafe).
fn make_referer(next: &Url, previous: &Url) -> Option<HeaderValue> {
    if next.scheme() == "http" && previous.scheme() == "https" {
        return None;
    }

    let mut referer = previous.clone();
    // Credentials and fragments must never appear in a Referer header.
    let _ = referer.set_username("");
    let _ = referer.set_password(None);
    referer.set_fragment(None);
    referer.as_str().parse().ok()
}

#[cfg(feature = "cookies")]
fn add_cookie_header(headers: &mut HeaderMap, cookie_store: &dyn cookie::CookieStore, url: &Url) {
    if let Some(header) = cookie_store.cookies(url) {
        headers.insert(crate::header::COOKIE, header);
    }
}

#[cfg(feature = "rustls-tls-native-roots")]
lazy_static!
{ static ref NATIVE_ROOTS: std::io::Result<RootCertStore> = rustls_native_certs::load_native_certs().map_err(|e| e.1); } #[cfg(test)] mod tests { #[tokio::test] async fn execute_request_rejects_invald_urls() { let url_str = "hxxps://www.rust-lang.org/"; let url = url::Url::parse(url_str).unwrap(); let result = crate::get(url.clone()).await; assert!(result.is_err()); let err = result.err().unwrap(); assert!(err.is_builder()); assert_eq!(url_str, err.url().unwrap().as_str()); } }
default_headers
models.py
# Copyright 2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os.path import time import traceback from cinderclient import exceptions as cinder_exceptions from eventlet import greenthread from eventlet.timeout import Timeout from heatclient import exc as heat_exceptions from novaclient import exceptions as nova_exceptions from oslo_log import log as logging from oslo_utils import timeutils from swiftclient.client import ClientException from trove.backup import models as bkup_models from trove.backup.models import Backup from trove.backup.models import DBBackup from trove.backup.state import BackupState from trove.cluster.models import Cluster from trove.cluster.models import DBCluster from trove.cluster import tasks from trove.common import cfg from trove.common import crypto_utils as cu from trove.common import exception from trove.common.exception import BackupCreationError from trove.common.exception import GuestError from trove.common.exception import GuestTimeout from trove.common.exception import InvalidModelError from trove.common.exception import MalformedSecurityGroupRuleError from trove.common.exception import PollTimeOut from trove.common.exception import TroveError from trove.common.exception import VolumeCreationFailure from trove.common.i18n import _ from trove.common import instance as rd_instance from trove.common.instance import ServiceStatuses from trove.common.notification import ( DBaaSInstanceRestart, DBaaSInstanceUpgrade, 
EndNotification, StartNotification, TroveInstanceCreate, TroveInstanceModifyVolume, TroveInstanceModifyFlavor, TroveInstanceDelete) import trove.common.remote as remote from trove.common.remote import create_cinder_client from trove.common.remote import create_dns_client from trove.common.remote import create_guest_client from trove.common.remote import create_heat_client from trove.common import server_group as srv_grp from trove.common.strategies.cluster import strategy from trove.common import template from trove.common import utils from trove.common.utils import try_recover from trove.extensions.mysql import models as mysql_models from trove.extensions.security_group.models import ( SecurityGroupInstanceAssociation) from trove.extensions.security_group.models import SecurityGroup from trove.extensions.security_group.models import SecurityGroupRule from trove.instance import models as inst_models from trove.instance.models import BuiltInstance from trove.instance.models import DBInstance from trove.instance.models import FreshInstance from trove.instance.models import Instance from trove.instance.models import InstanceServiceStatus from trove.instance.models import InstanceStatus from trove.instance.tasks import InstanceTasks from trove.module import models as module_models from trove.module import views as module_views from trove.quota.quota import run_with_quotas from trove import rpc LOG = logging.getLogger(__name__) CONF = cfg.CONF VOLUME_TIME_OUT = CONF.volume_time_out # seconds. DNS_TIME_OUT = CONF.dns_time_out # seconds. RESIZE_TIME_OUT = CONF.resize_time_out # seconds. REVERT_TIME_OUT = CONF.revert_time_out # seconds. HEAT_TIME_OUT = CONF.heat_time_out # seconds. USAGE_SLEEP_TIME = CONF.usage_sleep_time # seconds. 
HEAT_STACK_SUCCESSFUL_STATUSES = [('CREATE', 'CREATE_COMPLETE')] HEAT_RESOURCE_SUCCESSFUL_STATE = 'CREATE_COMPLETE' use_nova_server_volume = CONF.use_nova_server_volume use_heat = CONF.use_heat class NotifyMixin(object): """Notification Mixin This adds the ability to send usage events to an Instance object. """ def _get_service_id(self, datastore_manager, id_map): if datastore_manager in id_map: datastore_manager_id = id_map[datastore_manager] else: datastore_manager_id = cfg.UNKNOWN_SERVICE_ID LOG.error(_("Datastore ID for Manager (%s) is not configured") % datastore_manager) return datastore_manager_id def send_usage_event(self, event_type, **kwargs): event_type = 'trove.instance.%s' % event_type publisher_id = CONF.host # Grab the instance size from the kwargs or from the nova client instance_size = kwargs.pop('instance_size', None) flavor = self.nova_client.flavors.get(self.flavor_id) server = kwargs.pop('server', None) if server is None: server = self.nova_client.servers.get(self.server_id) az = getattr(server, 'OS-EXT-AZ:availability_zone', None) # Default payload created_time = timeutils.isotime(self.db_info.created) payload = { 'availability_zone': az, 'created_at': created_time, 'name': self.name, 'instance_id': self.id, 'instance_name': self.name, 'instance_size': instance_size or flavor.ram, 'instance_type': flavor.name, 'instance_type_id': flavor.id, 'launched_at': created_time, 'nova_instance_id': self.server_id, 'region': CONF.region, 'state_description': self.status, 'state': self.status, 'tenant_id': self.tenant_id, 'user_id': self.context.user, } if CONF.get(self.datastore_version.manager).volume_support: payload.update({ 'volume_size': self.volume_size, 'nova_volume_id': self.volume_id }) payload['service_id'] = self._get_service_id( self.datastore_version.manager, CONF.notification_service_id) # Update payload with all other kwargs payload.update(kwargs) LOG.debug('Sending event: %(event_type)s, %(payload)s' % {'event_type': event_type, 
'payload': payload}) notifier = rpc.get_notifier( service="taskmanager", publisher_id=publisher_id) notifier.info(self.context, event_type, payload) class ConfigurationMixin(object): """Configuration Mixin Configuration related tasks for instances and resizes. """ def _render_config(self, flavor): config = template.SingleInstanceConfigTemplate( self.datastore_version, flavor, self.id) config.render() return config def _render_replica_source_config(self, flavor): config = template.ReplicaSourceConfigTemplate( self.datastore_version, flavor, self.id) config.render() return config def _render_replica_config(self, flavor): config = template.ReplicaConfigTemplate( self.datastore_version, flavor, self.id) config.render() return config def _render_config_dict(self, flavor): config = template.SingleInstanceConfigTemplate( self.datastore_version, flavor, self.id) ret = config.render_dict() LOG.debug("the default template dict of mysqld section: %s" % ret) return ret class ClusterTasks(Cluster): def update_statuses_on_failure(self, cluster_id, shard_id=None, status=None): if CONF.update_status_on_fail: if shard_id: db_instances = DBInstance.find_all(cluster_id=cluster_id, shard_id=shard_id).all() else: db_instances = DBInstance.find_all( cluster_id=cluster_id).all() for db_instance in db_instances: db_instance.set_task_status( status or InstanceTasks.BUILDING_ERROR_SERVER) db_instance.save() @classmethod def get_ip(cls, instance): return instance.get_visible_ip_addresses()[0] def _all_instances_ready(self, instance_ids, cluster_id, shard_id=None): """Wait for all instances to get READY.""" return self._all_instances_acquire_status( instance_ids, cluster_id, shard_id, ServiceStatuses.INSTANCE_READY, fast_fail_statuses=[ServiceStatuses.FAILED, ServiceStatuses.FAILED_TIMEOUT_GUESTAGENT]) def _all_instances_shutdown(self, instance_ids, cluster_id, shard_id=None): """Wait for all instances to go SHUTDOWN.""" return self._all_instances_acquire_status( instance_ids, cluster_id, 
shard_id, ServiceStatuses.SHUTDOWN, fast_fail_statuses=[ServiceStatuses.FAILED, ServiceStatuses.FAILED_TIMEOUT_GUESTAGENT]) def _all_instances_running(self, instance_ids, cluster_id, shard_id=None): """Wait for all instances to become ACTIVE.""" return self._all_instances_acquire_status( instance_ids, cluster_id, shard_id, ServiceStatuses.RUNNING, fast_fail_statuses=[ServiceStatuses.FAILED, ServiceStatuses.FAILED_TIMEOUT_GUESTAGENT]) def _all_instances_acquire_status( self, instance_ids, cluster_id, shard_id, expected_status, fast_fail_statuses=None): def _is_fast_fail_status(status): return ((fast_fail_statuses is not None) and ((status == fast_fail_statuses) or (status in fast_fail_statuses))) def _all_have_status(ids): for instance_id in ids: status = InstanceServiceStatus.find_by( instance_id=instance_id).get_status() if _is_fast_fail_status(status): # if one has failed, no need to continue polling LOG.debug("Instance %s has acquired a fast-fail status %s." % (instance_id, status)) return True if status != expected_status: # if one is not in the expected state, continue polling LOG.debug("Instance %s was %s." 
% (instance_id, status)) return False return True def _instance_ids_with_failures(ids): LOG.debug("Checking for service failures on instances: %s" % ids) failed_instance_ids = [] for instance_id in ids: status = InstanceServiceStatus.find_by( instance_id=instance_id).get_status() if _is_fast_fail_status(status): failed_instance_ids.append(instance_id) return failed_instance_ids LOG.debug("Polling until all instances acquire %s status: %s" % (expected_status, instance_ids)) try: utils.poll_until(lambda: instance_ids, lambda ids: _all_have_status(ids), sleep_time=USAGE_SLEEP_TIME, time_out=CONF.usage_timeout) except PollTimeOut: LOG.exception(_("Timed out while waiting for all instances " "to become %s.") % expected_status) self.update_statuses_on_failure(cluster_id, shard_id) return False failed_ids = _instance_ids_with_failures(instance_ids) if failed_ids: LOG.error(_("Some instances failed: %s") % failed_ids) self.update_statuses_on_failure(cluster_id, shard_id) return False LOG.debug("All instances have acquired the expected status %s." % expected_status) return True def delete_cluster(self, context, cluster_id): LOG.debug("begin delete_cluster for id: %s" % cluster_id) def all_instances_marked_deleted(): db_instances = DBInstance.find_all(cluster_id=cluster_id, deleted=False).all() return len(db_instances) == 0 try: utils.poll_until(all_instances_marked_deleted, sleep_time=2, time_out=CONF.cluster_delete_time_out) except PollTimeOut: LOG.error(_("timeout for instances to be marked as deleted.")) return LOG.debug("setting cluster %s as deleted." 
% cluster_id) cluster = DBCluster.find_by(id=cluster_id) cluster.deleted = True cluster.deleted_at = utils.utcnow() cluster.task_status = tasks.ClusterTasks.NONE cluster.save() LOG.debug("end delete_cluster for id: %s" % cluster_id) def rolling_restart_cluster(self, context, cluster_id, delay_sec=0): LOG.debug("Begin rolling cluster restart for id: %s" % cluster_id) def _restart_cluster_instance(instance): LOG.debug("Restarting instance with id: %s" % instance.id) context.notification = ( DBaaSInstanceRestart(context, **request_info)) with StartNotification(context, instance_id=instance.id): with EndNotification(context): instance.update_db(task_status=InstanceTasks.REBOOTING) instance.restart() timeout = Timeout(CONF.cluster_usage_timeout) cluster_notification = context.notification request_info = cluster_notification.serialize(context) try: node_db_inst = DBInstance.find_all(cluster_id=cluster_id).all() for index, db_inst in enumerate(node_db_inst): if index > 0: LOG.debug( "Waiting (%ds) for restarted nodes to rejoin the " "cluster before proceeding." % delay_sec) time.sleep(delay_sec) instance = BuiltInstanceTasks.load(context, db_inst.id) _restart_cluster_instance(instance) except Timeout as t: if t is not timeout: raise # not my timeout LOG.exception(_("Timeout for restarting cluster.")) raise except Exception: LOG.exception(_("Error restarting cluster.") % cluster_id) raise finally: context.notification = cluster_notification timeout.cancel() self.reset_task() LOG.debug("End rolling restart for id: %s." % cluster_id) def rolling_upgrade_cluster(self, context, cluster_id, datastore_version): LOG.debug("Begin rolling cluster upgrade for id: %s." % cluster_id) def _upgrade_cluster_instance(instance): LOG.debug("Upgrading instance with id: %s." 
% instance.id) context.notification = ( DBaaSInstanceUpgrade(context, **request_info)) with StartNotification( context, instance_id=instance.id, datastore_version_id=datastore_version.id): with EndNotification(context): instance.update_db( datastore_version_id=datastore_version.id, task_status=InstanceTasks.UPGRADING) instance.upgrade(datastore_version) timeout = Timeout(CONF.cluster_usage_timeout) cluster_notification = context.notification request_info = cluster_notification.serialize(context) try: for db_inst in DBInstance.find_all(cluster_id=cluster_id).all(): instance = BuiltInstanceTasks.load( context, db_inst.id) _upgrade_cluster_instance(instance) self.reset_task() except Timeout as t: if t is not timeout: raise # not my timeout LOG.exception(_("Timeout for upgrading cluster.")) self.update_statuses_on_failure( cluster_id, status=InstanceTasks.UPGRADING_ERROR) except Exception: LOG.exception(_("Error upgrading cluster %s.") % cluster_id) self.update_statuses_on_failure( cluster_id, status=InstanceTasks.UPGRADING_ERROR) finally: context.notification = cluster_notification timeout.cancel() LOG.debug("End upgrade_cluster for id: %s." % cluster_id) class FreshInstanceTasks(FreshInstance, NotifyMixin, ConfigurationMixin): def _delete_resources(self, deleted_at): LOG.debug("Begin _delete_resources for instance %s" % self.id) # If volume has "available" status, delete it manually. 
try: if self.volume_id: volume_client = create_cinder_client(self.context) volume = volume_client.volumes.get(self.volume_id) if volume.status == "available": LOG.info(_("Deleting volume %(v)s for instance: %(i)s.") % {'v': self.volume_id, 'i': self.id}) volume.delete() except Exception: LOG.exception(_("Error deleting volume of instance %(id)s.") % {'id': self.db_info.id}) LOG.debug("End _delete_resource for instance %s" % self.id) def wait_for_instance(self, timeout, flavor): # Make sure the service becomes active before sending a usage # record to avoid over billing a customer for an instance that # fails to build properly. error_message = '' error_details = '' try: utils.poll_until(self._service_is_active, sleep_time=USAGE_SLEEP_TIME, time_out=timeout) LOG.info(_("Created instance %s successfully.") % self.id) TroveInstanceCreate(instance=self, instance_size=flavor['ram']).notify() except PollTimeOut as ex: LOG.error(_("Failed to create instance %s. " "Timeout waiting for instance to become active. 
" "No usage create-event was sent.") % self.id) self.update_statuses_on_time_out() error_message = "%s" % ex error_details = traceback.format_exc() except Exception as ex: LOG.exception(_("Failed to send usage create-event for " "instance %s.") % self.id) error_message = "%s" % ex error_details = traceback.format_exc() finally: if error_message: inst_models.save_instance_fault( self.id, error_message, error_details, skip_delta=USAGE_SLEEP_TIME + 1) def create_instance(self, flavor, image_id, databases, users, datastore_manager, packages, volume_size, backup_id, availability_zone, root_password, nics, overrides, cluster_config, snapshot, volume_type, modules, scheduler_hints): # It is the caller's responsibility to ensure that # FreshInstanceTasks.wait_for_instance is called after # create_instance to ensure that the proper usage event gets sent LOG.info(_("Creating instance %s.") % self.id) security_groups = None # If security group support is enabled and heat based instance # orchestration is disabled, create a security group. # # Heat based orchestration handles security group(resource) # in the template definition. if CONF.trove_security_groups_support and not use_heat: try: security_groups = self._create_secgroup(datastore_manager) except Exception as e: msg = (_("Error creating security group for instance: %s") % self.id) err = inst_models.InstanceTasks.BUILDING_ERROR_SEC_GROUP self._log_and_raise(e, msg, err) else: LOG.debug("Successfully created security group for " "instance: %s" % self.id) files = self.get_injected_files(datastore_manager) cinder_volume_type = volume_type or CONF.cinder_volume_type if use_heat: msg = _("Support for heat templates in Trove is scheduled for " "removal. 
You will no longer be able to provide a heat " "template to Trove for the provisioning of resources.") LOG.warning(msg) volume_info = self._create_server_volume_heat( flavor, image_id, datastore_manager, volume_size, availability_zone, nics, files, cinder_volume_type) elif use_nova_server_volume: volume_info = self._create_server_volume( flavor['id'], image_id, security_groups, datastore_manager, volume_size, availability_zone, nics, files, scheduler_hints) else: volume_info = self._create_server_volume_individually( flavor['id'], image_id, security_groups, datastore_manager, volume_size, availability_zone, nics, files, cinder_volume_type, scheduler_hints) config = self._render_config(flavor) backup_info = None if backup_id is not None: backup = bkup_models.Backup.get_by_id(self.context, backup_id) backup_info = {'id': backup_id, 'instance_id': backup.instance_id, 'location': backup.location, 'type': backup.backup_type, 'checksum': backup.checksum, } self._guest_prepare(flavor['ram'], volume_info, packages, databases, users, backup_info, config.config_contents, root_password, overrides, cluster_config, snapshot, modules) if root_password: self.report_root_enabled() if not self.db_info.task_status.is_error: self.reset_task_status() # when DNS is supported, we attempt to add this after the # instance is prepared. Otherwise, if DNS fails, instances # end up in a poorer state and there's no tooling around # re-sending the prepare call; retrying DNS is much easier. 
try: self._create_dns_entry() except Exception as e: msg = _("Error creating DNS entry for instance: %s") % self.id err = inst_models.InstanceTasks.BUILDING_ERROR_DNS self._log_and_raise(e, msg, err) def attach_replication_slave(self, snapshot, flavor): LOG.debug("Calling attach_replication_slave for %s.", self.id) try: replica_config = self._render_replica_config(flavor) self.guest.attach_replication_slave(snapshot, replica_config.config_contents) except GuestError as e: msg = (_("Error attaching instance %s " "as replica.") % self.id) err = inst_models.InstanceTasks.BUILDING_ERROR_REPLICA self._log_and_raise(e, msg, err) def get_replication_master_snapshot(self, context, slave_of_id, flavor, backup_id=None, replica_number=1): # First check to see if we need to take a backup master = BuiltInstanceTasks.load(context, slave_of_id) backup_required = master.backup_required_for_replication() if backup_required: # if we aren't passed in a backup id, look it up to possibly do # an incremental backup, thus saving time if not backup_id: backup = Backup.get_last_completed( context, slave_of_id, include_incremental=True) if backup: backup_id = backup.id else: LOG.debug('Skipping replication backup, as none is required.') snapshot_info = { 'name': "Replication snapshot for %s" % self.id, 'description': "Backup image used to initialize " "replication slave", 'instance_id': slave_of_id, 'parent_id': backup_id, 'tenant_id': self.tenant_id, 'state': BackupState.NEW, 'datastore_version_id': self.datastore_version.id, 'deleted': False, 'replica_number': replica_number, } replica_backup_id = None if backup_required: # Only do a backup if it's the first replica if replica_number == 1: try: db_info = DBBackup.create(**snapshot_info) replica_backup_id = db_info.id except InvalidModelError: msg = (_("Unable to create replication snapshot record " "for instance: %s") % self.id) LOG.exception(msg) raise BackupCreationError(msg) if backup_id: # Look up the parent backup info or fail early 
if not # found or if the user does not have access to the parent. _parent = Backup.get_by_id(context, backup_id) parent = { 'location': _parent.location, 'checksum': _parent.checksum, } snapshot_info.update({ 'parent': parent, }) else: # we've been passed in the actual replica backup id, # so just use it replica_backup_id = backup_id try: snapshot_info.update({ 'id': replica_backup_id, 'datastore': master.datastore.name, 'datastore_version': master.datastore_version.name, }) snapshot = master.get_replication_snapshot( snapshot_info, flavor=master.flavor_id) snapshot.update({ 'config': self._render_replica_config(flavor).config_contents }) return snapshot except Exception as e_create: msg_create = ( _("Error creating replication snapshot from " "instance %(source)s for new replica %(replica)s.") % {'source': slave_of_id, 'replica': self.id}) err = inst_models.InstanceTasks.BUILDING_ERROR_REPLICA # if the delete of the 'bad' backup fails, it'll mask the # create exception, so we trap it here try: # Only try to delete the backup if it's the first replica if replica_number == 1 and backup_required: Backup.delete(context, replica_backup_id) except Exception as e_delete: LOG.error(msg_create) # Make sure we log any unexpected errors from the create if not isinstance(e_create, TroveError): LOG.exception(e_create) msg_delete = ( _("An error occurred while deleting a bad " "replication snapshot from instance %(source)s.") % {'source': slave_of_id}) # we've already logged the create exception, so we'll raise # the delete (otherwise the create will be logged twice) self._log_and_raise(e_delete, msg_delete, err) # the delete worked, so just log the original problem with create self._log_and_raise(e_create, msg_create, err) def report_root_enabled(self): mysql_models.RootHistory.create(self.context, self.id, 'root') def update_statuses_on_time_out(self): if CONF.update_status_on_fail: # Updating service status service = InstanceServiceStatus.find_by(instance_id=self.id) 
service.set_status(ServiceStatuses. FAILED_TIMEOUT_GUESTAGENT) service.save() LOG.error(_("Service status: %(status)s\n" "Service error description: %(desc)s") % {'status': ServiceStatuses. FAILED_TIMEOUT_GUESTAGENT.api_status, 'desc': ServiceStatuses. FAILED_TIMEOUT_GUESTAGENT.description}) # Updating instance status db_info = DBInstance.find_by(id=self.id, deleted=False) db_info.set_task_status(InstanceTasks. BUILDING_ERROR_TIMEOUT_GA) db_info.save() LOG.error(_("Trove instance status: %(action)s\n" "Trove instance status description: %(text)s") % {'action': InstanceTasks. BUILDING_ERROR_TIMEOUT_GA.action, 'text': InstanceTasks. BUILDING_ERROR_TIMEOUT_GA.db_text}) def _service_is_active(self): """ Check that the database guest is active. This function is meant to be called with poll_until to check that the guest is alive before sending a 'create' message. This prevents over billing a customer for an instance that they can never use. Returns: boolean if the service is active. Raises: TroveError if the service is in a failure state. 
""" service = InstanceServiceStatus.find_by(instance_id=self.id) status = service.get_status() if (status == rd_instance.ServiceStatuses.RUNNING or status == rd_instance.ServiceStatuses.INSTANCE_READY): return True elif status not in [rd_instance.ServiceStatuses.NEW, rd_instance.ServiceStatuses.BUILDING, rd_instance.ServiceStatuses.UNKNOWN, rd_instance.ServiceStatuses.DELETED]: raise TroveError(_("Service not active, status: %s") % status) c_id = self.db_info.compute_instance_id server = self.nova_client.servers.get(c_id) server_status = server.status if server_status in [InstanceStatus.ERROR, InstanceStatus.FAILED]: server_fault_message = 'No fault found' try: server_fault_message = server.fault.get('message', 'Unknown') except AttributeError: pass server_message = "\nServer error: %s" % server_fault_message raise TroveError(_("Server not active, status: %(status)s" "%(srv_msg)s") % {'status': server_status, 'srv_msg': server_message}) return False def _create_server_volume(self, flavor_id, image_id, security_groups, datastore_manager, volume_size, availability_zone, nics, files, scheduler_hints): LOG.debug("Begin _create_server_volume for id: %s" % self.id) try: userdata = self._prepare_userdata(datastore_manager) name = self.hostname or self.name volume_desc = ("datastore volume for %s" % self.id) volume_name = ("datastore-%s" % self.id) volume_ref = {'size': volume_size, 'name': volume_name, 'description': volume_desc} config_drive = CONF.use_nova_server_config_drive server = self.nova_client.servers.create( name, image_id, flavor_id, files=files, volume=volume_ref, security_groups=security_groups, availability_zone=availability_zone, nics=nics, config_drive=config_drive, userdata=userdata, scheduler_hints=scheduler_hints) server_dict = server._info LOG.debug("Created new compute instance %(server_id)s " "for id: %(id)s\nServer response: %(response)s" % {'server_id': server.id, 'id': self.id, 'response': server_dict}) volume_id = None for volume in 
server_dict.get('os:volumes', []): volume_id = volume.get('id') # Record the server ID and volume ID in case something goes wrong. self.update_db(compute_instance_id=server.id, volume_id=volume_id) except Exception as e: msg = _("Error creating server and volume for " "instance %s") % self.id LOG.debug("End _create_server_volume for id: %s" % self.id) err = inst_models.InstanceTasks.BUILDING_ERROR_SERVER self._log_and_raise(e, msg, err) device_path = self.device_path mount_point = CONF.get(datastore_manager).mount_point volume_info = {'device_path': device_path, 'mount_point': mount_point} LOG.debug("End _create_server_volume for id: %s" % self.id) return volume_info def _build_sg_rules_mapping(self, rule_ports): final = [] cidr = CONF.trove_security_group_rule_cidr for port_or_range in set(rule_ports): from_, to_ = utils.gen_ports(port_or_range) final.append({'cidr': cidr, 'from_': str(from_), 'to_': str(to_)}) return final def _create_server_volume_heat(self, flavor, image_id, datastore_manager, volume_size, availability_zone, nics, files, volume_type): LOG.debug("Begin _create_server_volume_heat for id: %s" % self.id) try: client = create_heat_client(self.context) tcp_rules_mapping_list = self._build_sg_rules_mapping(CONF.get( datastore_manager).tcp_ports) udp_ports_mapping_list = self._build_sg_rules_mapping(CONF.get( datastore_manager).udp_ports) ifaces, ports = self._build_heat_nics(nics) template_obj = template.load_heat_template(datastore_manager) heat_template_unicode = template_obj.render( volume_support=self.volume_support, ifaces=ifaces, ports=ports, tcp_rules=tcp_rules_mapping_list, udp_rules=udp_ports_mapping_list, datastore_manager=datastore_manager, files=files) try: heat_template = heat_template_unicode.encode('utf-8') except UnicodeEncodeError: raise TroveError(_("Failed to utf-8 encode Heat template.")) parameters = {"Flavor": flavor["name"], "VolumeSize": volume_size, "VolumeType": volume_type, "InstanceId": self.id, "ImageId": image_id, 
"DatastoreManager": datastore_manager, "AvailabilityZone": availability_zone, "TenantId": self.tenant_id} stack_name = 'trove-%s' % self.id client.stacks.create(stack_name=stack_name, template=heat_template, parameters=parameters) try: utils.poll_until( lambda: client.stacks.get(stack_name), lambda stack: stack.stack_status in ['CREATE_COMPLETE', 'CREATE_FAILED'], sleep_time=USAGE_SLEEP_TIME, time_out=HEAT_TIME_OUT) except PollTimeOut: raise TroveError(_("Failed to obtain Heat stack status. " "Timeout occurred.")) stack = client.stacks.get(stack_name) if ((stack.action, stack.stack_status) not in HEAT_STACK_SUCCESSFUL_STATUSES): raise TroveError(_("Failed to create Heat stack.")) resource = client.resources.get(stack.id, 'BaseInstance') if resource.resource_status != HEAT_RESOURCE_SUCCESSFUL_STATE: raise TroveError(_("Failed to provision Heat base instance.")) instance_id = resource.physical_resource_id if self.volume_support: resource = client.resources.get(stack.id, 'DataVolume') if resource.resource_status != HEAT_RESOURCE_SUCCESSFUL_STATE: raise TroveError(_("Failed to provision Heat data " "volume.")) volume_id = resource.physical_resource_id self.update_db(compute_instance_id=instance_id, volume_id=volume_id) else: self.update_db(compute_instance_id=instance_id) if CONF.trove_security_groups_support: resource = client.resources.get(stack.id, 'DatastoreSG') name = "%s_%s" % ( CONF.trove_security_group_name_prefix, self.id) description = _("Security Group for %s") % self.id SecurityGroup.create( id=resource.physical_resource_id, name=name, description=description, user=self.context.user, tenant_id=self.context.tenant) SecurityGroupInstanceAssociation.create( security_group_id=resource.physical_resource_id, instance_id=self.id) except (TroveError, heat_exceptions.HTTPNotFound, heat_exceptions.HTTPException) as e: msg = _("Error occurred during Heat stack creation for " "instance %s.") % self.id err = inst_models.InstanceTasks.BUILDING_ERROR_SERVER 
self._log_and_raise(e, msg, err) device_path = self.device_path mount_point = CONF.get(datastore_manager).mount_point volume_info = {'device_path': device_path, 'mount_point': mount_point} LOG.debug("End _create_server_volume_heat for id: %s" % self.id) return volume_info def _create_server_volume_individually(self, flavor_id, image_id, security_groups, datastore_manager, volume_size, availability_zone, nics, files, volume_type, scheduler_hints): LOG.debug("Begin _create_server_volume_individually for id: %s" % self.id) server = None volume_info = self._build_volume_info(datastore_manager, volume_size=volume_size, volume_type=volume_type) block_device_mapping = volume_info['block_device'] try: server = self._create_server(flavor_id, image_id, security_groups, datastore_manager, block_device_mapping, availability_zone, nics, files, scheduler_hints) server_id = server.id # Save server ID. self.update_db(compute_instance_id=server_id) except Exception as e: msg = _("Failed to create server for instance %s") % self.id err = inst_models.InstanceTasks.BUILDING_ERROR_SERVER self._log_and_raise(e, msg, err) LOG.debug("End _create_server_volume_individually for id: %s" % self.id) return volume_info def _build_volume_info(self, datastore_manager, volume_size=None, volume_type=None): volume_info = None volume_support = self.volume_support device_path = self.device_path mount_point = CONF.get(datastore_manager).mount_point LOG.debug("trove volume support = %s" % volume_support) if volume_support: try: volume_info = self._create_volume( volume_size, volume_type, datastore_manager) except Exception as e: msg = _("Failed to create volume for instance %s") % self.id err = inst_models.InstanceTasks.BUILDING_ERROR_VOLUME self._log_and_raise(e, msg, err) else: LOG.debug("device_path = %(path)s\n" "mount_point = %(point)s" % { "path": device_path, "point": mount_point }) volume_info = { 'block_device': None, 'device_path': device_path, 'mount_point': mount_point, 'volumes': None, } 
        return volume_info

    def _log_and_raise(self, exc, message, task_status):
        """Log a provisioning failure, persist the failed task status,
        and re-raise as a TroveError.

        :param exc: the original exception (may be falsy).
        :param message: high-level description of what failed.
        :param task_status: InstanceTasks value recorded on the instance
            so the failure is visible through the API.
        :raises TroveError: always; combines message and exception text.
        """
        LOG.error(_("%(message)s\n%(exc)s\n%(trace)s") %
                  {"message": message,
                   "exc": exc,
                   "trace": traceback.format_exc()})
        self.update_db(task_status=task_status)
        exc_message = '\n%s' % exc if exc else ''
        full_message = "%s%s" % (message, exc_message)
        raise TroveError(message=full_message)

    def _create_volume(self, volume_size, volume_type, datastore_manager):
        """Create the Cinder data volume for this instance.

        Records the volume ID early (for cleanup if anything goes
        wrong), then blocks until the volume reaches 'available' or
        'error'.

        :raises VolumeCreationFailure: if the volume ends up in 'error'.
        :returns: the volume_info dict built by _build_volume().
        """
        LOG.debug("Begin _create_volume for id: %s" % self.id)
        volume_client = create_cinder_client(self.context, self.region_name)
        volume_desc = ("datastore volume for %s" % self.id)
        volume_ref = volume_client.volumes.create(
            volume_size, name="datastore-%s" % self.id,
            description=volume_desc,
            volume_type=volume_type)

        # Record the volume ID in case something goes wrong.
        self.update_db(volume_id=volume_ref.id)

        utils.poll_until(
            lambda: volume_client.volumes.get(volume_ref.id),
            lambda v_ref: v_ref.status in ['available', 'error'],
            sleep_time=2,
            time_out=VOLUME_TIME_OUT)

        # Re-fetch to get the final status after polling completes.
        v_ref = volume_client.volumes.get(volume_ref.id)
        if v_ref.status in ['error']:
            raise VolumeCreationFailure()
        LOG.debug("End _create_volume for id: %s" % self.id)
        return self._build_volume(v_ref, datastore_manager)

    def _build_volume(self, v_ref, datastore_manager):
        """Build the volume_info dict (block device mapping, device
        path, mount point) for a freshly created volume."""
        LOG.debug("Created volume %s" % v_ref)
        # The mapping is in the format:
        # <id>:[<type>]:[<size(GB)>]:[<delete_on_terminate>]
        # setting the delete_on_terminate instance to true=1
        mapping = "%s:%s:%s:%s" % (v_ref.id, '', v_ref.size, 1)
        bdm = CONF.block_device_mapping
        block_device = {bdm: mapping}
        created_volumes = [{'id': v_ref.id,
                            'size': v_ref.size}]

        device_path = self.device_path
        mount_point = CONF.get(datastore_manager).mount_point

        LOG.debug("block_device = %(device)s\n"
                  "volume = %(volume)s\n"
                  "device_path = %(path)s\n"
                  "mount_point = %(point)s" %
                  {"device": block_device,
                   "volume": created_volumes,
                   "path": device_path,
                   "point": mount_point})

        volume_info = {'block_device': block_device,
                       'device_path': device_path,
                       'mount_point':
mount_point, 'volumes': created_volumes} return volume_info def _prepare_userdata(self, datastore_manager): userdata = None cloudinit = os.path.join(CONF.get('cloudinit_location'), "%s.cloudinit" % datastore_manager) if os.path.isfile(cloudinit): with open(cloudinit, "r") as f: userdata = f.read() return userdata def _create_server(self, flavor_id, image_id, security_groups, datastore_manager, block_device_mapping, availability_zone, nics, files={}, scheduler_hints=None): userdata = self._prepare_userdata(datastore_manager) name = self.hostname or self.name bdmap = block_device_mapping config_drive = CONF.use_nova_server_config_drive server = self.nova_client.servers.create( name, image_id, flavor_id, files=files, userdata=userdata, security_groups=security_groups, block_device_mapping=bdmap, availability_zone=availability_zone, nics=nics, config_drive=config_drive, scheduler_hints=scheduler_hints) LOG.debug("Created new compute instance %(server_id)s " "for instance %(id)s" % {'server_id': server.id, 'id': self.id}) return server def _guest_prepare(self, flavor_ram, volume_info, packages, databases, users, backup_info=None, config_contents=None, root_password=None, overrides=None, cluster_config=None, snapshot=None, modules=None): LOG.debug("Entering guest_prepare") # Now wait for the response from the create to do additional work self.guest.prepare(flavor_ram, packages, databases, users, device_path=volume_info['device_path'], mount_point=volume_info['mount_point'], backup_info=backup_info, config_contents=config_contents, root_password=root_password, overrides=overrides, cluster_config=cluster_config, snapshot=snapshot, modules=modules) def _create_dns_entry(self): dns_support = CONF.trove_dns_support LOG.debug("trove dns support = %s" % dns_support) if dns_support: LOG.debug("%(gt)s: Creating dns entry for instance: %(id)s" % {'gt': greenthread.getcurrent(), 'id': self.id}) dns_client = create_dns_client(self.context) def get_server(): c_id = 
self.db_info.compute_instance_id return self.nova_client.servers.get(c_id) def ip_is_available(server): LOG.debug("Polling for ip addresses: $%s " % server.addresses) if server.addresses != {}: return True elif (server.addresses == {} and server.status != InstanceStatus.ERROR): return False elif (server.addresses == {} and server.status == InstanceStatus.ERROR): LOG.error(_("Failed to create DNS entry for instance " "%(instance)s. Server status was " "%(status)s).") % {'instance': self.id, 'status': server.status}) raise TroveError(status=server.status) utils.poll_until(get_server, ip_is_available, sleep_time=1, time_out=DNS_TIME_OUT) server = self.nova_client.servers.get( self.db_info.compute_instance_id) self.db_info.addresses = server.addresses LOG.debug("Creating dns entry...") ip = self.dns_ip_address if not ip: raise TroveError(_("Failed to create DNS entry for instance " "%s. No IP available.") % self.id) dns_client.create_instance_entry(self.id, ip) LOG.debug("Successfully created DNS entry for instance: %s" % self.id) else: LOG.debug("%(gt)s: DNS not enabled for instance: %(id)s" % {'gt': greenthread.getcurrent(), 'id': self.id}) def _create_secgroup(self, datastore_manager): security_group = SecurityGroup.create_for_instance( self.id, self.context, self.region_name) tcp_ports = CONF.get(datastore_manager).tcp_ports udp_ports = CONF.get(datastore_manager).udp_ports icmp = CONF.get(datastore_manager).icmp self._create_rules(security_group, tcp_ports, 'tcp') self._create_rules(security_group, udp_ports, 'udp') if icmp: self._create_rules(security_group, None, 'icmp') return [security_group["name"]] def _create_rules(self, s_group, ports, protocol): err = inst_models.InstanceTasks.BUILDING_ERROR_SEC_GROUP err_msg = _("Failed to create security group rules for instance " "%(instance_id)s: Invalid port format - " "FromPort = %(from)s, ToPort = %(to)s") def set_error_and_raise(port_or_range): from_port, to_port = port_or_range self.update_db(task_status=err) msg 
= err_msg % {'instance_id': self.id, 'from': from_port, 'to': to_port} raise MalformedSecurityGroupRuleError(message=msg) cidr = CONF.trove_security_group_rule_cidr if protocol == 'icmp': SecurityGroupRule.create_sec_group_rule( s_group, 'icmp', None, None, cidr, self.context, self.region_name) else: for port_or_range in set(ports): try: from_, to_ = (None, None) from_, to_ = utils.gen_ports(port_or_range) SecurityGroupRule.create_sec_group_rule( s_group, protocol, int(from_), int(to_), cidr, self.context, self.region_name) except (ValueError, TroveError): set_error_and_raise([from_, to_]) def _build_heat_nics(self, nics): ifaces = [] ports = [] if nics: for idx, nic in enumerate(nics): iface_id = nic.get('port-id') if iface_id: ifaces.append(iface_id) continue net_id = nic.get('net-id') if net_id: port = {} port['name'] = "Port%s" % idx port['net_id'] = net_id fixed_ip = nic.get('v4-fixed-ip') if fixed_ip: port['fixed_ip'] = fixed_ip ports.append(port) ifaces.append("{Ref: Port%s}" % idx) return ifaces, ports class BuiltInstanceTasks(BuiltInstance, NotifyMixin, ConfigurationMixin): """ Performs the various asynchronous instance related tasks. """ def _delete_resources(self, deleted_at): LOG.debug("Begin _delete_resources for instance %s" % self.id) server_id = self.db_info.compute_instance_id old_server = self.nova_client.servers.get(server_id) try: # The server may have already been marked as 'SHUTDOWN' # but check for 'ACTIVE' in case of any race condition # We specifically don't want to attempt to stop db if # the server is in 'ERROR' or 'FAILED" state, as it will # result in a long timeout if self.server_status_matches(['ACTIVE', 'SHUTDOWN'], server=self): LOG.debug("Stopping datastore on instance %s before deleting " "any resources." 
% self.id) self.guest.stop_db() except Exception: LOG.exception(_("Error stopping the datastore before attempting " "to delete instance id %s.") % self.id) try: if use_heat: # Delete the server via heat heatclient = create_heat_client(self.context) name = 'trove-%s' % self.id heatclient.stacks.delete(name) else: self.server.delete() except Exception as ex: LOG.exception(_("Error during delete compute server %s") % self.server.id) try: dns_support = CONF.trove_dns_support LOG.debug("trove dns support = %s" % dns_support) if dns_support: dns_api = create_dns_client(self.context) dns_api.delete_instance_entry(instance_id=self.db_info.id) except Exception as ex: LOG.exception(_("Error during dns entry of instance %(id)s: " "%(ex)s") % {'id': self.db_info.id, 'ex': ex}) try: srv_grp.ServerGroup.delete(self.context, self.server_group) except Exception: LOG.exception(_("Error during delete server group for %s") % self.id) # Poll until the server is gone. def server_is_finished(): try: server = self.nova_client.servers.get(server_id) if not self.server_status_matches(['SHUTDOWN', 'ACTIVE'], server=server): LOG.error(_("Server %(server_id)s entered ERROR status " "when deleting instance %(instance_id)s!") % {'server_id': server.id, 'instance_id': self.id}) return False except nova_exceptions.NotFound: return True try: utils.poll_until(server_is_finished, sleep_time=2, time_out=CONF.server_delete_time_out) except PollTimeOut: LOG.exception(_("Failed to delete instance %(instance_id)s: " "Timeout deleting compute server %(server_id)s") % {'instance_id': self.id, 'server_id': server_id}) # If volume has been resized it must be manually removed in cinder try: if self.volume_id: volume_client = create_cinder_client(self.context, self.region_name) volume = volume_client.volumes.get(self.volume_id) if volume.status == "available": LOG.info(_("Deleting volume %(v)s for instance: %(i)s.") % {'v': self.volume_id, 'i': self.id}) volume.delete() except Exception: LOG.exception(_("Error 
deleting volume of instance %(id)s.") % {'id': self.db_info.id}) TroveInstanceDelete(instance=self, deleted_at=timeutils.isotime(deleted_at), server=old_server).notify() LOG.debug("End _delete_resources for instance %s" % self.id) def server_status_matches(self, expected_status, server=None): if not server: server = self.server return server.status.upper() in ( status.upper() for status in expected_status) def resize_volume(self, new_size): LOG.info(_("Resizing volume for instance %(instance_id)s from " "%(old_size)s GB to %(new_size)s GB.") % {'instance_id': self.id, 'old_size': self.volume_size, 'new_size': new_size}) action = ResizeVolumeAction(self, self.volume_size, new_size) action.execute() LOG.info(_("Resized volume for instance %s successfully.") % self.id) def resize_flavor(self, old_flavor, new_flavor): LOG.info(_("Resizing instance %(instance_id)s from flavor " "%(old_flavor)s to %(new_flavor)s.") % {'instance_id': self.id, 'old_flavor': old_flavor['id'], 'new_flavor': new_flavor['id']}) action = ResizeAction(self, old_flavor, new_flavor) action.execute() LOG.info(_("Resized instance %s successfully.") % self.id) def migrate(self, host): LOG.info(_("Initiating migration to host %s.") % host) action = MigrateAction(self, host) action.execute() def create_backup(self, backup_info): LOG.info(_("Initiating backup for instance %s.") % self.id) self.guest.create_backup(backup_info) def backup_required_for_replication(self): LOG.debug("Seeing if replication backup is required for instance %s." 
                  % self.id)
        return self.guest.backup_required_for_replication()

    def get_replication_snapshot(self, snapshot_info, flavor):
        """Take a replication snapshot of this (master) instance.

        Runs under the tenant's 'backups' quota, since the snapshot is
        backed by a backup.
        """
        def _get_replication_snapshot():
            LOG.debug("Calling get_replication_snapshot on %s.", self.id)
            try:
                rep_source_config = self._render_replica_source_config(flavor)
                result = self.guest.get_replication_snapshot(
                    snapshot_info, rep_source_config.config_contents)
                LOG.debug("Got replication snapshot from guest successfully.")
                return result
            except Exception:
                LOG.exception(_("Failed to get replication snapshot from %s.")
                              % self.id)
                raise
        return run_with_quotas(self.context.tenant, {'backups': 1},
                               _get_replication_snapshot)

    def detach_replica(self, master, for_failover=False):
        """Detach this replica from its replication source.

        Clears slave_of_id in the DB.  The task status is reset only on
        a normal detach; during failover the caller manages it.
        NOTE(review): 'master' is unused here - presumably kept for
        signature symmetry with attach_replica; confirm before removing.
        """
        LOG.debug("Calling detach_replica on %s" % self.id)
        try:
            self.guest.detach_replica(for_failover)
            self.update_db(slave_of_id=None)
            self.slave_list = None
        except (GuestError, GuestTimeout):
            LOG.exception(_("Failed to detach replica %s.") % self.id)
            raise
        finally:
            if not for_failover:
                self.reset_task_status()

    def attach_replica(self, master):
        """Attach this instance as a replica of 'master'."""
        LOG.debug("Calling attach_replica on %s" % self.id)
        try:
            replica_info = master.guest.get_replica_context()
            flavor = self.nova_client.flavors.get(self.flavor_id)
            slave_config = self._render_replica_config(flavor).config_contents
            self.guest.attach_replica(replica_info, slave_config)
            self.update_db(slave_of_id=master.id)
            self.slave_list = None
        except (GuestError, GuestTimeout):
            LOG.exception(_("Failed to attach replica %s.") % self.id)
            raise

    def make_read_only(self, read_only):
        # Thin pass-through to the guest agent.
        LOG.debug("Calling make_read_only on %s" % self.id)
        self.guest.make_read_only(read_only)

    def _get_floating_ips(self):
        """Returns floating ips as a dict indexed by the ip."""
        floating_ips = {}
        for ip in self.nova_client.floating_ips.list():
            floating_ips.update({ip.ip: ip})
        return floating_ips

    def detach_public_ips(self):
        """Remove this server's floating IPs; return the removed IPs.

        The returned list can be handed to attach_public_ips() to
        re-attach the same addresses (e.g. during failover).
        """
        LOG.debug("Begin detach_public_ips for instance %s" % self.id)
        removed_ips = []
        server_id = self.db_info.compute_instance_id
        nova_instance = self.nova_client.servers.get(server_id)
        floating_ips = self._get_floating_ips()
        # Only detach addresses that are actually floating IPs; fixed
        # addresses in get_visible_ip_addresses() are left alone.
        for ip in self.get_visible_ip_addresses():
            if ip in floating_ips:
                nova_instance.remove_floating_ip(ip)
                removed_ips.append(ip)
        return removed_ips

    def attach_public_ips(self, ips):
        """Attach the given floating IPs to this instance's server."""
        LOG.debug("Begin attach_public_ips for instance %s" % self.id)
        server_id = self.db_info.compute_instance_id
        nova_instance = self.nova_client.servers.get(server_id)
        for ip in ips:
            nova_instance.add_floating_ip(ip)

    def enable_as_master(self):
        """Promote this instance to a replication source.

        Clears any slave_of_id link and pushes the rendered replica
        source configuration to the guest.
        """
        LOG.debug("Calling enable_as_master on %s" % self.id)
        flavor = self.nova_client.flavors.get(self.flavor_id)
        replica_source_config = self._render_replica_source_config(flavor)
        self.update_db(slave_of_id=None)
        self.slave_list = None
        self.guest.enable_as_master(replica_source_config.config_contents)

    def get_last_txn(self):
        # Guest pass-through: last transaction applied on this instance.
        LOG.debug("Calling get_last_txn on %s" % self.id)
        return self.guest.get_last_txn()

    def get_latest_txn_id(self):
        # Guest pass-through.
        LOG.debug("Calling get_latest_txn_id on %s" % self.id)
        return self.guest.get_latest_txn_id()

    def wait_for_txn(self, txn):
        # No-op when txn is falsy.
        LOG.debug("Calling wait_for_txn on %s" % self.id)
        if txn:
            self.guest.wait_for_txn(txn)

    def cleanup_source_on_replica_detach(self, replica_info):
        # Guest pass-through.
        LOG.debug("Calling cleanup_source_on_replica_detach on %s" % self.id)
        self.guest.cleanup_source_on_replica_detach(replica_info)

    def demote_replication_master(self):
        # Guest pass-through.
        LOG.debug("Calling demote_replication_master on %s" % self.id)
        self.guest.demote_replication_master()

    def reboot(self):
        """Stop the datastore, reboot the compute server, and mark the
        datastore status PAUSED until the guest reports back."""
        try:
            # Issue a guest stop db call to shutdown the db if running
            LOG.debug("Stopping datastore on instance %s."
% self.id) try: self.guest.stop_db() except (exception.GuestError, exception.GuestTimeout) as e: # Acceptable to be here if db was already in crashed state # Also we check guest state before issuing reboot LOG.debug(str(e)) self._refresh_datastore_status() if not (self.datastore_status_matches( rd_instance.ServiceStatuses.SHUTDOWN) or self.datastore_status_matches( rd_instance.ServiceStatuses.CRASHED)): # We will bail if db did not get stopped or is blocked LOG.error(_("Cannot reboot instance. DB status is %s.") % self.datastore_status.status) return LOG.debug("The guest service status is %s." % self.datastore_status.status) LOG.info(_("Rebooting instance %s.") % self.id) self.server.reboot() # Poll nova until instance is active reboot_time_out = CONF.reboot_time_out def update_server_info(): self.refresh_compute_server_info() return self.server_status_matches(['ACTIVE']) utils.poll_until( update_server_info, sleep_time=2, time_out=reboot_time_out) # Set the status to PAUSED. The guest agent will reset the status # when the reboot completes and MySQL is running. self.set_datastore_status_to_paused() LOG.info(_("Rebooted instance %s successfully.") % self.id) except Exception as e: LOG.error(_("Failed to reboot instance %(id)s: %(e)s") % {'id': self.id, 'e': str(e)}) finally: LOG.debug("Rebooting FINALLY %s" % self.id) self.reset_task_status() def restart(self): LOG.info(_("Initiating datastore restart on instance %s.") % self.id) try: self.guest.restart() except GuestError: LOG.error(_("Failed to initiate datastore restart on instance " "%s.") % self.id) finally: self.reset_task_status() def guest_log_list(self):
def guest_log_action(self, log_name, enable, disable, publish, discard): LOG.info(_("Processing guest log for instance %s.") % self.id) try: return self.guest.guest_log_action(log_name, enable, disable, publish, discard) except GuestError: LOG.error(_("Failed to process guest log for instance %s.") % self.id) finally: self.reset_task_status() def refresh_compute_server_info(self): """Refreshes the compute server field.""" server = self.nova_client.servers.get(self.server.id) self.server = server def _refresh_datastore_status(self): """ Gets the latest instance service status from datastore and updates the reference on this BuiltInstanceTask reference """ self.datastore_status = InstanceServiceStatus.find_by( instance_id=self.id) def set_datastore_status_to_paused(self): """ Updates the InstanceServiceStatus for this BuiltInstance to PAUSED. This does not change the reference for this BuiltInstanceTask """ datastore_status = InstanceServiceStatus.find_by(instance_id=self.id) datastore_status.status = rd_instance.ServiceStatuses.PAUSED datastore_status.save() def upgrade(self, datastore_version): LOG.debug("Upgrading instance %s to new datastore version %s", self, datastore_version) def server_finished_rebuilding(): self.refresh_compute_server_info() return not self.server_status_matches(['REBUILD']) try: upgrade_info = self.guest.pre_upgrade() if self.volume_id: volume = self.volume_client.volumes.get(self.volume_id) volume_device = self._fix_device_path( volume.attachments[0]['device']) # BUG(1650518): Cleanup in the Pike release some instances # that we will be upgrading will be pre secureserialier # and will have no instance_key entries. If this is one of # those instances, make a key. That will make it appear in # the injected files that are generated next. 
From this # point, and until the guest comes up, attempting to send # messages to it will fail because the RPC framework will # encrypt messages to a guest which potentially doesn't # have the code to handle it. if CONF.enable_secure_rpc_messaging and ( self.db_info.encrypted_key is None): encrypted_key = cu.encode_data(cu.encrypt_data( cu.generate_random_key(), CONF.inst_rpc_key_encr_key)) self.update_db(encrypted_key=encrypted_key) LOG.debug("Generated unique RPC encryption key for " "instance = %s, key = %s" % (self.id, encrypted_key)) injected_files = self.get_injected_files( datastore_version.manager) LOG.debug("Rebuilding instance %(instance)s with image %(image)s.", {'instance': self, 'image': datastore_version.image_id}) self.server.rebuild(datastore_version.image_id, files=injected_files) utils.poll_until( server_finished_rebuilding, sleep_time=2, time_out=600) if not self.server_status_matches(['ACTIVE']): raise TroveError(_("Instance %(instance)s failed to " "upgrade to %(datastore_version)s") % {'instance': self, 'datastore_version': datastore_version}) if volume: upgrade_info['device'] = volume_device self.guest.post_upgrade(upgrade_info) self.reset_task_status() except Exception as e: LOG.exception(e) err = inst_models.InstanceTasks.BUILDING_ERROR_SERVER self.update_db(task_status=err) raise e # Some cinder drivers appear to return "vdb" instead of "/dev/vdb". # We need to account for that. 
    def _fix_device_path(self, device):
        """Normalize a cinder attachment device name to a full path.

        Some cinder drivers appear to return "vdb" instead of
        "/dev/vdb"; prepend "/dev/" when it is missing.
        """
        if device.startswith("/dev"):
            return device
        else:
            return "/dev/%s" % device


class BackupTasks(object):
    """Asynchronous backup tasks: deleting backup objects from swift
    and keeping the backup DB records in sync."""

    @classmethod
    def _parse_manifest(cls, manifest):
        # manifest is in the format 'container/prefix'
        # where prefix can be 'path' or 'lots/of/paths'
        try:
            container_index = manifest.index('/')
            prefix_index = container_index + 1
        except ValueError:
            # No '/' separator: not a valid manifest reference.
            return None, None
        container = manifest[:container_index]
        prefix = manifest[prefix_index:]
        return container, prefix

    @classmethod
    def delete_files_from_swift(cls, context, filename):
        """Delete a backup object from swift.

        Static large objects are deleted together with their segments
        via the multipart-manifest=delete query string.
        """
        container = CONF.backup_swift_container
        client = remote.create_swift_client(context)
        obj = client.head_object(container, filename)
        if 'x-static-large-object' in obj:
            # Static large object
            LOG.debug("Deleting large object file: %(cont)s/%(filename)s" %
                      {'cont': container, 'filename': filename})
            client.delete_object(container, filename,
                                 query_string='multipart-manifest=delete')
        else:
            # Single object
            LOG.debug("Deleting object file: %(cont)s/%(filename)s" %
                      {'cont': container, 'filename': filename})
            client.delete_object(container, filename)

    @classmethod
    def delete_backup(cls, context, backup_id):
        """Delete backup from swift."""
        LOG.info(_("Deleting backup %s.") % backup_id)
        backup = bkup_models.Backup.get_by_id(context, backup_id)
        try:
            filename = backup.filename
            if filename:
                BackupTasks.delete_files_from_swift(context, filename)
        except ValueError:
            # Filename info was malformed; just remove the DB record.
            backup.delete()
        except ClientException as e:
            if e.http_status == 404:
                # Backup already deleted in swift
                backup.delete()
            else:
                # Keep the record, mark it failed, and surface the error.
                LOG.exception(_("Error occurred when deleting from swift. "
                                "Details: %s") % e)
                backup.state = bkup_models.BackupState.DELETE_FAILED
                backup.save()
                raise TroveError(_("Failed to delete swift object for backup "
                                   "%s.") % backup_id)
        else:
            backup.delete()
        LOG.info(_("Deleted backup %s successfully.") % backup_id)


class ModuleTasks(object):
    """Asynchronous module tasks (bulk reapply across instances)."""

    @classmethod
    def reapply_module(cls, context, module_id, md5, include_clustered,
                       batch_size, batch_delay, force):
        """Reapply module."""
        LOG.info(_("Reapplying module %s.") % module_id)
        batch_size = batch_size or CONF.module_reapply_max_batch_size
        batch_delay = batch_delay or CONF.module_reapply_min_batch_delay
        # Don't let non-admin bypass the safeguards
        if not context.is_admin:
            batch_size = min(batch_size, CONF.module_reapply_max_batch_size)
            batch_delay = max(batch_delay, CONF.module_reapply_min_batch_delay)
        modules = module_models.Modules.load_by_ids(context, [module_id])
        current_md5 = modules[0].md5
        LOG.debug("MD5: %s Force: %s." % (md5, force))

        # Process all the instances
        instance_modules = module_models.InstanceModules.load_all(
            context, module_id=module_id, md5=md5)
        total_count = instance_modules.count()
        reapply_count = 0
        skipped_count = 0
        if instance_modules:
            module_list = module_views.convert_modules_to_list(modules)
            for instance_module in instance_modules:
                instance_id = instance_module.instance_id
                # Only reapply when the applied md5 is stale (or force),
                # and when an md5 filter was given, only matching rows.
                if (instance_module.md5 != current_md5 or force) and (
                        not md5 or md5 == instance_module.md5):
                    instance = BuiltInstanceTasks.load(context, instance_id,
                                                       needs_server=False)
                    if instance and (
                            include_clustered or not instance.cluster_id):
                        try:
                            module_models.Modules.validate(
                                modules, instance.datastore.id,
                                instance.datastore_version.id)
                            client = create_guest_client(context, instance_id)
                            client.module_apply(module_list)
                            Instance.add_instance_modules(
                                context, instance_id, modules)
                            reapply_count += 1
                        except exception.ModuleInvalid as ex:
                            LOG.info(_("Skipping: %s") % ex)
                            skipped_count += 1

                        # Sleep if we've fired off too many in a row.
if (batch_size and not reapply_count % batch_size and (reapply_count + skipped_count) < total_count): LOG.debug("Applied module to %d of %d instances - " "sleeping for %ds" % (reapply_count, total_count, batch_delay)) time.sleep(batch_delay) else: LOG.debug("Instance '%s' not found or doesn't match " "criteria, skipping reapply." % instance_id) skipped_count += 1 else: LOG.debug("Instance '%s' does not match " "criteria, skipping reapply." % instance_id) skipped_count += 1 LOG.info(_("Reapplied module to %(num)d instances (skipped %(skip)d).") % {'num': reapply_count, 'skip': skipped_count}) class ResizeVolumeAction(object): """Performs volume resize action.""" def __init__(self, instance, old_size, new_size): self.instance = instance self.old_size = int(old_size) self.new_size = int(new_size) def get_mount_point(self): mount_point = CONF.get( self.instance.datastore_version.manager).mount_point return mount_point def get_device_path(self): return self.instance.device_path def _fail(self, orig_func): LOG.exception(_("%(func)s encountered an error when " "attempting to resize the volume for " "instance %(id)s. Setting service " "status to failed.") % {'func': orig_func.__name__, 'id': self.instance.id}) service = InstanceServiceStatus.find_by(instance_id=self.instance.id) service.set_status(ServiceStatuses.FAILED) service.save() def _recover_restart(self, orig_func): LOG.exception(_("%(func)s encountered an error when attempting to " "resize the volume for instance %(id)s. Trying to " "recover by restarting the " "guest.") % {'func': orig_func.__name__, 'id': self.instance.id}) self.instance.restart() def _recover_mount_restart(self, orig_func): LOG.exception(_("%(func)s encountered an error when attempting to " "resize the volume for instance %(id)s. 
Trying to " "recover by mounting the volume and then restarting " "the guest.") % {'func': orig_func.__name__, 'id': self.instance.id}) self._mount_volume() self.instance.restart() def _recover_full(self, orig_func): LOG.exception(_("%(func)s encountered an error when attempting to " "resize the volume for instance %(id)s. Trying to " "recover by attaching and" " mounting the volume and then restarting the " "guest.") % {'func': orig_func.__name__, 'id': self.instance.id}) self._attach_volume() self._mount_volume() self.instance.restart() def _stop_db(self): LOG.debug("Instance %s calling stop_db." % self.instance.id) self.instance.guest.stop_db() @try_recover def _unmount_volume(self): LOG.debug("Unmounting the volume on instance %(id)s" % { 'id': self.instance.id}) mount_point = self.get_mount_point() device_path = self.get_device_path() self.instance.guest.unmount_volume(device_path=device_path, mount_point=mount_point) LOG.debug("Successfully unmounted the volume %(vol_id)s for " "instance %(id)s" % {'vol_id': self.instance.volume_id, 'id': self.instance.id}) @try_recover def _detach_volume(self): LOG.debug("Detach volume %(vol_id)s from instance %(id)s" % { 'vol_id': self.instance.volume_id, 'id': self.instance.id}) self.instance.nova_client.volumes.delete_server_volume( self.instance.server.id, self.instance.volume_id) def volume_available(): volume = self.instance.volume_client.volumes.get( self.instance.volume_id) return volume.status == 'available' utils.poll_until(volume_available, sleep_time=2, time_out=CONF.volume_time_out) LOG.debug("Successfully detached volume %(vol_id)s from instance " "%(id)s" % {'vol_id': self.instance.volume_id, 'id': self.instance.id}) @try_recover def _attach_volume(self): device_path = self.get_device_path() LOG.debug("Attach volume %(vol_id)s to instance %(id)s at " "%(dev)s" % {'vol_id': self.instance.volume_id, 'id': self.instance.id, 'dev': device_path}) self.instance.nova_client.volumes.create_server_volume( 
self.instance.server.id, self.instance.volume_id, device_path) def volume_in_use(): volume = self.instance.volume_client.volumes.get( self.instance.volume_id) return volume.status == 'in-use' utils.poll_until(volume_in_use, sleep_time=2, time_out=CONF.volume_time_out) LOG.debug("Successfully attached volume %(vol_id)s to instance " "%(id)s" % {'vol_id': self.instance.volume_id, 'id': self.instance.id}) @try_recover def _resize_fs(self): LOG.debug("Resizing the filesystem for instance %(id)s" % { 'id': self.instance.id}) mount_point = self.get_mount_point() device_path = self.get_device_path() self.instance.guest.resize_fs(device_path=device_path, mount_point=mount_point) LOG.debug("Successfully resized volume %(vol_id)s filesystem for " "instance %(id)s" % {'vol_id': self.instance.volume_id, 'id': self.instance.id}) @try_recover def _mount_volume(self): LOG.debug("Mount the volume on instance %(id)s" % { 'id': self.instance.id}) mount_point = self.get_mount_point() device_path = self.get_device_path() self.instance.guest.mount_volume(device_path=device_path, mount_point=mount_point) LOG.debug("Successfully mounted the volume %(vol_id)s on instance " "%(id)s" % {'vol_id': self.instance.volume_id, 'id': self.instance.id}) @try_recover def _extend(self): LOG.debug("Extending volume %(vol_id)s for instance %(id)s to " "size %(size)s" % {'vol_id': self.instance.volume_id, 'id': self.instance.id, 'size': self.new_size}) self.instance.volume_client.volumes.extend(self.instance.volume_id, self.new_size) LOG.debug("Successfully extended the volume %(vol_id)s for instance " "%(id)s" % {'vol_id': self.instance.volume_id, 'id': self.instance.id}) def _verify_extend(self): try: volume = self.instance.volume_client.volumes.get( self.instance.volume_id) if not volume: msg = (_('Failed to get volume %(vol_id)s') % { 'vol_id': self.instance.volume_id}) raise cinder_exceptions.ClientException(msg) def volume_is_new_size(): volume = self.instance.volume_client.volumes.get( 
self.instance.volume_id) return volume.size == self.new_size utils.poll_until(volume_is_new_size, sleep_time=2, time_out=CONF.volume_time_out) self.instance.update_db(volume_size=self.new_size) except PollTimeOut: LOG.exception(_("Timeout trying to extend the volume %(vol_id)s " "for instance %(id)s") % { 'vol_id': self.instance.volume_id, 'id': self.instance.id}) volume = self.instance.volume_client.volumes.get( self.instance.volume_id) if volume.status == 'extending': self._fail(self._verify_extend) elif volume.size != self.new_size: self.instance.update_db(volume_size=volume.size) self._recover_full(self._verify_extend) raise except Exception: LOG.exception(_("Error encountered trying to verify extend for " "the volume %(vol_id)s for instance %(id)s") % { 'vol_id': self.instance.volume_id, 'id': self.instance.id}) self._recover_full(self._verify_extend) raise def _resize_active_volume(self): LOG.debug("Begin _resize_active_volume for id: %(id)s" % { 'id': self.instance.id}) self._stop_db() self._unmount_volume(recover_func=self._recover_restart) self._detach_volume(recover_func=self._recover_mount_restart) self._extend(recover_func=self._recover_full) self._verify_extend() # if anything fails after this point, recovery is futile self._attach_volume(recover_func=self._fail) self._resize_fs(recover_func=self._fail) self._mount_volume(recover_func=self._fail) self.instance.restart() LOG.debug("End _resize_active_volume for id: %(id)s" % { 'id': self.instance.id}) def execute(self): LOG.debug("%(gt)s: Resizing instance %(id)s volume for server " "%(server_id)s from %(old_volume_size)s to " "%(new_size)r GB" % {'gt': greenthread.getcurrent(), 'id': self.instance.id, 'server_id': self.instance.server.id, 'old_volume_size': self.old_size, 'new_size': self.new_size}) if self.instance.server.status == InstanceStatus.ACTIVE: self._resize_active_volume() self.instance.reset_task_status() # send usage event for size reported by cinder volume = 
self.instance.volume_client.volumes.get( self.instance.volume_id) launched_time = timeutils.isotime(self.instance.updated) modified_time = timeutils.isotime(self.instance.updated) TroveInstanceModifyVolume(instance=self.instance, old_volume_size=self.old_size, launched_at=launched_time, modify_at=modified_time, volume_size=volume.size, ).notify() else: self.instance.reset_task_status() msg = _("Failed to resize instance %(id)s volume for server " "%(server_id)s. The instance must be in state %(state)s " "not %(inst_state)s.") % { 'id': self.instance.id, 'server_id': self.instance.server.id, 'state': InstanceStatus.ACTIVE, 'inst_state': self.instance.server.status} raise TroveError(msg) class ResizeActionBase(object): """Base class for executing a resize action.""" def __init__(self, instance): """ Creates a new resize action for a given instance :param instance: reference to existing instance that will be resized :type instance: trove.taskmanager.models.BuiltInstanceTasks """ self.instance = instance def _assert_guest_is_ok(self): # The guest will never set the status to PAUSED. self.instance.set_datastore_status_to_paused() # Now we wait until it sets it to anything at all, # so we know it's alive. utils.poll_until( self._guest_is_awake, sleep_time=2, time_out=RESIZE_TIME_OUT) def _assert_nova_status_is_ok(self): # Make sure Nova thinks things went well. if not self.instance.server_status_matches(["VERIFY_RESIZE"]): msg = "Migration failed! status=%(act_status)s and " \ "not %(exp_status)s" % { "act_status": self.instance.server.status, "exp_status": 'VERIFY_RESIZE'} raise TroveError(msg) def _assert_datastore_is_ok(self): # Tell the guest to turn on datastore, and ensure the status becomes # RUNNING. self._start_datastore() utils.poll_until( self._datastore_is_online, sleep_time=2, time_out=RESIZE_TIME_OUT) def _assert_datastore_is_offline(self): # Tell the guest to turn off MySQL, and ensure the status becomes # SHUTDOWN. 
self.instance.guest.stop_db(do_not_start_on_reboot=True) utils.poll_until( self._datastore_is_offline, sleep_time=2, time_out=RESIZE_TIME_OUT) def _assert_processes_are_ok(self): """Checks the procs; if anything is wrong, reverts the operation.""" # Tell the guest to turn back on, and make sure it can start. self._assert_guest_is_ok() LOG.debug("Nova guest is ok.") self._assert_datastore_is_ok() LOG.debug("Datastore is ok.") def _confirm_nova_action(self): LOG.debug("Instance %s calling Compute confirm resize..." % self.instance.id) self.instance.server.confirm_resize() def _datastore_is_online(self): self.instance._refresh_datastore_status() return self.instance.is_datastore_running def _datastore_is_offline(self): self.instance._refresh_datastore_status() return (self.instance.datastore_status_matches( rd_instance.ServiceStatuses.SHUTDOWN)) def _revert_nova_action(self): LOG.debug("Instance %s calling Compute revert resize..." % self.instance.id) self.instance.server.revert_resize() def execute(self): """Initiates the action.""" try: LOG.debug("Instance %s calling stop_db..." 
% self.instance.id) self._assert_datastore_is_offline() self._perform_nova_action() finally: if self.instance.db_info.task_status != ( inst_models.InstanceTasks.NONE): self.instance.reset_task_status() def _guest_is_awake(self): self.instance._refresh_datastore_status() return not self.instance.datastore_status_matches( rd_instance.ServiceStatuses.PAUSED) def _perform_nova_action(self): """Calls Nova to resize or migrate an instance, and confirms.""" LOG.debug("Begin resize method _perform_nova_action instance: %s" % self.instance.id) need_to_revert = False try: LOG.debug("Initiating nova action") self._initiate_nova_action() LOG.debug("Waiting for nova action") self._wait_for_nova_action() LOG.debug("Asserting nova status is ok") self._assert_nova_status_is_ok() need_to_revert = True LOG.debug("* * * REVERT BARRIER PASSED * * *") LOG.debug("Asserting nova action success") self._assert_nova_action_was_successful() LOG.debug("Asserting processes are OK") self._assert_processes_are_ok() LOG.debug("Confirming nova action") self._confirm_nova_action() except Exception: LOG.exception(_("Exception during nova action.")) if need_to_revert: LOG.error(_("Reverting action for instance %s") % self.instance.id) self._revert_nova_action() self._wait_for_revert_nova_action() if self.instance.server_status_matches(['ACTIVE']): LOG.error(_("Restarting datastore.")) self.instance.guest.restart() else: LOG.error(_("Cannot restart datastore because " "Nova server status is not ACTIVE")) LOG.error(_("Error resizing instance %s.") % self.instance.id) raise LOG.debug("Recording success") self._record_action_success() LOG.debug("End resize method _perform_nova_action instance: %s" % self.instance.id) def _wait_for_nova_action(self): # Wait for the flavor to change. 
def update_server_info(): self.instance.refresh_compute_server_info() return not self.instance.server_status_matches(['RESIZE']) utils.poll_until( update_server_info, sleep_time=2, time_out=RESIZE_TIME_OUT) def _wait_for_revert_nova_action(self): # Wait for the server to return to ACTIVE after revert. def update_server_info(): self.instance.refresh_compute_server_info() return self.instance.server_status_matches(['ACTIVE']) utils.poll_until( update_server_info, sleep_time=2, time_out=REVERT_TIME_OUT) class ResizeAction(ResizeActionBase): def __init__(self, instance, old_flavor, new_flavor): """ :type instance: trove.taskmanager.models.BuiltInstanceTasks :type old_flavor: dict :type new_flavor: dict """ super(ResizeAction, self).__init__(instance) self.old_flavor = old_flavor self.new_flavor = new_flavor self.new_flavor_id = new_flavor['id'] def _assert_nova_action_was_successful(self): # Do check to make sure the status and flavor id are correct. if str(self.instance.server.flavor['id']) != str(self.new_flavor_id): msg = "Assertion failed! flavor_id=%s and not %s" \ % (self.instance.server.flavor['id'], self.new_flavor_id) raise TroveError(msg) def _initiate_nova_action(self): self.instance.server.resize(self.new_flavor_id) def _revert_nova_action(self): LOG.debug("Instance %s calling Compute revert resize... " "Repairing config." % self.instance.id) try: config = self.instance._render_config(self.old_flavor) config = {'config_contents': config.config_contents} self.instance.guest.reset_configuration(config) except GuestTimeout: LOG.exception(_("Error sending reset_configuration call.")) LOG.debug("Reverting resize.") super(ResizeAction, self)._revert_nova_action() def _record_action_success(self): LOG.debug("Updating instance %(id)s to flavor_id %(flavor_id)s." 
% {'id': self.instance.id, 'flavor_id': self.new_flavor_id}) self.instance.update_db(flavor_id=self.new_flavor_id, task_status=inst_models.InstanceTasks.NONE) update_time = timeutils.isotime(self.instance.updated) TroveInstanceModifyFlavor(instance=self.instance, old_instance_size=self.old_flavor['ram'], instance_size=self.new_flavor['ram'], launched_at=update_time, modify_at=update_time, server=self.instance.server).notify() def _start_datastore(self): config = self.instance._render_config(self.new_flavor) self.instance.guest.start_db_with_conf_changes(config.config_contents) class MigrateAction(ResizeActionBase): def __init__(self, instance, host=None): super(MigrateAction, self).__init__(instance) self.instance = instance self.host = host def _assert_nova_action_was_successful(self): LOG.debug("Currently no assertions for a Migrate Action") def _initiate_nova_action(self): LOG.debug("Migrating instance %(instance)s without flavor change ...\n" "Forcing migration to host(%(host)s)" % {"instance": self.instance.id, "host": self.host}) self.instance.server.migrate(force_host=self.host) def _record_action_success(self): LOG.debug("Successfully finished Migration to " "%(hostname)s: %(id)s" % {'hostname': self.instance.hostname, 'id': self.instance.id}) def _start_datastore(self): self.instance.guest.restart() def load_cluster_tasks(context, cluster_id): manager = Cluster.manager_from_cluster_id(context, cluster_id) strat = strategy.load_taskmanager_strategy(manager) task_manager_cluster_tasks_class = strat.task_manager_cluster_tasks_class return ClusterTasks.load(context, cluster_id, task_manager_cluster_tasks_class)
LOG.info(_("Retrieving guest log list for instance %s.") % self.id) try: return self.guest.guest_log_list() except GuestError: LOG.error(_("Failed to retrieve guest log list for instance " "%s.") % self.id) finally: self.reset_task_status()
main.py
#!/usr/bin/env python3 # -*- encoding: utf-8 -*- from telegram.ext import Updater, CommandHandler, MessageHandler, Filters, InlineQueryHandler import telegram as tg import requests import json import os import io import time import logging from datetime import timedelta import translate import random import praw REDDIT_BOT_ID = os.environ['REDDIT_BOT_ID'] REDDIT_BOT_SECRET = os.environ['REDDIT_BOT_SECRET'] REDDIT_USER_AGENT = os.environ['REDDIT_USER_AGENT'] USER_AGENT_BROWSER = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36' royalTitles = ["Lé", "Baron", "König", "Archlord", "Genius", "Ritter", "Curry", "Burger", "Mc", "Doktor", "Gentoomaster", "Chef", "Lead Developer"] firstFrag = ["Schm", "J", "Hans-J", "K", "G", "Gr", "B", "Str", "Kr", "Rask"] secondFrag = ["oerg", "öck", "öhhhrk", "öhrp", "egor", "oeg", "ock"] thirdFrag = ["inger", "erino", "aroni", "us", "sell", "topus", "thulu", "tain", "rid", "odil", "ette", "nikov"] nobleAnnex = ["I.", "II.", "III.", "Royale", "dem Allmächtigen", "dem Weisen", "dem hochgradig Intelligenten", "dem Unendlichen", "dem Allwissenden", "dem Gentoobändiger", "dem Meisterinformatiker"] wisdoms = ["Linux ist voll doof!", "Ich stehe immer um 7.00 Uhr auf!", "Tut schön viel Frischkäse in die Nudelsoße!", "Mensen um 11.00 Uhr ist eine super Sache!", "Ich habe WinRar gekauft!", "Für einen längeren XP-Supportzeitraum!", "Fasst meinen Laptopbildschirm an!", "Natürlich code ich dieses Feature für euch, ganz ohne Pull Request!", "Maxime ist ein toller Papa!", "Hirtenkäsepizza ist die beste!", "Sauerkraut ist doch ekelhaft!", "Mein Lieblingsbrowser ist ja der Internet Explorer!", "Rechtschreibfehler in Kommentaren? Voll okay!", "Party? Warum nicht bei mir zu Hause?", "Irgendwas mit dynamisch Parameter injecten!", "Wie war das mit den Speisezeiten?", "Ich kaufe nur bei Nvidia!", "Wer braucht schon Open Source...", "KöckOS? 
Kommt noch diese Woche raus!", "Die besten Witze sind Deine-Mutter-Witze!", "Mein Lieblings-OS ist iOS!", "Ein Halloumiburger ist eine eigenständige Mahlzeit!", "Ich kaufe mir ein MacBook!", "Ich fange wieder mit Medieninformatik an!", "Ich liebe Ubuntu!", "Verschlüsselung ist doch Unsinn!", "Machen wir alle ne gemeinsame WG auf?"] haes = ["HÄ?", "VALORANT?", "WIE", "WANN", "WO", "Geller muss erst noch zu Ende essen!", "???", "*Random Katzenbild*", "Erstmal Valorant!", "ICH HASSE EUCH ALLE", "HÄÄÄ", "ICH ARBEITE", "ICH HASSE DEN", "FUCK YOU", "WIRKLICH", "BITTE", "Natürlich ist das gelb!", "Es gibt Kuchen!", "Wir haben wieder viel zu viel Lasagne!", "Oke", "WAS", "WAS MEINST DU", "WAS WILLST DU DENN JETZT SCHON WIEDER", "Alter", "Wirst schon sehen", "Denk nach du Schwamm", "Stop", "NICHT COOL", "TROLL NICHT RUM", "Uff", "AAAAARGH", "Kann den jemand kicken?", "DU HAST NUR ANGST VOR MIR", "EKELHAFT", "ICH HASSE ALLES", "WOFÜR", "ICH BIN IMMER SO", "KUCHEN", "LASAGNE", "SCHANDE", "WARUM ICH", "ICH LIEBE ARBEITEN", "ICH HASSE UNPÜNKTLICHKEIT", "IDIOT", "HEY", "WO SEID IHR", "WAS SONST", "KIBA", "HAHA", "VERSTEHT IHR DAS NICHT", "SEID IHR DUMM ODER WAS", "WTF", "RED DEUTSCH MIT MIR", "OMG", "LOL", ":)", "MIR IST LANGWEILIG", "ALS OB IHR ALLE SCHON SCHLAFT", "HALLO", "WEIß ICH NICHT", "WER DENKT SICH DAS AUS", "ICH SPRING LIEBER AUS DEM FENSTER", "NE"] class NotifyUserException(Exception): """Raised whenever an error needs to be propagated to the user""" pass def start(update, context): context.bot.send_message(chat_id=update.message.chat_id, text="Reichenbach is never an option!") def echoText(update, context): context.bot.send_message(chat_id=update.message.chat_id, text=update.message.text) def echoSticker(update, context): sticker = update.message.s
xt): params = context.args if len(params) < 1: daysToAdd = 0 else: try: daysToAdd = int(params[0]) except ValueError: context.bot.send_message(chat_id=update.message.chat_id, text="The first and only parameter has to be an integer value. Aborting.") return day = update.message.date.date() + timedelta(days=daysToAdd) url = "https://openmensa.org/api/v2/canteens/79/days/" + day.strftime("%Y-%m-%d") + "/meals" resp = requests.get(url) if not resp.ok: context.bot.send_message(chat_id=update.message.chat_id, text="I failed miserably. Disgrace!") return jsonData = json.loads(resp.content) for elem in jsonData: mealNotes = elem["notes"] if "vegetarisch" in mealNotes or "vegan" in mealNotes: context.bot.send_message(chat_id=update.message.chat_id, text="*" + elem["name"] + "*", parse_mode="Markdown") else: context.bot.send_message(chat_id=update.message.chat_id, text="_" + elem["name"] + "_", parse_mode="Markdown") def andre(update, context): context.bot.send_message(chat_id=update.message.chat_id, text="Höhöhö Reichenbach!") def leon(update, context): joke = dadJoke() context.bot.send_message(chat_id=update.message.chat_id, text=joke) def loen(update, context): joke = dadJoke() translator = translate.Translator(from_lang='en', to_lang='de') translatedJoke = translator.translate(joke) context.bot.send_message(chat_id=update.message.chat_id, text=translatedJoke) def dadJoke(): headers = {'Accept': 'text/plain '} resp = requests.get("https://icanhazdadjoke.com/", headers=headers) if not resp.ok: return "I failed miserably. Disgrace!" return resp.text def georg(update, context): context.bot.send_message(chat_id=update.message.chat_id, text="https://wiki.archlinux.org/index.php/Installation_guide") def maxime(update, context): context.bot.send_sticker(chat_id=update.message.chat_id, sticker="CAADBQADfAMAAukKyAPfAAFRgAuYdNoWBA") def andrey(update, context): context.bot.send_message(chat_id=update.message.chat_id, text="11.00 Bois. 
Yeef!") def steffuu(update, context): context.bot.send_message(chat_id=update.message.chat_id, text=random.choice(haes)) def getXkcd(id, rand): resp = requests.get("https://xkcd.com/info.0.json") if not resp.ok: raise NotifyUserException("I failed miserably. Disgrace!") jsonData = json.loads(resp.content) upperLimit = jsonData["num"] if rand: id = random.randint(1, upperLimit) elif id > upperLimit: raise NotifyUserException("Id not in range. Maximum id currently is " + str(upperLimit) + ".") resp = requests.get("https://xkcd.com/" + str(id) + "/info.0.json") if not resp.ok: raise NotifyUserException("I failed miserably. Disgrace!") jsonData = json.loads(resp.content) return (id, jsonData["img"], jsonData["title"]) def xkcd(update, context): params = context.args rand = False id = 0 if len(params) < 1: rand = True else: try: id = int(params[0]) except ValueError: context.bot.send_message(chat_id=update.message.chat_id, text="The first and only parameter has to be a positive integer value greater than 0. Aborting.") return if id < 1: context.bot.send_message(chat_id=update.message.chat_id, text="The first and only parameter has to be a positive integer value greater than 0. 
Aborting.") return try: xkcd = getXkcd(id, rand) except NotifyUserException as error: context.bot.send_message(chat_id=update.message.chat_id, text=str(error)) return context.bot.send_photo(chat_id=update.message.chat_id, photo=xkcd[1], caption=str(xkcd[0]) + " - " + xkcd[2]) def decision(update, context): headers = {'Accept': 'text/plain '} resp = requests.get("https://yesno.wtf/api/", headers=headers) if not resp.ok: raise NotifyUserException("oof") data = json.loads(resp.text) context.bot.send_animation(chat_id=update.message.chat_id, animation=data["image"], caption=data["answer"]) def subredditImg(subreddit, offset=0, count=5): imageFileEndings = [".png", ".jpg", ".jpeg", ".webp", ".gif"] reddit = praw.Reddit(client_id=REDDIT_BOT_ID, client_secret=REDDIT_BOT_SECRET, user_agent=REDDIT_USER_AGENT) images = [] for post in reddit.subreddit(subreddit).hot(limit=count): for ending in imageFileEndings: if str(post.url).endswith(ending): images.append(post.url) return images def r(update, context): params = context.args offset = 0 if len(params) < 1: context.bot.send_message(chat_id=update.message.chat_id, text="The first parameter has to be a string identifying the requested subreddit. Aborting.") return subreddit = params[0] if len(params) > 1: try: offset = int(params[1]) except ValueError: context.bot.send_message(chat_id=update.message.chat_id, text="The second parameter has to be a positive integer value. Aborting.") return if offset < 0: context.bot.send_message(chat_id=update.message.chat_id, text="The second parameter has to be a positive integer value. Aborting.") return try: images = subredditImg(subreddit) except Exception: context.bot.send_message(chat_id=update.message.chat_id, text="Something went wrong internally. 
I am deeply sorry.") return if len(images) == 0: context.bot.send_message(chat_id=update.message.chat_id, text="There are no images in the top 5 posts.") return for image in images: context.bot.send_photo(chat_id=update.message.chat_id, photo=image) def cat(update, context): context.bot.send_photo( chat_id=update.message.chat_id, photo="https://thiscatdoesnotexist.com?time=" + str(time.time()) + str(random.randint(1, 1024)) ) def horse(update, context): context.bot.send_photo( chat_id=update.message.chat_id, photo="https://thishorsedoesnotexist.com?time=" + str(time.time()) + str(random.randint(1, 1024)) ) def person(update, context): resp = requests.get("https://thispersondoesnotexist.com/image?time=" + str(time.time()) + str(random.randint(1, 1024)), headers={'User-Agent': 'USER_AGENT_BROWSER'}) if not resp.ok: context.bot.send_message(chat_id=update.message.chat_id, text="Something went wrong internally. I am deeply sorry.") return with io.BytesIO(resp.content) as buf: context.bot.send_photo(chat_id=update.message.chat_id, photo=buf) def wisdom(update, context): wisdom = createWisdomString() context.bot.send_message(chat_id=update.message.chat_id, text=wisdom) def createWisdomString(): optionalNoble = None optionalThird = None optionalAnnex = None if bool(random.getrandbits(1)): optionalNoble = random.choice(royalTitles) if bool(random.getrandbits(1)): optionalThird = random.choice(thirdFrag) if bool(random.getrandbits(1)): optionalAnnex = random.choice(nobleAnnex) mainBody = random.choice(firstFrag) + random.choice(secondFrag) output = "Die heutige Weisheit von " if optionalNoble: output += optionalNoble + " " + mainBody else: output += mainBody if optionalThird: output += optionalThird if optionalAnnex: output += " " + optionalAnnex output += ": " + random.choice(wisdoms) return output def choose(update, context): params = context.args if len(params) < 1: context.bot.send_message(chat_id=update.message.chat_id, text="You know, I can't choose if there is 
nothing to choose from. Wise words!") return elif len(params) == 1: context.bot.send_message(chat_id=update.message.chat_id, text="How the hell am I supposed to choose when only value is entered? Gosh.") return else: context.bot.send_message(chat_id=update.message.chat_id, text=random.choice(params) + " shall be my answer!") def inlineR(update, context): query = update.inline_query.query results = [] try: images = subredditImg(query, count=40) except Exception: results.append(tg.InlineQueryResultArticle(0, "No", tg.InputTextMessageContent("No!"))) else: if len(images) == 0: results.append(tg.InlineQueryResultArticle(0, "No", "No!", )) else: for img in images: results.append(tg.InlineQueryResultPhoto(img, img, img)) finally: update.inline_query.answer(results) def main(): logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO) API_TOKEN = os.environ['TELEGRAM_APITOKEN'] APP_ADDR = os.environ['APP_ADDRESS'] PORT = int(os.environ.get('PORT', '8443')) updater = Updater(token=API_TOKEN, use_context=True) startHandler = CommandHandler('start', start) updater.dispatcher.add_handler(startHandler) mensaHandler = CommandHandler('mensa', mensa) updater.dispatcher.add_handler(mensaHandler) andreHandler = CommandHandler('andre', andre) updater.dispatcher.add_handler(andreHandler) leonHandler = CommandHandler('leon', leon) updater.dispatcher.add_handler(leonHandler) georgHandler = CommandHandler('georg', georg) updater.dispatcher.add_handler(georgHandler) loenHandler = CommandHandler('loen', loen) updater.dispatcher.add_handler(loenHandler) maximeHandler = CommandHandler('maxime', maxime) updater.dispatcher.add_handler(maximeHandler) andreyHandler = CommandHandler('andrey', andrey) updater.dispatcher.add_handler(andreyHandler) steffuuHandler = CommandHandler('steffuu', steffuu) updater.dispatcher.add_handler(steffuuHandler) xkcdHandler = CommandHandler('xkcd', xkcd) updater.dispatcher.add_handler(xkcdHandler) decisionHandler = 
CommandHandler('decision', decision) updater.dispatcher.add_handler(decisionHandler) redditImgHandler = CommandHandler('r', r) updater.dispatcher.add_handler(redditImgHandler) echoHandlerText = MessageHandler(Filters.text, echoText) updater.dispatcher.add_handler(echoHandlerText) echoHandlerSticker = MessageHandler(Filters.sticker, echoSticker) updater.dispatcher.add_handler(echoHandlerSticker) catHandler = CommandHandler('cat', cat) updater.dispatcher.add_handler(catHandler) horseHandler = CommandHandler('horse', horse) updater.dispatcher.add_handler(horseHandler) personHandler = CommandHandler('person', person) updater.dispatcher.add_handler(personHandler) wisdomHandler = CommandHandler('wisdom', wisdom) updater.dispatcher.add_handler(wisdomHandler) chooseHandler = CommandHandler('choose', choose) updater.dispatcher.add_handler(chooseHandler) inlineRedditHandler = InlineQueryHandler(inlineR) updater.dispatcher.add_handler(inlineRedditHandler) updater.start_webhook(listen="0.0.0.0", port=PORT, url_path=API_TOKEN) updater.bot.set_webhook(APP_ADDR + API_TOKEN) updater.idle() if __name__ == "__main__": main()
ticker context.bot.send_sticker(chat_id=update.message.chat_id, sticker=sticker) def mensa(update, conte
collection-file-viewer-action.tsx
// Copyright (C) The Arvados Authors. All rights reserved.
// SPDX-License-Identifier: AGPL-3.0 import { connect } from "react-redux"; import { RootState } from "../../../store/store"; import { FileViewerAction } from 'views-components/context-menu/actions/file-viewer-action'; import { getNodeValue } from "models/tree"; import { ContextMenuKind } from 'views-components/context-menu/context-menu'; import { getInlineFileUrl, sanitizeToken, isInlineFileUrlSafe } from "./helpers"; const mapStateToProps = (state: RootState) => { const { resource } = state.contextMenu; const currentCollectionUuid = state.collectionPanel.item ? state.collectionPanel.item.uuid : ''; if (resource && [ ContextMenuKind.COLLECTION_FILE_ITEM, ContextMenuKind.READONLY_COLLECTION_FILE_ITEM, ContextMenuKind.COLLECTION_DIRECTORY_ITEM, ContextMenuKind.READONLY_COLLECTION_DIRECTORY_ITEM ].indexOf(resource.menuKind as ContextMenuKind) > -1) { const file = getNodeValue(resource.uuid)(state.collectionPanelFiles); const shouldShowInlineUrl = isInlineFileUrlSafe( file ? file.url : "", state.auth.config.keepWebServiceUrl, state.auth.config.keepWebInlineServiceUrl ) || state.auth.config.clusterConfig.Collections.TrustAllContent; if (file && shouldShowInlineUrl) { const fileUrl = sanitizeToken(getInlineFileUrl( file.url, state.auth.config.keepWebServiceUrl, state.auth.config.keepWebInlineServiceUrl), true); return { href: fileUrl, kind: 'file', currentCollectionUuid }; } } return {}; }; export const CollectionFileViewerAction = connect(mapStateToProps)(FileViewerAction);
//
index.tsx
import * as React from 'react'; import { Component } from 'react'; import * as md from './index.md'; import Markdown from '@docs/components/Markdown/'; import CodeBox from '@docs/components/CodeBox/'; import ApiBox from '@docs/components/ApiBox/'; import Api from './api'; import CollapseDemo from './demo/collapseDemo'; import * as collapseDemoMd from './demo/collapseDemo.md'; const collapseDemoCode = require('!raw-loader!./demo/collapseDemo'); import CollapseCustom from './demo/collapseCustom'; import * as collapseCustomMd from './demo/collapseCustom.md'; const collapseCustomCode = require('!raw-loader!./demo/collapseCustom'); export default class
extends Component { render() { return ( <div> <Markdown text={md}/> <CodeBox text={collapseDemoMd} demo={<CollapseDemo/>} code={collapseDemoCode}/> <CodeBox text={collapseCustomMd} demo={<CollapseCustom/>} code={collapseCustomCode}/> <ApiBox api={Api}/> </div> ); } }
CollapsePage
SharedVideoDialog.js
// @flow import { FieldTextStateless } from '@atlaskit/field-text'; import React from 'react'; import { Dialog } from '../../../base/dialog'; import { translate } from '../../../base/i18n'; import { getFieldValue } from '../../../base/react'; import { connect } from '../../../base/redux'; import AbstractSharedVideoDialog from '../AbstractSharedVideoDialog'; /** * Component that renders the video share dialog. * * @returns {React$Element<any>} */ class
extends AbstractSharedVideoDialog<*> { /** * Instantiates a new component. * * @inheritdoc */ constructor(props) { super(props); this.state = { value: '', okDisabled: true }; this._onChange = this._onChange.bind(this); this._onSubmitValue = this._onSubmitValue.bind(this); } _onChange: Object => void; /** * Callback for the onChange event of the field. * * @param {Object} evt - The static event. * @returns {void} */ _onChange(evt: Object) { const linkValue = getFieldValue(evt); this.setState({ value: linkValue, okDisabled: !linkValue }); } _onSubmitValue: () => boolean; /** * Callback to be invoked when the value of the link input is submitted. * * @returns {boolean} */ _onSubmitValue() { return super._onSetVideoLink(this.state.value); } /** * Implements React's {@link Component#render()}. * * @inheritdoc */ render() { const { t } = this.props; return ( <Dialog hideCancelButton = { false } okDisabled = { this.state.okDisabled } okKey = { t('dialog.Share') } onSubmit = { this._onSubmitValue } titleKey = { t('dialog.shareVideoTitle') } width = { 'small' }> <FieldTextStateless autoFocus = { true } className = 'input-control' compact = { false } label = { t('dialog.videoLink') } name = 'sharedVideoUrl' onChange = { this._onChange } placeholder = { t('dialog.sharedVideoLinkPlaceholder') } shouldFitContainer = { true } type = 'text' value = { this.state.value } /> </Dialog> ); } _onChange: Object => void; } export default translate(connect()(SharedVideoDialog));
SharedVideoDialog
pedersen.go
package pedersen import ( "fmt" "io" "github.com/cronokirby/safenum" "github.com/Zondax/multi-party-sig/internal/params" "github.com/Zondax/multi-party-sig/pkg/math/arith" ) type Error string const ( ErrNilFields Error = "contains nil field" ErrSEqualT Error = "S cannot be equal to T" ErrNotValidModN Error = "S and T must be in [1,…,N-1] and coprime to N" ) func (e Error) Error() string { return fmt.Sprintf("pedersen: %s", string(e)) } type Parameters struct { n *arith.Modulus s, t *safenum.Nat } // New returns a new set of Pedersen parameters. // Assumes ValidateParameters(n, s, t) returns nil. func New(n *arith.Modulus, s, t *safenum.Nat) *Parameters {
// ValidateParameters check n, s and t, and returns an error if any of the following is true: // - n, s, or t is nil. // - s, t are not in [1, …,n-1]. // - s, t are not coprime to N. // - s = t. func ValidateParameters(n *safenum.Modulus, s, t *safenum.Nat) error { if n == nil || s == nil || t == nil { return ErrNilFields } // s, t ∈ ℤₙˣ if !arith.IsValidNatModN(n, s, t) { return ErrNotValidModN } // s ≡ t if _, eq, _ := s.Cmp(t); eq == 1 { return ErrSEqualT } return nil } // N = p•q, p ≡ q ≡ 3 mod 4. func (p Parameters) N() *safenum.Modulus { return p.n.Modulus } // S = r² mod N. func (p Parameters) S() *safenum.Nat { return p.s } // T = Sˡ mod N. func (p Parameters) T() *safenum.Nat { return p.t } // Commit computes sˣ tʸ (mod N) // // x and y are taken as safenum.Int, because we want to keep these values in secret, // in general. The commitment produced, on the other hand, hides their values, // and can be safely shared. func (p Parameters) Commit(x, y *safenum.Int) *safenum.Nat { sx := p.n.ExpI(p.s, x) ty := p.n.ExpI(p.t, y) result := sx.ModMul(sx, ty, p.n.Modulus) return result } // Verify returns true if sᵃ tᵇ ≡ S Tᵉ (mod N). func (p Parameters) Verify(a, b, e *safenum.Int, S, T *safenum.Nat) bool { if a == nil || b == nil || S == nil || T == nil || e == nil { return false } nMod := p.n.Modulus if !arith.IsValidNatModN(nMod, S, T) { return false } sa := p.n.ExpI(p.s, a) // sᵃ (mod N) tb := p.n.ExpI(p.t, b) // tᵇ (mod N) lhs := sa.ModMul(sa, tb, nMod) // lhs = sᵃ⋅tᵇ (mod N) te := p.n.ExpI(T, e) // Tᵉ (mod N) rhs := te.ModMul(te, S, nMod) // rhs = S⋅Tᵉ (mod N) return lhs.Eq(rhs) == 1 } // WriteTo implements io.WriterTo and should be used within the hash.Hash function. 
func (p *Parameters) WriteTo(w io.Writer) (int64, error) { if p == nil { return 0, io.ErrUnexpectedEOF } nAll := int64(0) buf := make([]byte, params.BytesIntModN) // write N, S, T for _, i := range []*safenum.Nat{p.n.Nat(), p.s, p.t} { i.FillBytes(buf) n, err := w.Write(buf) nAll += int64(n) if err != nil { return nAll, err } } return nAll, nil } // Domain implements hash.WriterToWithDomain, and separates this type within hash.Hash. func (Parameters) Domain() string { return "Pedersen Parameters" }
return &Parameters{ s: s, t: t, n: n, } }
latlng_test.go
package value import ( "testing" "github.com/stretchr/testify/assert" ) func TestLatLng_Clone(t *testing.T)
{ tests := []struct { Name string LL, Expected *LatLng }{ { Name: "nil latlng", }, { Name: "cloned", LL: &LatLng{ Lat: 10, Lng: 11, }, Expected: &LatLng{ Lat: 10, Lng: 11, }, }, } for _, tc := range tests { tc := tc t.Run(tc.Name, func(t *testing.T) { t.Parallel() res := tc.LL.Clone() assert.Equal(t, tc.Expected, res) if tc.Expected != nil { assert.NotSame(t, tc.Expected, res) } }) } }
send.rs
#[macro_use] extern crate log; extern crate env_logger; extern crate futures; extern crate messagebird_async; extern crate tokio_core; use futures::future::Future; use messagebird_async::errors::*; use messagebird_async::sms; use messagebird_async::sms::*; fn main() -> Result<(), MessageBirdError>
{ env_logger::init(); let msisdn_str = std::env::var("SMS_RECIPIENT".to_string()) .expect("Missing SMS_RECIPIENT environment variable"); let msisdn: Msisdn = Msisdn::from_str(msisdn_str.as_str())?; info!("example: sending a message"); let sendable = sms::send::SendParameters::builder() .payload( PayloadType::Sms, Payload::Text("fun".to_string()), PayloadEncoding::Auto, ) .origin(AlphaNumeric("inbox".to_string()).into()) .add_recipient(msisdn.into()) //.add_recipient(Recipient::new()) .build(); let accesskey = AccessKey::from_env()?; let fut = RequestSend::new(&sendable, &accesskey); let fut = fut.and_then(|sent_msg: Message| { info!("{:?}", sent_msg); futures::future::ok(()) }); let mut core = tokio_core::reactor::Core::new().unwrap(); core.run(fut.map(|_| ())) }
saml.go
// Copyright 2015 tsuru authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package api import ( "fmt" "net/http" "github.com/tsuru/tsuru/app" "github.com/tsuru/tsuru/auth" "github.com/tsuru/tsuru/auth/saml" "github.com/tsuru/tsuru/cmd" "github.com/tsuru/tsuru/errors" ) // title: saml metadata // path: /auth/saml // method: GET // produce: application/xml // responses: // 200: Ok // 400: Invalid data func samlMetadata(w http.ResponseWriter, r *http.Request) error { if app.AuthScheme.Name() != "saml" { return &errors.HTTP{ Code: http.StatusBadRequest, Message: "This URL is only supported with saml enabled", } } page, err := saml.Metadata() if err != nil { return err } w.Header().Set("Content-Type", "application/xml") w.Write([]byte(page)) return nil } // title: saml callback // path: /auth/saml // method: POST // responses: // 200: Ok // 400: Invalid data func samlCallbackLogin(w http.ResponseWriter, r *http.Request) error
{ if app.AuthScheme.Name() != "saml" { return &errors.HTTP{ Code: http.StatusBadRequest, Message: "This URL is only supported with saml enabled", } } params := map[string]string{} content := r.PostFormValue("SAMLResponse") if content == "" { return &errors.HTTP{Code: http.StatusBadRequest, Message: "Empty SAML Response"} } params["callback"] = "true" params["xml"] = content //Get saml.SAMLAuthScheme, error already treated on first check scheme, _ := auth.GetScheme("saml") _, err := scheme.Login(params) if err != nil { msg := fmt.Sprintf(cmd.SamlCallbackFailureMessage(), err.Error()) fmt.Fprint(w, msg) } else { fmt.Fprint(w, cmd.SamlCallbackSuccessMessage()) } return nil }
PopoverContainer.tsx
import { system } from '@codecademy/gamut-styles'; import { variance } from '@codecademy/variance'; import styled from '@emotion/styled'; import React, { useCallback, useEffect, useMemo, useRef, useState, } from 'react'; import { useIsomorphicLayoutEffect, useWindowScroll, useWindowSize, } from 'react-use'; import { BodyPortal } from '../BodyPortal'; import { FocusTrap } from '../FocusTrap'; import { ContainerState, PopoverContainerProps } from './types'; import { getContainers, getPosition, isInView } from './utils';
variance.compose( system.positioning, variance.create({ transform: { property: 'transform', }, }) ) ); export const PopoverContainer: React.FC<PopoverContainerProps> = ({ alignment = 'bottom-left', offset = 20, y = 0, x = 0, invertAxis, inline = false, isOpen, onRequestClose, targetRef, ...rest }) => { const popoverRef = useRef<HTMLDivElement>(null); const { width: winW, height: winH } = useWindowSize(); const { x: winX, y: winY } = useWindowScroll(); const [containers, setContainers] = useState<ContainerState>(); const parent = containers?.parent; const popoverPosition = useMemo(() => { if (parent !== undefined) { return getPosition({ alignment, container: parent, invertAxis, offset, x, y, }); } return {}; }, [parent, x, y, offset, alignment, invertAxis]); useEffect(() => { const target = targetRef?.current; if (!target) return; setContainers(getContainers(target, inline, { x: winX, y: winY })); }, [targetRef, inline, winW, winH, winX, winY]); useIsomorphicLayoutEffect(() => { if (containers?.viewport && !isInView(containers?.viewport)) { onRequestClose?.(); } }, [containers?.viewport, onRequestClose]); /** * Allows targetRef to be or contain a button that toggles the popover open and closed. * Without this check it would toggle closed then back open immediately. */ const handleClickOutside = useCallback( (e) => !targetRef.current?.contains(e.target as Node) && onRequestClose?.(), [onRequestClose, targetRef] ); if (!isOpen || !targetRef) return null; const content = ( <FocusTrap allowPageInteraction={inline} onClickOutside={handleClickOutside} onEscapeKey={onRequestClose} > <PopoverContent ref={popoverRef} tabIndex={-1} zIndex={inline ? 5 : 'initial'} position="absolute" data-testid="popover-content-container" {...popoverPosition} {...rest} /> </FocusTrap> ); if (inline) return content; return <BodyPortal>{content}</BodyPortal>; };
const PopoverContent = styled.div(
moderation.py
import discord from discord.ext import commands from discord.commands import slash_command, Option import asyncio from bot import GoModBot from discord.ui import InputText, Modal class Modal(Modal): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.add_item(InputText(label="What is your name?", placeholder="John Doe")) async def callback(self, interaction: discord.Interaction): await interaction.response.send_message(f"Hello, {self.children[0].value}!") class Moderation(commands.Cog): def __init__(self, bot): self.bot = bot @commands.Cog.listener() async def on_raw_reaction_add(self, payload): user = self.bot.get_user(payload.user_id) channel = self.bot.get_channel(payload.channel_id) message = await channel.fetch_message(payload.message_id) guild = self.bot.get_guild(payload.guild_id) if user is None or message is None or channel is None or guild is None: return member = guild.get_member(user.id) if member is None: return if user.bot: return lookup = await self.bot.db.fetch("SELECT * FROM reactroles WHERE message = $1 AND channel = $2", message.id, message.channel.id) if lookup: for entry in lookup: if str(payload.emoji) == str(entry['reaction']): role = discord.utils.get(guild.roles, id=entry['role']) if role == None: return if role in member.roles: pass else: try: await member.add_roles(role) except discord.Forbidden: embed = discord.Embed(title="Urgent message", description=f"A [reaction role]({message.jump_url}) in your server ({guild.name}) is failing to add roles to members. 
Please check if the reaction role's role ({role.name}) is below GoMod's role and GoMod is able to add roles.", color=discord.Color.red()) await guild.owner.send(embed=embed) @commands.Cog.listener() async def on_raw_reaction_remove(self, payload): user = self.bot.get_user(payload.user_id) channel = self.bot.get_channel(payload.channel_id) message = await channel.fetch_message(payload.message_id) guild = self.bot.get_guild(payload.guild_id) if user is None or message is None or channel is None or guild is None: return member = guild.get_member(user.id) if member is None: return if user.bot: return lookup = await self.bot.db.fetch("SELECT * FROM reactroles WHERE message = $1 AND channel = $2", message.id, message.channel.id) if lookup: for entry in lookup: if str(payload.emoji) == str(entry['reaction']): role = discord.utils.get(guild.roles, id=entry['role']) if role == None: return if role in member.roles: try: await member.remove_roles(role) except discord.Forbidden: embed = discord.Embed(title="Urgent message", description=f"A [reaction role]({message.jump_url}) in your server ({guild.name}) is failing to remove roles from members. 
Please check if the reaction role's role ({role.name}) is below GoMod's role and GoMod is able to remove roles.", color=discord.Color.red()) await guild.owner.send(embed=embed) @commands.Cog.listener() async def on_message_delete(self, message): lookup = await self.bot.db.fetchrow("SELECT * FROM reactroles WHERE message = $1 AND channel = $2", message.id, message.channel.id) if lookup: await self.bot.db.execute("DELETE FROM reactroles WHERE message = $1 AND channel = $2", message.id, message.channel.id) # @commands.command() # async def modaltest(self, ctx): # class MyView(discord.ui.View): # @discord.ui.button(label="Tell GoMod your name.", style=discord.ButtonStyle.primary) # async def button_callback(self, button, interaction): # modal = Modal(title="Greetings.") # await interaction.response.send_modal(modal) # view = MyView() # await ctx.send("Hello! I am GoMod.", view=view) @slash_command() async def kick(self, ctx, member: Option(discord.Member, "Member to kick"), reason: Option(str, "Reason for kicking", required=False)): """ Kick a member from the server. 
""" if not ctx.author.guild_permissions.kick_members: await ctx.respond("You do not have permission to kick members.", delete_after=3) return if member == ctx.author: await ctx.respond("You cannot kick yourself.", delete_after=3) return if len(member.roles) > 0 and member.top_role >= ctx.author.top_role: await ctx.respond("You cannot kick members with a higher role than you.", delete_after=3) return try: embed = discord.Embed(title=f"Kicked from {ctx.guild.name}", description=f"You have been kicked from {ctx.guild.name} by {ctx.author.name} with reason: {reason}", color=discord.Color.red()) await member.send(embed=embed) except: pass await ctx.guild.kick(member, reason=reason) embed = discord.Embed(title="Kicked", description=f"{member.mention} has been kicked from {ctx.guild.name} with reason: {reason}", color=0x00b2ff) await ctx.respond(embed=embed) @slash_command() async def ban(self, ctx, member: Option(discord.Member, "Member to ban"), reason: Option(str, "Reason for banning", required=False)): """ Bans a member from the server. 
""" if not ctx.author.guild_permissions.ban_members: await ctx.respond("You do not have the ban members permission.", delete_after=3) return if member == ctx.author: await ctx.respond("You cannot ban yourself.", delete_after=3) return if len(member.roles) > 0 and member.top_role >= ctx.author.top_role: await ctx.respond("You cannot ban members with a higher role than you.", delete_after=3) return try: embed = discord.Embed(title=f"Banned from {ctx.guild.name}", description=f"You have been banned from {ctx.guild.name} by {ctx.author.name} with reason: {reason}", color=discord.Color.red()) await member.send(embed=embed) except: pass await ctx.guild.ban(member, reason=reason) embed = discord.Embed(title="Banned", description=f"{member.mention} has been banned from {ctx.guild.name} with reason: {reason}", color=0x00b2ff) await ctx.respond(embed=embed) @slash_command() async def block(self, ctx, member: discord.Member): """ Blocks a member from the current channel. """ if not ctx.author.guild_permissions.manage_roles: await ctx.respond("You do not have the manage roles permission.", delete_after=3) return if member == ctx.author: await ctx.respond("You cannot block yourself.", delete_after=3) return if len(member.roles) > 0 and member.top_role >= ctx.author.top_role: await ctx.respond("You cannot block members with a higher role than you.", delete_after=3) return await ctx.channel.set_permissions(member, add_reactions = False, send_messages = False) embed = discord.Embed(title="Blocked", description=f"{member.mention} has been blocked from {ctx.channel.mention}", color=0x00b2ff) await ctx.respond(embed=embed) # @commands.command() # @commands.has_guild_permissions(manage_messages=True, manage_channels=True) # async def unblock(self, ctx, member: discord.Member): # if member == ctx.author: # await ctx.send("You cannot unblock yourself.", delete_after=3) # return # if member.top_role >= ctx.author.top_role: # await ctx.send("You cannot unblock members with a higher role 
than you.", delete_after=3) # return # await ctx.channel.set_permissions(member, add_reactions = True, send_messages = True) # embed = discord.Embed(title="Unblocked", description=f"{member.mention} has been unblocked from {ctx.channel.mention}", color=0x00b2ff) # await ctx.send(embed=embed) @slash_command() async def unblock(self, ctx, member: Option(discord.Member, "Member to unblock")): """ Unblocks a member from the current channel. """ if not ctx.author.guild_permissions.manage_roles: await ctx.respond("You do not have the manage roles permission.", delete_after=3) return if member == ctx.author: await ctx.respond("You cannot unblock yourself.", delete_after=3) return if len(member.roles) > 0 and member.top_role >= ctx.author.top_role: await ctx.respond("You cannot unblock members with a higher role than you.", delete_after=3) return await ctx.channel.set_permissions(member, add_reactions = True, send_messages = True) embed = discord.Embed(title="Unblocked", description=f"{member.mention} has been unblocked from {ctx.channel.mention}", color=0x00b2ff) await ctx.respond(embed=embed) @slash_command() async def warn(self, ctx, member: Option(discord.Member, "Member to warn"), reason: Option(str, "Reason for warning", required=False)): """ Warns a member. 
""" if not ctx.author.guild_permissions.manage_messages: await ctx.respond("You do not have the manage messages permission.", delete_after=3) return if member == ctx.author: await ctx.respond("You cannot warn yourself.", delete_after=3) return if len(member.roles) > 0 and member.top_role >= ctx.author.top_role: await ctx.respond("You cannot warn members with a higher role than you.", delete_after=3) return try: embed = discord.Embed(title=f"Warned", description=f"You have been warned from {ctx.guild.name} by {ctx.author.name} with reason: {reason}", color=discord.Color.orange()) await member.send(embed=embed) except: pass if reason == None: await self.bot.db.execute("INSERT INTO warns VALUES ($1, $2, $3, $4)", member.id, ctx.guild.id, ctx.author.id, "No reason given.") reason = "no reason" else: await self.bot.db.execute("INSERT INTO warns VALUES ($1, $2, $3, $4)", member.id, ctx.guild.id, ctx.author.id, reason) embed = discord.Embed(title="Warned", description=f"{member.mention} has been warned by {ctx.author.mention} for {reason}", color=0x00b2ff) await ctx.respond(embed=embed) @slash_command() async def clearwarns(self, ctx, member: Option(discord.Member, "Member to clear warnings for")): """ Clears all warnings for a member. 
""" if not ctx.author.guild_permissions.manage_messages: await ctx.respond("You do not have the manage messages permission.", delete_after=3) return if member == ctx.author: await ctx.respond("You cannot clear your own warnings.", delete_after=3) return if len(member.roles) > 0 and member.top_role >= ctx.author.top_role: await ctx.respond("You cannot clear warnings of members with a higher role than you.", delete_after=3) return await self.bot.db.execute("DELETE FROM warns WHERE userid = $1 AND serverid = $2", member.id, ctx.guild.id) embed = discord.Embed(title="Warns cleared", description=f"{member.mention}'s warnings have been cleared.", color=0x00b2ff) await ctx.respond(embed=embed) @slash_command() async def purge(self, ctx, amount: Option(int, "Amount of messages to delete", min_value=1, max_value=1000)): """ Purges a specified amount of messages from the current channel. """ if not ctx.author.guild_permissions.manage_messages: await ctx.respond("You do not have the manage messages permission.", delete_after=3) return await ctx.channel.purge(limit=amount+1) embed = discord.Embed(title="Messages purged", description=f"{amount} messages have been purged.", color=0x00b2ff) await ctx.send(embed=embed, delete_after=3) @slash_command() async def warns(self, ctx, member: Option(discord.Member, "Member to view warnings for")): """ Lists all the warns a member has. 
""" if not ctx.author.guild_permissions.manage_messages: await ctx.respond("You do not have the manage messages permission.", delete_after=3) return if member == ctx.author: await ctx.respond("You cannot view your own warnings.", delete_after=3) return if len(member.roles) > 0 and member.top_role >= ctx.author.top_role: await ctx.respond("You cannot view warnings of members with a higher role than you.", delete_after=3) return warns = await self.bot.db.fetch("SELECT * FROM warns WHERE userid = $1 AND serverid = $2", member.id, ctx.guild.id) if warns == []: embed = discord.Embed(title="No warns", description=f"{member.mention} has no warns.", color=0x00b2ff) await ctx.respond(embed=embed) return embed = discord.Embed(title="Warns", description=f"{member.mention} has {len(warns)} warns.", color=0x00b2ff) for warn in warns: embed.add_field(name=f"{warn['reason']}", value=f"Warned by {ctx.guild.get_member(warn['invokerid']).mention}", inline=False) await ctx.respond(embed=embed) @slash_command() async def reactrole(self, ctx, channel: Option(discord.TextChannel, "The channel the message is in"), message: Option(str, "The message that will have the reaction in ID form."), emoji: Option(str, "The emoji to react with"), role: Option(discord.Role, "The role to give to the user")): """ Run a reaction role setup. 
""" if not ctx.author.guild_permissions.manage_roles or not ctx.author.guild_permissions.manage_messages: await ctx.respond("You do not have the manage roles or manage messages permission.", delete_after=3) return try: id = int(message) except: await ctx.respond("The message ID must be an integer.", delete_after=3) return try: messageobj = await channel.fetch_message(id) except Exception as e: await ctx.respond("The message ID is invalid.", delete_after=3) print(e) return await self.bot.db.execute("INSERT INTO reactroles VALUES ($1, $2, $3, $4)", messageobj.id, channel.id, role.id, emoji) reaction = await messageobj.add_reaction(emoji) embed = discord.Embed(title="Reaction role setup", description="Reaction role setup complete.", color=0x00b2ff) await ctx.respond(embed=embed) # @commands.command() # @commands.has_guild_permissions(manage_messages=True) # async def reactrole(self, ctx): # embed = discord.Embed(title="Reaction role setup", description="1/4\nWhat channel is the message you're using is in? (Do NOT mention the channel. Instead, use the name.\nStuck? Read our [wiki](https://github.com/Joystickplays/GoMod/wiki/Verification-systems).", color=0x00b2ff) # msg = await ctx.send(embed=embed) # def check(m): # return m.channel == ctx.channel and m.author == ctx.author # while True: # try: # msg = await self.bot.wait_for('message', check=check, timeout=60) # except asyncio.TimeoutError: # await ctx.send("Timed out.", delete_after=3) # return # if msg.content.lower() == "cancel": # await ctx.send("Cancelled.", delete_after=3) # return # channelcheck = msg.content.replace(" ", "-") # channelcheck2 = channelcheck.lower() # channel = discord.utils.get(ctx.guild.text_channels, name=channelcheck2) # if channel != None: # break # await ctx.send("That channel doesn't exist. Try again...", delete_after=3) # embed = discord.Embed(title="Reaction role setup", description="2/4\nWhat is your message's ID? 
More on getting message IDs [here](https://support.discord.com/hc/en-us/articles/206346498-Where-can-I-find-my-User-Server-Message-ID-). You can also say \"create one\" to make GoMod create a message for you.", color=0x00b2ff) # msg = await ctx.send(embed=embed) # while True: # try: # msg = await self.bot.wait_for('message', check=check, timeout=60) # except asyncio.TimeoutError: # await ctx.send("Timed out.", delete_after=3) # return # if msg.content.lower() == "cancel": # await ctx.send("Cancelled.", delete_after=3)
# if msg.content.lower() == "create one": # embed = discord.Embed(title="Reaction role setup", description="3.5/4\nWhat will be the title of the message?", color=0x00b2ff) # msg = await ctx.send(embed=embed) # try: # title = await self.bot.wait_for('message', check=check, timeout=60) # except asyncio.TimeoutError: # await ctx.send("Timed out.", delete_after=3) # return # if title.content.lower() == "cancel": # await ctx.send("Cancelled.", delete_after=3) # return # embed = discord.Embed(title="Reaction role setup", description="3.5/4\nWhat will be the description of the message?", color=0x00b2ff) # msg = await ctx.send(embed=embed) # try: # description = await self.bot.wait_for('message', check=check, timeout=60) # except asyncio.TimeoutError: # await ctx.send("Timed out.", delete_after=3) # return # if description.content.lower() == "cancel": # await ctx.send("Cancelled.", delete_after=3) # return # embed = discord.Embed(title=title.content, description=description.content, color=0x00b2ff) # message = await channel.send(embed=embed) # break # if message == None: # try: # message = await channel.fetch_message(int(msg.content)) # break # except: # await ctx.send("That message doesn't exist. Try again...", delete_after=3) # while True: # embed = discord.Embed(title="Reaction role setup", description="3/4\nWhat will be the emoji for your reaction?", color=0x00b2ff) # msg = await ctx.send(embed=embed) # try: # msg = await self.bot.wait_for('message', check=check, timeout=60) # except asyncio.TimeoutError: # await ctx.send("Timed out.", delete_after=3) # return # if msg.content.lower() == "cancel": # await ctx.send("Cancelled.", delete_after=3) # return # reactionname = msg.content # try: # reaction = await message.add_reaction(msg.content) # break # except: # await ctx.send("That emoji is invalid. Try again...", delete_after=3) # while True: # embed = discord.Embed(title="Reaction role setup", description="4/4\nWhat role will be given to the user when they react? 
(Do NOT mention the role. Instead, use the name.", color=0x00b2ff) # msg = await ctx.send(embed=embed) # try: # msg = await self.bot.wait_for('message', check=check, timeout=60) # except asyncio.TimeoutError: # await ctx.send("Timed out.", delete_after=3) # return # if msg.content.lower() == "cancel": # await ctx.send("Cancelled.", delete_after=3) # return # role = discord.utils.get(ctx.guild.roles, name=msg.content) # if role != None: # break # await ctx.send("That role doesn't exist. Try again...", delete_after=3) # await self.bot.db.execute("INSERT INTO reactroles VALUES ($1, $2, $3, $4)", message.id, channel.id, role.id, reactionname) # embed = discord.Embed(title="Reaction role setup", description="Reaction role setup complete.", color=0x00b2ff) # await ctx.send(embed=embed) # @commands.command() # async def qasetup(self, ctx): # lookup = await self.bot.db.fetchrow("SELECT * FROM qas WHERE guild = $1", ctx.guild.id) # if lookup != None: # embed = discord.Embed(title="Error", description="Question and answer are limited to one per server. If you want to change the question and answer, please delete the current one and run this command again.", color=0x00b2ff) # await ctx.send(embed=embed) def setup(bot:GoModBot): bot.add_cog(Moderation(bot))
# return # message = None
miner.test.ts
import assert from "assert"; import EthereumProvider from "../../../src/provider"; import getProvider from "../../helpers/getProvider"; describe("api", () => { describe("miner", () => { describe("miner_stop/eth_mining/miner_start", () => { async function testStopStartMining(provider) { const initialBlockNumber = parseInt( await provider.send("eth_blockNumber") ); const [account] = await provider.send("eth_accounts"); // stop the miner const stopped = await provider.send("miner_stop"); assert.strictEqual(stopped, true); // check that eth_mining returns the correct status let miningStatus = await provider.send("eth_mining"); assert.strictEqual(miningStatus, false); // send a transaction, and make sure it does *not* get mined await provider.send("eth_subscribe", ["newHeads"]); const txHash = await provider.send("eth_sendTransaction", [ { from: account, to: account, value: 1 } ]); const fail = () => assert.fail( "No message should have been received while mining was stopped" ); provider.on("message", fail); await new Promise(resolve => setTimeout(resolve, 2000)); provider.off("message", fail); // let's relaly make sure it wasn't mined by checking for a receipt let receipt = await provider.send("eth_getTransactionReceipt", [ txHash ]); assert.strictEqual(receipt, null); // now start the miner back up const prom = provider.once("message"); const started = await provider.send("miner_start"); assert.strictEqual(started, true); // check that eth_mining returns the correct status miningStatus = await provider.send("eth_mining"); assert.strictEqual(miningStatus, true); // wait for the transaction to be mined await prom; receipt = await provider.send("eth_getTransactionReceipt", [txHash]); // make sure we're on the next block! 
assert.strictEqual( parseInt(receipt.blockNumber), initialBlockNumber + 1 ); } it("should stop mining, then mine when started", async () => { const provider = await getProvider(); await testStopStartMining(provider); }).timeout(3000); it("should stop mining, then mine when started", async () => { const provider = await getProvider({ miner: { blockTime: 1 } }); await testStopStartMining(provider); }).timeout(4000); }); describe("miner_setEtherbase", () => { let provider: EthereumProvider; let accounts: string[]; beforeEach(async () => { provider = await getProvider(); accounts = await provider.send("eth_accounts"); });
accounts[1] ]); assert.strictEqual(setState, true); const coinbase = await provider.send("eth_coinbase"); assert.strictEqual(coinbase, accounts[1]); await provider.send("eth_subscribe", ["newHeads"]); const txHash = await provider.send("eth_sendTransaction", [ { from: accounts[0], to: accounts[0] } ]); await provider.once("message"); const { status, blockNumber } = await provider.send("eth_getTransactionReceipt", [txHash]); assert.strictEqual(status, "0x1"); const { miner } = await provider.send("eth_getBlockByNumber", [ blockNumber ]); assert.strictEqual(miner, accounts[1]); }); }); describe("miner_setGasPrice", () => { let provider: EthereumProvider; let accounts: string[]; beforeEach(async () => { provider = await getProvider({ chain: { hardfork: "berlin" } }); accounts = await provider.send("eth_accounts"); }); it("sets the gasPrice and uses it as the default price in transactions", async () => { const newGasPrice = "0xffff"; const setState = await provider.send("miner_setGasPrice", [ newGasPrice ]); assert.strictEqual(setState, true); const ethGasPrice = await provider.send("eth_gasPrice"); assert.strictEqual(ethGasPrice, newGasPrice); await provider.send("eth_subscribe", ["newHeads"]); const txHash = await provider.send("eth_sendTransaction", [ { from: accounts[0], to: accounts[0] } ]); await provider.once("message"); const { gasPrice } = await provider.send("eth_getTransactionByHash", [ txHash ]); assert.strictEqual(gasPrice, newGasPrice); }); }); }); });
it("sets the etherbase", async () => { const setState = await provider.send("miner_setEtherbase", [
offlinemanga.ts
/* eslint-disable @typescript-eslint/no-non-null-assertion */ import { Imanga_source, manga_primitive, Isearch_results, Ichapter, Iimages } from '../MangaPrimitive' import file_system from '@/api/Filesystem' import fs from 'fs' import { dirname, join } from 'path' import fileUrl from 'file-url' import Fuse from 'fuse.js' export default class mangathousand extends manga_primitive implements Imanga_source { public constructor() { super('file:///home/pictures/Mangas', 'offlinemanga', 'Downloaded Manga') this.header_options = { mode: 'no-cors' } } async search(query: string): Promise<Isearch_results | null> { if (!this.CONFIG.manga_directory) this.CONFIG = await file_system.config() const all_series = fs.readdirSync(this.CONFIG.manga_directory!).filter(serie => fs.statSync(join(this.CONFIG.manga_directory!, serie)).isDirectory()) .sort((a, b) => a.localeCompare(b, navigator.languages[0] || navigator.language, {numeric: true, ignorePunctuation: true})) const results = [] as Isearch_results const fuse = new Fuse(all_series) fuse.search(query).forEach(({item}) => { results.push({ title: item, url: join(this.CONFIG.manga_directory!, item) }) }) return results } async get_chapters(chapter_directory: string): Promise<Array<Ichapter>> { const all_chapters = fs.readdirSync(chapter_directory).filter(chapter => fs.statSync(join(chapter_directory, chapter)).isDirectory()).sort((a, b) => a.localeCompare(b, navigator.languages[0] || navigator.language, {numeric: true, ignorePunctuation: true})) const ret = [] as Ichapter[] all_chapters.forEach((chapter, index) => { ret.push({ title: chapter, url: join(chapter_directory, chapter), next: all_chapters[index + 1] ? join(chapter_directory, all_chapters[index + 1]) : undefined, previous: all_chapters[index - 1] ? 
join(chapter_directory, all_chapters[index - 1]) : undefined, }) }) return ret.reverse() } async get_images(images_directory: string): Promise<Iimages> { const ret = {} as Iimages ret.images = fs.readdirSync(images_directory).map(img => fileUrl(join(images_directory, img))); for (const chap of await this.get_chapters(dirname(images_directory))) {
ret.next_chapter = chap.next ret.title = chap.title ret.previous_chapter = chap.previous break } } return ret } }
if (chap.url === images_directory) {
resource.go
package resources import ( "embed" "io/fs" ) const ( Configs = "configs" Scripts = "scripts" Binaries = "binaries" Terraform = "terraform" ) //go:embed terraform var terraformResource embed.FS func SubFolder(filesystem fs.FS, subfolder string) fs.FS { result, err := fs.Sub(filesystem, subfolder) if err != nil
return result } func DefaultTerraformResources() fs.FS { return SubFolder(terraformResource, "terraform") }
{ return embed.FS{} }
search-vendor.component.spec.ts
import { ComponentFixture, TestBed } from '@angular/core/testing'; import { SearchVendorComponent } from './search-vendor.component'; describe('SearchVendorComponent', () => { let component: SearchVendorComponent; let fixture: ComponentFixture<SearchVendorComponent>; beforeEach(async () => { await TestBed.configureTestingModule({ declarations: [ SearchVendorComponent ]
}) .compileComponents(); }); beforeEach(() => { fixture = TestBed.createComponent(SearchVendorComponent); component = fixture.componentInstance; fixture.detectChanges(); }); it('should create', () => { expect(component).toBeTruthy(); }); });
blocks.py
from abc import ABC, abstractmethod from datetime import datetime from typing import Generic, Type, TypeVar, Union from .devices import I2CDevice from .parsers import RegisterParser from .typing import RegisterState BlockType = TypeVar("BlockType") class RegisterBlock(Generic[BlockType], ABC): """ Abstract base class for collections of registers that represent distinct features of an I2C device. A RegisterBlock translates between high-level data structures and the low-level representation of that data as expressed by RegisterParsers. For example, for the DS series RTCs, there are sub- classes of RegisterBlock for the clock, the alarms, and their configuration states. The Clock subclass encapsulates RegisterParsers for the BCD-ish encoding of the Hour, Minute, Second, etc. stored in the device registers. RegisterBlock is a Generic type. When subclassing, add the appropriate type for the value represented by the subclass to its signature: class TimekeepingRegisterBlock(RegisterBlock[datetime]): ... A RegisterBlock subclass should define one or more attributes that are RegisterParsers. Subclasses must also define two methods: 1) `_value` to read the data from its attributes and produce a value of the designated type 2) `_prepare_update` to set its attributes to a given value For example, suppose some device stored a positive decimal number like 12.34 with the integer part in register 0x00 and the fractional part in register 0x01, each represented as 2 digit standard BCD. You want to read or write this value as a 2-tuple of ints. 
A RegisterBlock for accessing this number could be: class DecimalRegisterBlock(RegisterBlock[Tuple[int, int]]): integer_part = BCDRegisterParser(0x00) fractional_part = BCDRegisterParser(0x01) def _value(self) -> Tuple[int, int]: return self.integer_part, self.fractional_part def _prepare_update(self, value: Tuple[int, int]) -> None: self.integer_part, self.fractional_part = value """ @property def register_state(self) -> "RegisterState": """ Accesses register state from the most recent read of the parent device. """ return self._register_state @register_state.setter def register_state(self, state: "RegisterState") -> None: """ Setting register_state also keeps a copy to use as pending_state. """ self._register_state = state self.pending_state = self._register_state.copy() def __init__(self) -> None: """ Initialize a new RegisterBlock. RegisterBlock is a data descriptor, so it must be used as an attribute on a subclass of I2CDevice in order to have access to the device register state. """ # The very first access to the descriptor will populate actual state. self.register_state: RegisterState = [] def __get__( self, instance: "I2CDevice", owner: Type["I2CDevice"] ) -> BlockType: """ RegisterBlock is a data descriptor with access to the state of the I2CDevice instance that it belongs to, so we can use that register state for all parsers associated with this RegisterBlock (see RegisterParser.__get__). It is important for all RegisterParser instances to have a shared register state (i.e. the state stored in this class) in order to avoid mistakes if the state changes during a read. For example, if an RTC's Second register is read at 0 minutes 59 seconds, and then the clock ticks before we read the Minute register, the time would come out as 1 minute 59 seconds. 
Maxim DS RTCs (and probably others) use of 2 sets of registers to prevent this issue from affecting I2C block reads, so we just need to make sure we only make one call to `read_registers()` for all the RegisterParsers within a RegisterBlock. """ if not instance: raise AttributeError( "RegisterBlock must be accessed from an I2CDevice instance." ) self.register_state = instance.read_registers() return self._value() def __set__(self, instance: "I2CDevice", value: BlockType) -> None: """ Setting the value of the RegisterBlock updates its state via the RegisterParser descriptors that belong to the block. """ # Make sure we have the latest state loaded before modifying it self.register_state = instance.read_registers() self._prepare_update(value) # A minor optimization to only write a contiguous block from the first # changed register to the last changed register, leaving the rest # unmodified. This helps improve the speed of small updates. addresses_changed = [ i for i, b in enumerate(self.pending_state) if b != self._register_state[i] ] first_changed = min(addresses_changed) last_changed = max(addresses_changed) to_write = self.pending_state[first_changed : last_changed + 1] instance.write_registers(to_write, first_changed) @abstractmethod def _prepare_update(self, value: BlockType) -> None: """ Subclasses should define behavior for setting the values of their RegisterParser attributes to reflect the requested `value` for the RegisterBlock. Parsers' `__set__` methods call `update_register_state` on this instance so they can all keep their pending state in sync. """ @abstractmethod def _value(self) -> BlockType: """ Value should return an appropriate object to represent the state of this register block e.g. a datetime for the clock/alarms or a float for the temperature """ def update_register_state( self, address: Union[int, slice], value: "RegisterState" ) -> None: """ RegisterParsers should call this method to stage their changes to the register state. 
This allows parsers to be aware of each other's pending changes so e.g. two distinct parsers can flip two different bits in the same register. Once all parsers have staged their changes (implement via _prepare_update), the __set__ method will write all the changes to the parent I2CDevice instance. Parameters ---------- address : Union[int, slice] The register address(es) to set value : RegisterState The bytes to insert at address """ if isinstance(address, int): address = slice(address, address + 1) if len(value) != len(self.pending_state[address]): raise ValueError("Value must have as many bytes as slice") self.pending_state[address] = value class DatetimeRegisterBlock(RegisterBlock[datetime]): """ Base class whose subclasses keep track of the register addresses where various components of the date/time/alarms are stored for RTC ICs such as the Maxim DS series. """ hour: RegisterParser[int] minute: RegisterParser[int] day_of_month: RegisterParser[int] # Define defaults for attributes that may be left unset, e.g. the DS3231 # and DS1337 have no seconds for Alarm 2, and no year or month for either # Alarm. @property def second(self) -> Union[RegisterParser[int], int]: return 0 @second.setter def second(self, value: int) -> None: pass @property def month(self) -> Union[RegisterParser[int], int]: return datetime.now().month @month.setter def month(self, value: int) -> None: pass @property def year(self) -> Union[RegisterParser[int], int]: return datetime.now().year @year.setter def year(self, value: int) -> None: pass def _prepare_update(self, value: datetime) -> None: # FIXME pycharm doesn't understand you can assign an int to the # parser descriptors, but mypy does self.second = value.second self.minute = value.minute self.hour = value.hour self.day_of_month = value.day self.month = value.month self.year = value.year def _value(self) -> datetime:
try: value = datetime( self.year, self.month, self.day_of_month, self.hour, self.minute, self.second, ) except ValueError as err: raise ValueError( "Could not parse datetime. Perhaps the register state is" "invalid? Try setting to a known valid state first." ) from err return value
app_filter.py
from tkinter import * from odf_query import * from app_tools import * from app_selector import * from app_dictionary import _, load_dictionary class AppFilter(Toplevel): """Window for managing a filter (for adding or modifying a filter) Arguments: Toplevel {Toplevel (tkinter)} -- Window preventing actions on other windows """ def __init__(self, federation=None, app_parent=None, query_filters=[], query_tables=[], filter_index=None): """Constructor of the AppFilter class Keyword Arguments: federation {Federation} -- Federation of the filter (default: {None}) app_parent {Frame (tkinter)} -- parent window (default: {None}) query_filters {list} -- list of the query filters (default: {[]}) query_tables {list} -- tables used in the query (default: {[]}) filter_index {int} -- filter index for update (default: {None}) """ Toplevel.__init__(self) self.federation = federation self.app_parent = app_parent self.query_filters = query_filters self.query_tables = query_tables self.filter_index = filter_index self.display_values_label = None self.values_listbox = None """ Sources list """ filter_label = label_frame(self, _("SELECT A FIELD TO FILTER"), x=0, y=0) self.app_sel = AppSelector(filter_label, federation=self.federation, tables_list=self.query_tables, fct_select=self.maj_field_to_filter, select_button="v") bind_evt("dclick", self.app_sel.fields_list, self.maj_field_to_filter) """ FIELD TO FILTER """ field_to_filter_label = label_frame(filter_label, _("FIELD TO FILTER"), x=2, y=0) self.field_to_filter = entry(field_to_filter_label, "", width_entry=40, x=0, y=0, disable=True) """ Operators list """ filter_type_label = label_frame(self, _("Operator :"), x=0, y=1) self.filter_types_list = list_box(filter_type_label, vbar=True, hbar=False, x=0, y=1, width=15, height=10) button(filter_type_label, "=>", self.maj_filter_type, x=0, y=3) operators = ["=", ">", ">=", "<", "<=", "IN", "NOT IN", "BETWEEN", "NOT BETWEEN", "LIKE", "NOT LIKE"] i=0 for operator in operators: 
self.filter_types_list.insert(i, operator) i += 1 bind_evt("dclick", self.filter_types_list, self.maj_filter_type) self.filter_type = entry(filter_type_label, "", width_entry=15, x=1, y=1, disable=True) """ For update """ if self.filter_index != None: if self.query_tables == []: self.app_sel.tables_listbox.insert(0,self.query_filters[self.filter_index].field.table.target_name) self.app_sel.fields_list.insert(0, self.query_filters[self.filter_index].field.field_name) self.field_to_filter[0].set("{}.{}".format(self.query_filters[self.filter_index].field.table.target_name, self.query_filters[self.filter_index].field.field_name)) self.saved_field_to_filter = self.query_filters[self.filter_index].field self.filter_type[0].set(self.query_filters[self.filter_index].filter_type) self.display_values_selection() self.grab_set() def maj_field_to_filter(self,evt=None): """Set the field to filter Keyword Arguments: evt {Event} -- event (default: {None}) """ if self.display_values_label != None: self.display_values_label.destroy() for index in self.app_sel.fields_list.curselection(): print(self.app_sel.fields_tab[index].field_name) self.field_to_filter[0].set("{}.{}".format(self.app_sel.fields_tab[index].table.target_name, self.app_sel.fields_tab[index].field_name)) self.saved_field_to_filter = self.app_sel.fields_tab[index] def maj_filter_type(self,evt=None): """Set the operator to use in the filter Keyword Arguments: evt {Event} -- event (default: {None}) """ try: for index in self.filter_types_list.curselection(): print(self.filter_types_list.get(index)) self.filter_type[0].set(self.filter_types_list.get(index)) self.saved_filter_type = self.filter_types_list.get(index) self.display_values_selection() except: msg(_("Warning"), _("Select a field"), type="warning") def save_filter(self): """Create the filter and add it in the filters list of the query
self.app_parent.filters_list.delete(self.filter_index, self.filter_index) selected_values = [] warning = False if self.filter_type[0].get() in ["BETWEEN", "NOT BETWEEN"]: selected_values.append(self.value1[0].get()) selected_values.append(self.value2[0].get()) if self.value1[0].get() == "" or self.value2[0].get() == "": warning = True elif self.filter_type[0].get() in ["LIKE", "NOT LIKE"]: selected_values.append(self.like_value[0].get()) if self.like_value[0].get() == "": warning = True else: for index in self.values_listbox.curselection(): selected_values.append(str(self.values_listbox.get(index)[0])) if selected_values == []: warning = True if warning: msg(_("Warning"), _("Select value(s)"), type="warning") else: print(selected_values) filter = Filter(self.saved_field_to_filter, self.filter_type[0].get(), selected_values) self.query_filters.append(filter) self.app_parent.filters_list.insert(self.app_parent.filters_list.size(), filter.description) self.destroy() def delete_filter(self): """Delete and remove the filter from the filters list of the query """ try: del self.query_filters[self.filter_index] self.app_parent.filters_list.delete(self.filter_index, self.filter_index) self.destroy() except: print(_("Error")) def display_values_selection(self): """Display values of the selected field """ if self.display_values_label != None: self.display_values_label.destroy() self.display_values_label = label_frame(self, _("SELECT OR ENTER VALUE(S)"), x=0, y=2) fields = [QueryField(self.saved_field_to_filter.table, self.saved_field_to_filter.field_name)] values_list = Query(self.federation, fields, [], [], limit=100, agregate=True) values_list.execute() values_list = values_list.query_result if self.filter_type[0].get() in ["=", ">", ">=", "<", "<="]: self.values_listbox = list_box(self.display_values_label, vbar=True, hbar=True, width=20, height=20, values=values_list) elif self.filter_type[0].get() in ["IN", "NOT IN"]: self.values_listbox = 
list_box(self.display_values_label, vbar=True, hbar=True, width=20, height=20, values=values_list, selectmode="multiple") elif self.filter_type[0].get() in ["BETWEEN", "NOT BETWEEN"]: self.value1 = entry(self.display_values_label, "", width_entry=15) label(self.display_values_label, " AND ", y=1) self.value2 = entry(self.display_values_label, "", width_entry=15, y=2) elif self.filter_type[0].get() in ["LIKE", "NOT LIKE"]: self.like_value = entry(self.display_values_label, _("Character string"), width_entry=15, x=1, y=1) if self.filter_index != None: i = 0 for index in range(0, self.values_listbox.size()): if str(self.values_listbox.get(i)[0]) in self.query_filters[self.filter_index].filter_values: self.values_listbox.selection_set(i) i += 1 button(self.display_values_label, _("Modify filter"), self.save_filter, x=1) button(self.display_values_label, _("Remove filter"), self.delete_filter, x=2) else: """ Validation button """ button(self.display_values_label, _("Add filter"), self.save_filter, x=1)
""" if self.filter_index != None: del self.query_filters[self.filter_index]
conanfile.py
from conans import CMake, ConanFile, tools from conans.errors import ConanInvalidConfiguration import os required_conan_version = ">=1.29.1" class OpenSceneGraphConanFile(ConanFile): name = "openscenegraph" description = "OpenSceneGraph is an open source high performance 3D graphics toolkit" topics = ("openscenegraph", "graphics") url = "https://github.com/conan-io/conan-center-index" homepage = "http://www.openscenegraph.org" license = "LGPL-2.1-only", "WxWindows-exception-3.1" settings = "os", "arch", "compiler", "build_type" options = { "shared": [True, False], "fPIC": [True, False], "build_applications": [True, False], "enable_notify": [True, False], "enable_deprecated_api": [True, False], "enable_readfile": [True, False], "enable_ref_ptr_implicit_output_conversion": [True, False], "enable_ref_ptr_safe_dereference": [True, False], "enable_envvar_support": [True, False], "enable_windowing_system": [True, False], "enable_deprecated_serializers": [True, False], "use_fontconfig": [True, False], "with_asio": [True, False], "with_curl": [True, False], "with_dcmtk": [True, False], "with_freetype": [True, False], "with_gdal": [True, False], "with_gif": [True, False], "with_gta": [True, False], "with_jasper": [True, False], "with_jpeg": [True, False], "with_openexr": [True, False], "with_png": [True, False], "with_tiff": [True, False], "with_zlib": [True, False], } default_options = { "shared": False, "fPIC": True, "build_applications": False, "enable_notify": True, "enable_deprecated_api": False, "enable_readfile": True, "enable_ref_ptr_implicit_output_conversion": True, "enable_ref_ptr_safe_dereference": True, "enable_envvar_support": True, "enable_windowing_system": True, "enable_deprecated_serializers": False, "use_fontconfig": True, "with_asio": False, "with_curl": False, "with_dcmtk": False, "with_freetype": True, "with_gdal": False, "with_gif": True, "with_gta": False, "with_jasper": False, "with_jpeg": True, "with_openexr": False, "with_png": True, "with_tiff": 
True, "with_zlib": True, } short_paths = True exports_sources = "CMakeLists.txt", "patches/*.patch" generators = "cmake", "cmake_find_package" @property def _source_subfolder(self): return "source_subfolder" def config_options(self): if self.settings.os == "Windows": del self.options.fPIC del self.options.with_asio # Default to false with fontconfig until it is supported on Windows self.options.use_fontconfig = False if tools.is_apple_os(self.settings.os): # osg uses imageio on Apple platforms del self.options.with_gif del self.options.with_jpeg del self.options.with_png # imageio supports tiff files so the tiff plugin isn't needed on Apple platforms self.options.with_tiff = False def configure(self): if self.options.shared: del self.options.fPIC if not self.options.with_zlib: # These require zlib support del self.options.with_openexr del self.options.with_png del self.options.with_dcmtk def validate(self): if self.options.get_safe("with_asio", False): raise ConanInvalidConfiguration("ASIO support in OSG is broken, see https://github.com/openscenegraph/OpenSceneGraph/issues/921") if hasattr(self, "settings_build") and tools.cross_building(self): raise ConanInvalidConfiguration("openscenegraph recipe cannot be cross-built yet. Contributions are welcome.") def requirements(self): if self.options.enable_windowing_system and self.settings.os == "Linux": self.requires("xorg/system") self.requires("opengl/system") if self.options.use_fontconfig: self.requires("fontconfig/2.13.93") if self.options.get_safe("with_asio", False): # Should these be private requires? 
self.requires("asio/1.18.1") self.requires("boost/1.75.0") if self.options.with_curl: self.requires("libcurl/7.74.0") if self.options.get_safe("with_dcmtk"): self.requires("dcmtk/3.6.5") if self.options.with_freetype: self.requires("freetype/2.10.4") if self.options.with_gdal: self.requires("gdal/3.1.4") if self.options.get_safe("with_gif"): self.requires("giflib/5.2.1") if self.options.with_gta: self.requires("libgta/1.2.1") if self.options.with_jasper: self.requires("jasper/2.0.24") if self.options.get_safe("with_jpeg"): self.requires("libjpeg/9d") if self.options.get_safe("with_openexr"): self.requires("openexr/2.5.3") if self.options.get_safe("with_png"): self.requires("libpng/1.6.37") if self.options.with_tiff: self.requires("libtiff/4.2.0") if self.options.with_zlib: self.requires("zlib/1.2.11") def source(self): tools.get(**self.conan_data["sources"][self.version], strip_root=True, destination=self._source_subfolder) def _patch_sources(self): for patch in self.conan_data["patches"].get(self.version, []): tools.patch(**patch) for package in ("Fontconfig", "Freetype", "GDAL", "GIFLIB", "GTA", "Jasper", "OpenEXR"): # Prefer conan's find package scripts over osg's os.unlink(os.path.join(self._source_subfolder, "CMakeModules", "Find{}.cmake".format(package))) def _configured_cmake(self): if hasattr(self, "_cmake"): return self._cmake self._cmake = cmake = CMake(self) cmake.definitions["USE_3RDPARTY_BIN"] = False cmake.definitions["DYNAMIC_OPENSCENEGRAPH"] = self.options.shared cmake.definitions["DYNAMIC_OPENTHREADS"] = self.options.shared cmake.definitions["BUILD_OSG_APPLICATIONS"] = self.options.build_applications cmake.definitions["BUILD_OSG_EXAMPLES"] = False cmake.definitions["OSG_NOTIFY_DISABLED"] = not self.options.enable_notify cmake.definitions["OSG_USE_DEPRECATED_API"] = self.options.enable_deprecated_api cmake.definitions["OSG_PROVIDE_READFILE"] = self.options.enable_readfile cmake.definitions["OSG_USE_REF_PTR_IMPLICIT_OUTPUT_CONVERSION"] = 
self.options.enable_ref_ptr_implicit_output_conversion cmake.definitions["OSG_USE_REF_PTR_SAFE_DEREFERENCE"] = self.options.enable_ref_ptr_safe_dereference cmake.definitions["OSG_ENVVAR_SUPPORTED"] = self.options.enable_envvar_support if not self.options.enable_windowing_system: cmake.definitions["OSG_WINDOWING_SYSTEM"] = None cmake.definitions["BUILD_OSG_DEPRECATED_SERIALIZERS"] = self.options.enable_deprecated_serializers cmake.definitions["OSG_TEXT_USE_FONTCONFIG"] = self.options.use_fontconfig # Disable option dependencies unless we have a package for them cmake.definitions["OSG_WITH_FREETYPE"] = self.options.with_freetype cmake.definitions["OSG_WITH_OPENEXR"] = self.options.get_safe("with_openexr", False) cmake.definitions["OSG_WITH_INVENTOR"] = False cmake.definitions["OSG_WITH_JASPER"] = self.options.with_jasper cmake.definitions["OSG_WITH_OPENCASCADE"] = False cmake.definitions["OSG_WITH_FBX"] = False cmake.definitions["OSG_WITH_ZLIB"] = self.options.with_zlib cmake.definitions["OSG_WITH_GDAL"] = self.options.with_gdal cmake.definitions["OSG_WITH_GTA"] = self.options.with_gta cmake.definitions["OSG_WITH_CURL"] = self.options.with_curl cmake.definitions["OSG_WITH_LIBVNCSERVER"] = False cmake.definitions["OSG_WITH_DCMTK"] = self.options.get_safe("with_dcmtk", False) cmake.definitions["OSG_WITH_FFMPEG"] = False cmake.definitions["OSG_WITH_DIRECTSHOW"] = False cmake.definitions["OSG_WITH_SDL"] = False cmake.definitions["OSG_WITH_POPPLER"] = False cmake.definitions["OSG_WITH_RSVG"] = False cmake.definitions["OSG_WITH_NVTT"] = False cmake.definitions["OSG_WITH_ASIO"] = self.options.get_safe("with_asio", False) cmake.definitions["OSG_WITH_ZEROCONF"] = False cmake.definitions["OSG_WITH_LIBLAS"] = False cmake.definitions["OSG_WITH_GIF"] = self.options.get_safe("with_gif", False) cmake.definitions["OSG_WITH_JPEG"] = self.options.get_safe("with_jpeg", False) cmake.definitions["OSG_WITH_PNG"] = self.options.get_safe("with_png", False) cmake.definitions["OSG_WITH_TIFF"] 
= self.options.with_tiff if self.settings.os == "Windows": # osg has optional quicktime support on Windows cmake.definitions["CMAKE_DISABLE_FIND_PACKAGE_QuickTime"] = True cmake.definitions["OSG_MSVC_VERSIONED_DLL"] = False cmake.configure() return cmake def build(self): self._patch_sources() self._configured_cmake().build() def package(self): self._configured_cmake().install() self.copy(pattern="LICENSE.txt", dst="licenses", src=self._source_subfolder) tools.rmdir(os.path.join(self.package_folder, "lib", "pkgconfig")) tools.remove_files_by_mask(self.package_folder, "*.pdb") def package_info(self): # FindOpenSceneGraph.cmake is shipped with cmake and is a traditional cmake script # It doesn't setup targets and only provides a few variables: # - OPENSCENEGRAPH_FOUND # - OPENSCENEGRAPH_VERSION # - OPENSCENEGRAPH_INCLUDE_DIRS # - OPENSCENEGRAPH_LIBRARIES # Unfortunately, the cmake_find_package generators don't currently allow directly setting variables, # but it will set the last three of these if the name of the package is OPENSCENEGRAPH (it uses # the filename for the first, so OpenSceneGraph_FOUND gets set, not OPENSCENEGRAPH_FOUND) # TODO: set OPENSCENEGRAPH_FOUND in cmake_find_package and cmake_find_package_multi self.cpp_info.filenames["cmake_find_package"] = "OpenSceneGraph" self.cpp_info.filenames["cmake_find_package_multi"] = "OpenSceneGraph" self.cpp_info.names["cmake_find_package"] = "OPENSCENEGRAPH" self.cpp_info.names["cmake_find_package_multi"] = "OPENSCENEGRAPH" if self.settings.build_type == "Debug": postfix = "d" elif self.settings.build_type == "RelWithDebInfo": postfix = "rd" elif self.settings.build_type == "MinSizeRel": postfix = "s" else: postfix = "" def setup_plugin(plugin): lib = "osgdb_" + plugin plugin_library = self.cpp_info.components[lib] plugin_library.libs = [] if self.options.shared else [lib + postfix] plugin_library.requires = ["OpenThreads", "osg", "osgDB", "osgUtil"] if not self.options.shared: plugin_library.libdirs = 
[os.path.join("lib", "osgPlugins-{}".format(self.version))] return plugin_library def setup_serializers(lib): plugins = [] if lib not in ("osgDB", "osgWidget", "osgPresentation"): plugins.append("serializers_{}".format(lib.lower())) if self.options.enable_deprecated_serializers: if lib not in ("osgUtil", "osgDB", "osgGA", "osgManipulator", "osgUI", "osgPresentation"): plugins.append("deprecated_{}".format(lib.lower())) for plugin in plugins: setup_plugin(plugin).requires.append(lib) def setup_library(lib): library = self.cpp_info.components[lib] library.libs = [lib + postfix] library.names["pkg_config"] = "openscenegraph-{}".format(lib) setup_serializers(lib) return library # Core libraries # requires obtained from osg's source code # TODO: FindOpenThreads.cmake is shipped with CMake, so we should generate separate # files for it with cmake_find_package and cmake_find_package_multi library = self.cpp_info.components["OpenThreads"] library.libs = ["OpenThreads" + postfix] library.names["pkg_config"] = "openthreads" if self.settings.os == "Linux": library.system_libs = ["pthread"] library = setup_library("osg") library.requires = ["OpenThreads", "opengl::opengl"] if self.settings.os == "Linux": library.system_libs = ["m", "rt", "dl"] if not self.options.shared: library.defines.append("OSG_LIBRARY_STATIC") library = setup_library("osgDB") library.requires = ["osg", "osgUtil", "OpenThreads"] if self.settings.os == "Linux": library.system_libs = ["dl"] elif self.settings.os == "Macos": library.frameworks = ["Carbon", "Cocoa"] if self.options.with_zlib: library.requires.append("zlib::zlib") setup_library("osgUtil").requires = ["osg", "OpenThreads"] setup_library("osgGA").requires = ["osgDB", "osgUtil", "osg", "OpenThreads"] library = setup_library("osgText") library.requires = ["osgDB", "osg", "osgUtil", "OpenThreads"] if self.options.use_fontconfig: library.requires.append("fontconfig::fontconfig") library = setup_library("osgViewer") library.requires = ["osgGA", 
"osgText", "osgDB", "osgUtil", "osg"] if self.options.enable_windowing_system: if self.settings.os == "Linux": library.requires.append("xorg::xorg") elif tools.is_apple_os(self.settings.os): library.frameworks = ["Cocoa"] if self.settings.os == "Windows": library.system_libs = ["gdi32"] setup_library("osgAnimation").requires = ["osg", "osgText", "osgGA", "osgViewer", "OpenThreads"] setup_library("osgFX").requires = ["osgUtil", "osgDB", "osg", "OpenThreads"] setup_library("osgManipulator").requires = ["osgViewer", "osgGA", "osgUtil", "osg", "OpenThreads"] setup_library("osgParticle").requires = ["osgUtil", "osgDB", "osg", "OpenThreads"] setup_library("osgUI").requires = ["osgDB", "osgGA", "osgUtil", "osgText", "osgViewer", "osg", "OpenThreads"] setup_library("osgVolume").requires = ["osgGA", "osgDB", "osgUtil", "osg", "OpenThreads"] setup_library("osgShadow").requires = ["osgUtil", "osgDB", "osg", "OpenThreads"] setup_library("osgSim").requires = ["osgText", "osgUtil", "osgDB", "osg", "OpenThreads"] setup_library("osgTerrain").requires = ["osgUtil", "osgDB", "osg", "OpenThreads"] setup_library("osgWidget").requires = ["osgText", "osgViewer", "osgDB", "osg", "OpenThreads"] setup_library("osgPresentation").requires = ["osgViewer", "osgUI", "osgWidget", "osgManipulator", "osgVolume", "osgFX", "osgText", "osgGA", "osgUtil", "osgDB", "osg", "OpenThreads"] # Start of plugins # NodeKit/Psudo loader plugins setup_plugin("osga") setup_plugin("rot") setup_plugin("scale") setup_plugin("trans") setup_plugin("normals") setup_plugin("revisions") setup_plugin("osgviewer").requires.append("osgViewer") setup_plugin("osgshadow").requires.append("osgShadow") setup_plugin("osgterrain").requires.append("osgTerrain") # Main native plugins setup_plugin("osg") plugin = setup_plugin("ive") plugin.requires.extend(("osgSim", "osgFX", "osgText", "osgTerrain", "osgVolume")) if self.options.with_zlib: plugin.requires.append("zlib::zlib") # Viewer plugins 
setup_plugin("cfg").requires.append("osgViewer") # Shader plugins setup_plugin("glsl") # Image plugins setup_plugin("rgb") setup_plugin("bmp") setup_plugin("pnm") setup_plugin("dds") setup_plugin("tga") setup_plugin("hdr") setup_plugin("dot") setup_plugin("vtf") setup_plugin("ktx") if self.options.get_safe("with_jpeg"): setup_plugin("jpeg").requires.append("libjpeg::libjpeg")
if self.options.get_safe("with_openexr"): setup_plugin("exr").requires.append("openexr::openexr") if self.options.get_safe("with_gif"): setup_plugin("gif").requires.append("giflib::giflib") if self.options.get_safe("with_png"): setup_plugin("png").requires.extend(("libpng::libpng", "zlib::zlib")) if self.options.with_tiff: setup_plugin("tiff").requires.append("libtiff::libtiff") if self.options.with_gdal: setup_plugin("gdal").requires.extend(("osgTerrain", "gdal::gdal")) setup_plugin("ogr").requires.append("gdal::gdal") if self.options.with_gta: setup_plugin("gta").requires.append("libgta::libgta") # 3D Image plugins if self.options.get_safe("with_dcmtk"): plugin = setup_plugin("dicom") plugin.requires.extend(("osgVolume", "dcmtk::dcmtk")) if self.settings.os == "Windows": plugin.system_libs = ["wsock32", "ws2_32"] # 3rd party 3d plugins setup_plugin("3dc") setup_plugin("p3d").requires.extend(("osgGA", "osgText", "osgVolume", "osgFX", "osgViewer", "osgPresentation")) if self.options.with_curl: plugin = setup_plugin("curl") plugin.requires.append("libcurl::libcurl") if self.options.with_zlib: plugin.requires.append("zlib::zlib") if self.options.with_zlib: setup_plugin("gz").requires.append("zlib::zlib") # with_inventor # setup_plugin("iv") # with_collada # setup_plugin("dae") # with_fbx # setup_plugin("fbx") # with_opencascade # setup_plugin("opencascade") setup_plugin("bvh").requires.append("osgAnimation") setup_plugin("x") setup_plugin("dxf").requires.append("osgText") setup_plugin("openflight").requires.append("osgSim") setup_plugin("obj") setup_plugin("pic") setup_plugin("stl") setup_plugin("3ds") setup_plugin("ac") setup_plugin("pov") setup_plugin("logo") setup_plugin("lws") setup_plugin("md2") setup_plugin("osgtgz") setup_plugin("tgz") setup_plugin("shp").requires.extend(("osgSim", "osgTerrain")) setup_plugin("txf").requires.append("osgText") setup_plugin("bsp") setup_plugin("mdl") setup_plugin("gles").requires.extend(("osgUtil", "osgAnimation")) 
setup_plugin("osgjs").requires.extend(("osgAnimation", "osgSim")) setup_plugin("lwo").requires.append("osgFX") setup_plugin("ply") setup_plugin("txp").requires.extend(("osgSim", "osgText")) # with_ffmpeg # setup_plugin("ffmpeg") # with_gstreamer # setup_plugin("gstreamer") # with_directshow # setup_plugin("directshow") if tools.is_apple_os(self.settings.os): setup_plugin("imageio").frameworks = ["Accelerate"] if ((self.settings.os == "Macos" and self.settings.os.version and tools.Version(self.settings.os.version) >= "10.8") or (self.settings.os == "iOS" and tools.Version(self.settings.os.version) >= "6.0")): plugin = setup_plugin("avfoundation") plugin.requires.append("osgViewer") plugin.frameworks = ["AVFoundation", "Cocoa", "CoreVideo", "CoreMedia", "QuartzCore"] if self.settings.os == "Macos" and self.settings.os.version and tools.Version(self.settings.os.version) <= "10.6" and self.settings.arch == "x86": setup_plugin("qt").frameworks = ["QuickTime"] if self.settings.os == "Macos" and self.settings.arch == "x86": plugin = setup_plugin("QTKit") plugin.requires.append("osgViewer") plugin.frameworks = ["QTKit", "Cocoa", "QuickTime", "CoreVideo"] # with_nvtt # setup_plugin("nvtt") if self.options.with_freetype: setup_plugin("freetype").requires.extend(("osgText", "freetype::freetype")) if self.options.with_zlib: setup_plugin("zip") # with_svg # setup_plugin("svg") # with_pdf/poppler # setup_plugin("pdf") # with_vnc # setup_plugin("vnc") setup_plugin("pvr") plugin = setup_plugin("osc") plugin.requires.append("osgGA") if self.settings.os == "Windows": plugin.system_libs = ["ws2_32", "winmm"] setup_plugin("trk") setup_plugin("tf") # with_blas # setup_plugin("las") setup_plugin("lua") # with_sdl # setup_plugin("sdl") if self.options.get_safe("with_asio", False): setup_plugin("resthttp").requires.extend(("osgPresentation", "asio::asio", "boost::boost")) # with_zeroconf # setup_plugin("zeroconf")
if self.options.with_jasper: setup_plugin("jp2").requires.append("jasper::jasper")
gbm.py
import numpy as np from .base import Price class GBM(Price): """Brownian motion.""" def __init__(self, T=1., sigma1=0.02, sigma2=0.01, s1=1., s2=1., drift1=0., drift2=0., n=100): self.sigma1 = sigma1 self.sigma2 = sigma2 self.drift1 = drift1 self.drift2 = drift2 self.n = n self.s1 = s1 self.s2 = s2 self.T = T def generate(self): dt1 = self.sigma1 ** 2 * self.T / self.n dt2 = self.sigma2 ** 2 * self.T / self.n
path[:, 1] = np.exp((self.drift1 - self.sigma1 ** 2 / 2.) * path[:, 0] + self.sigma1 * path[:, 1]) path[:, 2] = np.exp((self.drift2 - self.sigma2 ** 2 / 2.) * path[:, 0] + self.sigma2 * path[:, 2]) path[:, 1] *= self.s1 path[:, 2] *= self.s2 return path
bm1 = np.r_[[0.], np.sqrt(dt1) * np.random.randn(self.n - 1).cumsum()] bm2 = np.r_[[0.], np.sqrt(dt2) * np.random.randn(self.n - 1).cumsum()] path = np.c_[np.linspace(0, self.T, self.n), bm1, bm2]
bluetooth_earcons_tests.rs
// Copyright 2020 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use crate::agent::earcons; use crate::agent::earcons::bluetooth_handler::BLUETOOTH_DOMAIN; use crate::agent::earcons::sound_ids::{ BLUETOOTH_CONNECTED_SOUND_ID, BLUETOOTH_DISCONNECTED_SOUND_ID, }; use crate::agent::restore_agent; use crate::handler::device_storage::testing::InMemoryStorageFactory; use crate::tests::fakes::discovery_service::{DiscoveryService, SessionId}; use crate::tests::fakes::service_registry::ServiceRegistry; use crate::tests::fakes::sound_player_service::{SoundEventReceiver, SoundPlayerService}; use crate::EnvironmentBuilder; use anyhow::{format_err, Error}; use fidl_fuchsia_media::AudioRenderUsage; use fuchsia_component::server::NestedEnvironment; use futures::lock::Mutex; use futures::StreamExt; use std::sync::Arc; const ID_1: SessionId = 1; const ID_2: SessionId = 2; const ID_3: SessionId = 3; const NON_BLUETOOTH_DOMAIN_1: &str = "Cast App"; const NON_BLUETOOTH_DOMAIN_2: &str = "Cast App Helper"; const ENV_NAME: &str = "bluetooth_earcons_test_environment"; /// Used to store fake services for mocking dependencies and checking input/outputs. /// To add a new fake to these tests, add here, in create_services, and then use /// in your test. #[allow(dead_code)] struct FakeServices { sound_player: Arc<Mutex<SoundPlayerService>>, discovery: Arc<Mutex<DiscoveryService>>, } /// Builds the test environment. async fn create_environment(service_registry: Arc<Mutex<ServiceRegistry>>) -> NestedEnvironment { let env = EnvironmentBuilder::new(Arc::new(InMemoryStorageFactory::new())) .service(ServiceRegistry::serve(service_registry)) .agents(&[restore_agent::blueprint::create(), earcons::agent::blueprint::create()]) .spawn_and_get_nested_environment(ENV_NAME) .await .unwrap(); env } /// Creates and returns a registry and bluetooth related services it is populated with. 
async fn create_services() -> (Arc<Mutex<ServiceRegistry>>, FakeServices) { let service_registry = ServiceRegistry::create(); let sound_player_service_handle = Arc::new(Mutex::new(SoundPlayerService::new())); let discovery_service_handle = Arc::new(Mutex::new(DiscoveryService::new())); service_registry.lock().await.register_service(sound_player_service_handle.clone()); service_registry.lock().await.register_service(discovery_service_handle.clone()); ( service_registry, FakeServices { sound_player: sound_player_service_handle, discovery: discovery_service_handle, }, ) } /// Tests to ensure that when the bluetooth connections change, the SoundPlayer receives requests /// to play the sounds with the correct ids. #[fuchsia_async::run_until_stalled(test)] async fn test_sounds() { let (service_registry, fake_services) = create_services().await; let _env = create_environment(service_registry).await; // Create channel to receive notifications for when sounds are played. Used to know when to // check the sound player fake that the sound has been played. let mut sound_played_receiver = fake_services.sound_player.lock().await.create_sound_played_listener().await; // Add first connection. fake_services.discovery.lock().await.update_session(ID_1, BLUETOOTH_DOMAIN).await; watch_for_next_sound_played(&mut sound_played_receiver).await.ok(); assert!(fake_services.sound_player.lock().await.id_exists(BLUETOOTH_CONNECTED_SOUND_ID).await); assert_eq!( fake_services.sound_player.lock().await.get_play_count(BLUETOOTH_CONNECTED_SOUND_ID).await, Some(1) ); // Add second connection. fake_services.discovery.lock().await.update_session(ID_2, BLUETOOTH_DOMAIN).await; watch_for_next_sound_played(&mut sound_played_receiver).await.ok(); assert_eq!( fake_services.sound_player.lock().await.get_play_count(BLUETOOTH_CONNECTED_SOUND_ID).await, Some(2) ); // Disconnect the first connection. 
fake_services.discovery.lock().await.remove_session(ID_1).await; watch_for_next_sound_played(&mut sound_played_receiver).await.ok(); assert!( fake_services.sound_player.lock().await.id_exists(BLUETOOTH_DISCONNECTED_SOUND_ID).await ); assert_eq!( fake_services .sound_player .lock() .await .get_play_count(BLUETOOTH_DISCONNECTED_SOUND_ID) .await, Some(1) ); // Disconnect the second connection. fake_services.discovery.lock().await.remove_session(ID_2).await; watch_for_next_sound_played(&mut sound_played_receiver).await.ok(); assert_eq!( fake_services .sound_player .lock() .await .get_play_count(BLUETOOTH_DISCONNECTED_SOUND_ID) .await, Some(2) ); } /// Tests to ensure that only bluetooth domains play sounds, and that when others /// are present, they do not duplicate the sounds. #[fuchsia_async::run_until_stalled(test)] async fn test_bluetooth_domain() { let (service_registry, fake_services) = create_services().await; let _env = create_environment(service_registry).await; // Create channel to receive notifications for when sounds are played. Used to know when to // check the sound player fake that the sound has been played. let mut sound_played_receiver = fake_services.sound_player.lock().await.create_sound_played_listener().await; // Add multiple updates, only one of which is the Bluetooth domain. fake_services.discovery.lock().await.update_session(ID_1, BLUETOOTH_DOMAIN).await; fake_services.discovery.lock().await.update_session(ID_2, NON_BLUETOOTH_DOMAIN_1).await; fake_services.discovery.lock().await.update_session(ID_3, NON_BLUETOOTH_DOMAIN_2).await; watch_for_next_sound_played(&mut sound_played_receiver).await.ok(); assert!(fake_services.sound_player.lock().await.id_exists(BLUETOOTH_CONNECTED_SOUND_ID).await); // Ensure the connection sound only played once. assert_eq!( fake_services.sound_player.lock().await.get_play_count(BLUETOOTH_CONNECTED_SOUND_ID).await, Some(1) ); // Disconnect the bluetooth connection. 
fake_services.discovery.lock().await.remove_session(ID_1).await; watch_for_next_sound_played(&mut sound_played_receiver).await.ok(); assert!( fake_services.sound_player.lock().await.id_exists(BLUETOOTH_DISCONNECTED_SOUND_ID).await ); assert_eq!( fake_services .sound_player .lock() .await .get_play_count(BLUETOOTH_DISCONNECTED_SOUND_ID) .await, Some(1) ); } // Test that the bluetooth earcons aren't played for oobe connections. #[fuchsia_async::run_until_stalled(test)] async fn test_oobe_connection() { let (service_registry, fake_services) = create_services().await; let _env = create_environment(service_registry).await; // Create channel to receive notifications for when sounds are played. Used to know when to // check the sound player fake that the sound has been played. let mut sound_played_receiver = fake_services.sound_player.lock().await.create_sound_played_listener().await; // Add oobe bluetooth connection. fake_services.discovery.lock().await.update_session(ID_1, BLUETOOTH_DOMAIN).await; #[allow(clippy::bool_assert_comparison)] { assert_eq!( fake_services.sound_player.lock().await.id_exists(BLUETOOTH_CONNECTED_SOUND_ID).await, false ); } assert_eq!( fake_services.sound_player.lock().await.get_play_count(BLUETOOTH_CONNECTED_SOUND_ID).await, None ); // Disconnect the oobe blueooth connection. fake_services.discovery.lock().await.remove_session(ID_1).await; #[allow(clippy::bool_assert_comparison)] { assert_eq!( fake_services .sound_player .lock() .await .id_exists(BLUETOOTH_DISCONNECTED_SOUND_ID) .await, false ); } assert_eq!( fake_services .sound_player .lock() .await .get_play_count(BLUETOOTH_DISCONNECTED_SOUND_ID) .await, None ); // Add regular bluetooth connection. fake_services.discovery.lock().await.update_session(ID_2, BLUETOOTH_DOMAIN).await; watch_for_next_sound_played(&mut sound_played_receiver).await.ok(); assert_eq!( fake_services.sound_player.lock().await.get_play_count(BLUETOOTH_CONNECTED_SOUND_ID).await, Some(1) );
watch_for_next_sound_played(&mut sound_played_receiver).await.ok(); assert_eq!( fake_services .sound_player .lock() .await .get_play_count(BLUETOOTH_DISCONNECTED_SOUND_ID) .await, Some(1) ); } /// Perform a watch on the sound player fake to wait until a sound has been played. async fn watch_for_next_sound_played( sound_played_receiver: &mut SoundEventReceiver, ) -> Result<(u32, AudioRenderUsage), Error> { sound_played_receiver.next().await.ok_or_else(|| format_err!("No next event found in stream")) }
// Disconnect the regular bluetooth connection. fake_services.discovery.lock().await.remove_session(ID_2).await;
uvgen.rs
//! UV Map generator. Used to generate second texture coordinates for lightmaps. //! //! Current implementation uses simple planar mapping. use crate::core::instant; use crate::scene::mesh::buffer::{ VertexAttributeDataKind, VertexAttributeDescriptor, VertexAttributeKind, VertexFetchError, VertexReadTrait, VertexWriteTrait, }; use crate::{ core::{ algebra::Vector2, math::{self, PlaneClass, TriangleDefinition, Vector2Ext}, rectpack::RectPacker, visitor::{Visit, VisitResult, Visitor}, }, scene::mesh::surface::SurfaceData, scene::mesh::Mesh, }; use rayon::prelude::*; /// A part of uv map. #[derive(Debug)] pub struct UvMesh { // Array of indices of triangles. triangles: Vec<usize>, uv_max: Vector2<f32>, uv_min: Vector2<f32>, } impl UvMesh { fn new(first_triangle: usize) -> Self { Self { triangles: vec![first_triangle], uv_max: Vector2::new(-std::f32::MAX, -std::f32::MAX), uv_min: Vector2::new(std::f32::MAX, std::f32::MAX), } } /// Returns total width of the mesh. pub fn width(&self) -> f32 { self.uv_max.x - self.uv_min.x } /// Returns total height of the mesh. pub fn
(&self) -> f32 { self.uv_max.y - self.uv_min.y } /// Returns total area of the mesh. pub fn area(&self) -> f32 { self.width() * self.height() } } /// A set of faces with triangles belonging to faces. #[derive(Default, Debug)] pub struct UvBox { px: Vec<usize>, nx: Vec<usize>, py: Vec<usize>, ny: Vec<usize>, pz: Vec<usize>, nz: Vec<usize>, projections: Vec<[Vector2<f32>; 3]>, } fn face_vs_face( data: &mut SurfaceData, face_triangles: &[usize], other_face_triangles: &[usize], patch: &mut SurfaceDataPatch, ) { for other_triangle_index in other_face_triangles.iter() { let other_triangle = data.triangles[*other_triangle_index].clone(); for triangle_index in face_triangles.iter() { 'outer_loop: for vertex_index in data.triangles[*triangle_index].indices_mut() { for &other_vertex_index in other_triangle.indices() { if *vertex_index == other_vertex_index { // We have adjacency, add new vertex and fix current index. patch.additional_vertices.push(other_vertex_index); *vertex_index = data.vertex_buffer.vertex_count(); data.vertex_buffer.duplicate(other_vertex_index as usize); continue 'outer_loop; } } } } } } fn make_seam( data: &mut SurfaceData, face_triangles: &[usize], other_faces: &[&[usize]], patch: &mut SurfaceDataPatch, ) { for &other_face_triangles in other_faces.iter() { face_vs_face(data, face_triangles, other_face_triangles, patch); } } /// A patch for surface data that contains secondary texture coordinates and /// new topology for data. It is needed for serialization: during the UV generation, /// generator could multiply vertices to make seams, it adds new data to existing /// vertices. The problem is that we do not serialize surface data - we store only a /// "link" to resource from which we'll load surface data on deserialization. But /// freshly loaded resource is not suitable for generated lightmap - in most cases /// it just does not have secondary texture coordinates. 
So we have to patch data after /// loading somehow with required data, this is where `SurfaceDataPatch` comes into /// play. #[derive(Clone, Debug, Default)] pub struct SurfaceDataPatch { /// A surface data id. Usually it is just a hash of surface data. pub data_id: u64, /// New topology for surface data. Old topology must be replaced with new, /// because UV generator splits vertices at uv map. pub triangles: Vec<TriangleDefinition>, /// List of second texture coordinates used for light maps. pub second_tex_coords: Vec<Vector2<f32>>, /// List of indices of vertices that must be cloned and pushed into vertices /// array of surface data. pub additional_vertices: Vec<u32>, } impl Visit for SurfaceDataPatch { fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult { visitor.enter_region(name)?; self.data_id.visit("DataId", visitor)?; self.triangles.visit("Triangles", visitor)?; self.second_tex_coords.visit("SecondTexCoords", visitor)?; self.additional_vertices .visit("AdditionalVertices", visitor)?; visitor.leave_region() } } /// Maps each triangle from surface to appropriate side of box. This is so called /// box mapping. 
fn generate_uv_box(data: &SurfaceData) -> UvBox { let mut uv_box = UvBox::default(); for (i, triangle) in data.triangles.iter().enumerate() { let a = data .vertex_buffer .get(triangle[0] as usize) .unwrap() .read_3_f32(VertexAttributeKind::Position) .unwrap(); let b = data .vertex_buffer .get(triangle[1] as usize) .unwrap() .read_3_f32(VertexAttributeKind::Position) .unwrap(); let c = data .vertex_buffer .get(triangle[2] as usize) .unwrap() .read_3_f32(VertexAttributeKind::Position) .unwrap(); let normal = (b - a).cross(&(c - a)); let class = math::classify_plane(normal); match class { PlaneClass::XY => { if normal.z < 0.0 { uv_box.nz.push(i); uv_box.projections.push([a.yx(), b.yx(), c.yx()]) } else { uv_box.pz.push(i); uv_box.projections.push([a.xy(), b.xy(), c.xy()]); } } PlaneClass::XZ => { if normal.y < 0.0 { uv_box.ny.push(i); uv_box.projections.push([a.xz(), b.xz(), c.xz()]) } else { uv_box.py.push(i); uv_box.projections.push([a.zx(), b.zx(), c.zx()]) } } PlaneClass::YZ => { if normal.x < 0.0 { uv_box.nx.push(i); uv_box.projections.push([a.zy(), b.zy(), c.zy()]) } else { uv_box.px.push(i); uv_box.projections.push([a.yz(), b.yz(), c.yz()]) } } } } uv_box } /// Generates a set of UV meshes. pub fn generate_uv_meshes( uv_box: &UvBox, data: &mut SurfaceData, ) -> (Vec<UvMesh>, SurfaceDataPatch) { let mut mesh_patch = SurfaceDataPatch { data_id: data.id(), ..Default::default() }; if !data .vertex_buffer .has_attribute(VertexAttributeKind::TexCoord1) { let free = data.vertex_buffer.find_free_shader_location(); data.vertex_buffer .add_attribute( VertexAttributeDescriptor { kind: VertexAttributeKind::TexCoord1, component_type: VertexAttributeDataKind::F32, size: 2, divisor: 0, shader_location: free, }, Vector2::<f32>::default(), ) .unwrap(); } // Step 1. Split vertices at boundary between each face. This step multiplies the // number of vertices at boundary so we'll get separate texture coordinates at // seams. 
make_seam( data, &uv_box.px, &[&uv_box.nx, &uv_box.py, &uv_box.ny, &uv_box.pz, &uv_box.nz], &mut mesh_patch, ); make_seam( data, &uv_box.nx, &[&uv_box.px, &uv_box.py, &uv_box.ny, &uv_box.pz, &uv_box.nz], &mut mesh_patch, ); make_seam( data, &uv_box.py, &[&uv_box.px, &uv_box.nx, &uv_box.ny, &uv_box.pz, &uv_box.nz], &mut mesh_patch, ); make_seam( data, &uv_box.ny, &[&uv_box.py, &uv_box.nx, &uv_box.px, &uv_box.pz, &uv_box.nz], &mut mesh_patch, ); make_seam( data, &uv_box.pz, &[&uv_box.nz, &uv_box.px, &uv_box.nx, &uv_box.py, &uv_box.ny], &mut mesh_patch, ); make_seam( data, &uv_box.nz, &[&uv_box.pz, &uv_box.px, &uv_box.nx, &uv_box.py, &uv_box.ny], &mut mesh_patch, ); // Step 2. Find separate "meshes" on uv map. After box mapping we will most likely // end up with set of faces, some of them may form meshes and each such mesh must // be moved with all faces it has. let mut meshes = Vec::new(); let mut removed_triangles = vec![false; data.triangles.len()]; for triangle_index in 0..data.triangles.len() { if !removed_triangles[triangle_index] { // Start off random triangle and continue gather adjacent triangles one by one. let mut mesh = UvMesh::new(triangle_index); removed_triangles[triangle_index] = true; let mut last_triangle = 1; let mut i = 0; while i < last_triangle { let triangle = &data.triangles[mesh.triangles[i]]; // Push all adjacent triangles into mesh. This is brute force implementation. for (other_triangle_index, other_triangle) in data.triangles.iter().enumerate() { if !removed_triangles[other_triangle_index] { 'vertex_loop: for &vertex_index in triangle.indices() { for &other_vertex_index in other_triangle.indices() { if vertex_index == other_vertex_index { mesh.triangles.push(other_triangle_index); removed_triangles[other_triangle_index] = true; // Push border further to continue iterating from added // triangle. This is needed because we checking one triangle // after another and we must continue if new triangles have // some adjacent ones. 
last_triangle += 1; break 'vertex_loop; } } } } } i += 1; } // Calculate bounds. for &triangle_index in mesh.triangles.iter() { let [a, b, c] = uv_box.projections[triangle_index]; mesh.uv_min = a .per_component_min(&b) .per_component_min(&c) .per_component_min(&mesh.uv_min); mesh.uv_max = a .per_component_max(&b) .per_component_max(&c) .per_component_max(&mesh.uv_max); } meshes.push(mesh); } } (meshes, mesh_patch) } /// Generates UV map for given surface data. /// /// # Performance /// /// This method utilizes lots of "brute force" algorithms, so it is not fast as it /// could be in ideal case. It also allocates some memory for internal needs. pub fn generate_uvs( data: &mut SurfaceData, spacing: f32, ) -> Result<SurfaceDataPatch, VertexFetchError> { let uv_box = generate_uv_box(data); let (mut meshes, mut patch) = generate_uv_meshes(&uv_box, data); // Step 4. Arrange and scale all meshes on uv map so it fits into [0;1] range. let area = meshes.iter().fold(0.0, |area, mesh| area + mesh.area()); let square_side = area.sqrt() + spacing * meshes.len() as f32; meshes.sort_unstable_by(|a, b| b.area().partial_cmp(&a.area()).unwrap()); let mut rects = Vec::new(); let twice_spacing = spacing * 2.0; // Some empiric coefficient that large enough to make size big enough for all meshes. // This should be large enough to fit all meshes, but small to prevent losing of space. // We'll use iterative approach to pack everything as tight as possible: at each iteration // scale will be increased until packer is able to pack everything. let mut empiric_scale = 1.1; let mut scale = 1.0; let mut packer = RectPacker::new(1.0, 1.0); 'try_loop: for _ in 0..100 { rects.clear(); // Calculate size of atlas for packer, we'll scale it later on. scale = 1.0 / (square_side * empiric_scale); // We'll pack into 1.0 square, our UVs must be in [0;1] range, no wrapping is allowed. 
packer.clear(); for mesh in meshes.iter() { if let Some(rect) = packer.find_free( mesh.width() * scale + twice_spacing, mesh.height() * scale + twice_spacing, ) { rects.push(rect); } else { // I don't know how to pass this by without iterative approach :( empiric_scale *= 1.33; continue 'try_loop; } } } for (i, rect) in rects.into_iter().enumerate() { let mesh = &meshes[i]; for &triangle_index in mesh.triangles.iter() { for (&vertex_index, &projection) in data.triangles[triangle_index] .indices() .iter() .zip(&uv_box.projections[triangle_index]) { data.vertex_buffer .get_mut(vertex_index as usize) .unwrap() .write_2_f32( VertexAttributeKind::TexCoord1, (projection - mesh.uv_min).scale(scale) + Vector2::new(spacing, spacing) + rect.position, )?; } } } patch.triangles = data.triangles.clone(); for view in data.vertex_buffer.iter() { patch .second_tex_coords .push(view.read_2_f32(VertexAttributeKind::TexCoord1)?); } Ok(patch) } /// Generates UVs for a specified mesh. pub fn generate_uvs_mesh( mesh: &Mesh, spacing: f32, ) -> Result<Vec<SurfaceDataPatch>, VertexFetchError> { let last = instant::Instant::now(); let data_set = mesh.surfaces().iter().map(|s| s.data()).collect::<Vec<_>>(); let patches = data_set .into_par_iter() .map(|data| generate_uvs(&mut data.write().unwrap(), spacing)) .collect::<Result<Vec<SurfaceDataPatch>, VertexFetchError>>()?; println!("Generate UVs: {:?}", instant::Instant::now() - last); Ok(patches) } #[cfg(test)] mod test { use crate::core::algebra::{Matrix4, Vector3}; use crate::scene::mesh::buffer::{VertexAttributeKind, VertexReadTrait}; use crate::{scene::mesh::surface::SurfaceData, utils::uvgen::generate_uvs}; use image::{Rgb, RgbImage}; use imageproc::drawing::draw_line_segment_mut; #[test] fn test_generate_uvs() { //let mut data = SurfaceSharedData::make_sphere(100, 100, 1.0); //let mut data = SurfaceSharedData::make_cylinder(80, 1.0, 1.0, true, Matrix4::identity()); //let mut data = SurfaceSharedData::make_cube(Matrix4::identity()); let 
mut data = SurfaceData::make_cone( 16, 1.0, 1.0, &Matrix4::new_nonuniform_scaling(&Vector3::new(1.0, 1.1, 1.0)), ); generate_uvs(&mut data, 0.01).unwrap(); let white = Rgb([255u8, 255u8, 255u8]); let mut image = RgbImage::new(1024, 1024); for triangle in data.triangles.iter() { let a = data .vertex_buffer .get(triangle[0] as usize) .unwrap() .read_2_f32(VertexAttributeKind::TexCoord1) .unwrap() .scale(1024.0); let b = data .vertex_buffer .get(triangle[1] as usize) .unwrap() .read_2_f32(VertexAttributeKind::TexCoord1) .unwrap() .scale(1024.0); let c = data .vertex_buffer .get(triangle[2] as usize) .unwrap() .read_2_f32(VertexAttributeKind::TexCoord1) .unwrap() .scale(1024.0); draw_line_segment_mut(&mut image, (a.x, a.y), (b.x, b.y), white); draw_line_segment_mut(&mut image, (b.x, b.y), (c.x, c.y), white); draw_line_segment_mut(&mut image, (c.x, c.y), (a.x, a.y), white); } image.save("uvgen.png").unwrap(); } }
height
test_features.py
import unittest #from moto import mock_s3 import boto3 import pandas from lore.io import download from tests.mocks.features import UserWarehouseSearchesFeature class TestFeatures(unittest.TestCase): #@mock_s3 def xtest_s3_features(self):
s3 = boto3.resource('s3') # We need to create the bucket since this is all in Moto's 'virtual' AWS account s3.create_bucket(Bucket='lore-test') user_warehouse_feature = UserWarehouseSearchesFeature() user_warehouse_feature.publish() # temp_path = download(user_warehouse_feature.data_path(), cache=False) # fetched_data = pandas.read_csv(temp_path) # self.assertTrue(len(user_warehouse_feature.get_data()) == 3) # self.assertTrue(user_warehouse_feature.get_data().equals(fetched_data))
arduino.js
/** * Visual Blocks Language * * Copyright 2020 Arthur Zheng. * https://github.com/zhengyangliu/scratch-blocks * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ 'use strict'; goog.provide('Blockly.Arduino.arduino'); goog.require('Blockly.Arduino'); Blockly.Arduino['arduino_pin_setPinMode'] = function(block) { var arg0 = block.getFieldValue('PIN') || '0'; var arg1 = block.getFieldValue('MODE') || 'INPUT'; var code = "pinMode(" + arg0 + ", " + arg1 + ");\n"; return code; }; Blockly.Arduino['arduino_pin_setDigitalOutput'] = function(block) { var arg0 = block.getFieldValue('PIN') || '0'; var arg1 = Blockly.Arduino.valueToCode(block, 'LEVEL', Blockly.Arduino.ORDER_UNARY_POSTFIX) || 'LOW'; var code = "digitalWrite(" + arg0 + ", " + arg1 + ");\n"; return code; }; Blockly.Arduino['arduino_pin_menu_level'] = function(block) { var code = block.getFieldValue('level') || 'LOW'; return [code, Blockly.Arduino.ORDER_ATOMIC]; }; Blockly.Arduino['arduino_pin_setPwmOutput'] = function(block) { var arg0 = block.getFieldValue('PIN') || '0'; var arg1 = Blockly.Arduino.valueToCode(block, 'OUT', Blockly.Arduino.ORDER_UNARY_POSTFIX) || 0; var code = "analogWrite(" + arg0 + ", " + arg1 + ");\n"; return code; }; Blockly.Arduino['arduino_pin_readDigitalPin'] = function(block) { var arg0 = block.getFieldValue('PIN') || '0'; var code = "digitalRead(" + arg0 + ")"; return [code, Blockly.Arduino.ORDER_ATOMIC]; };
var code = "analogRead(" + arg0 + ")"; return [code, Blockly.Arduino.ORDER_ATOMIC]; }; Blockly.Arduino['arduino_pin_setServoOutput'] = function(block) { var arg0 = block.getFieldValue('PIN') || 'A1'; var arg1 = Blockly.Arduino.valueToCode(block, 'OUT', Blockly.Arduino.ORDER_UNARY_POSTFIX) || 0; Blockly.Arduino.includes_['include_servo'] = '#include <Servo.h>'; Blockly.Arduino.definitions_['definitions_servo' + arg0] = 'Servo servo_' + arg0 + ';'; Blockly.Arduino.setups_['setups_servo' + arg0] = 'servo_' + arg0 + '.attach' + '(' + arg0 + ');'; var code = 'servo_' + arg0 + '.write' + '(' + arg1 + ');\n'; return code; }; Blockly.Arduino['arduino_pin_attachInterrupt'] = function(block) { var arg0 = block.getFieldValue('PIN') || '2'; var arg1 = block.getFieldValue('MODE') || 'RISING'; var branch = Blockly.Arduino.statementToCode(block, 'SUBSTACK'); branch = Blockly.Arduino.addLoopTrap(branch, block.id); Blockly.Arduino.definitions_['definitions_ISR_' + arg1 + arg0] = 'void ISR_' + arg1 + '_' + arg0 + '() {\n' + branch + '}'; var code = 'attachInterrupt(digitalPinToInterrupt(' + arg0 + '), ISR_' + arg1 + '_' + arg0 + ', ' + arg1 + ');\n'; return code; }; Blockly.Arduino['arduino_pin_detachInterrupt'] = function(block) { var arg0 = block.getFieldValue('PIN') || '2'; var code = 'detachInterrupt(digitalPinToInterrupt(' + arg0 + '));\n'; return code; }; Blockly.Arduino['arduino_serial_serialBegin'] = function(block) { var arg0 = block.getFieldValue('VALUE') || '9600'; var code = 'Serial.begin(' + arg0 + ');\n'; return code; }; Blockly.Arduino['arduino_serial_serialPrint'] = function(block) { var arg0 = Blockly.Arduino.valueToCode(block, 'VALUE', Blockly.Arduino.ORDER_UNARY_POSTFIX) || ''; var eol = block.getFieldValue('EOL') || 'warp'; var code = ''; if (eol === 'warp') { code = 'Serial.println(' + arg0 + ');\n'; } else { code = 'Serial.print(' + arg0 + ');\n'; } return code; }; Blockly.Arduino['arduino_serial_serialAvailable'] = function() { var code = 'Serial.available()'; 
return [code, Blockly.Arduino.ORDER_ATOMIC]; }; Blockly.Arduino['arduino_serial_serialReadData'] = function() { var code = 'Serial.read()'; return [code, Blockly.Arduino.ORDER_ATOMIC]; }; Blockly.Arduino['arduino_serial_multiSerialBegin'] = function(block) { var arg0 = block.getFieldValue('NO') || '0'; var arg1 = block.getFieldValue('VALUE') || '9600'; var code; if(arg0 === '0') { arg0 = ''; } code = 'Serial' + arg0 + '.begin(' + arg1 + ');\n'; return code; }; Blockly.Arduino['arduino_serial_multiSerialPrint'] = function(block) { var arg0 = block.getFieldValue('NO') || '0'; var arg1 = Blockly.Arduino.valueToCode(block, 'VALUE', Blockly.Arduino.ORDER_UNARY_POSTFIX) || ''; var eol = block.getFieldValue('EOL') || 'warp'; var code; if(arg0 === '0') { arg0 = ''; } if (eol === 'warp') { code = 'Serial' + arg0 + '.println(' + arg1 + ');\n'; } else { code = 'Serial' + arg0 + '.print(' + arg1 + ');\n'; } return code; }; Blockly.Arduino['arduino_serial_multiSerialAvailable'] = function(block) { var arg0 = block.getFieldValue('NO') || '0'; var code; if(arg0 === '0') { arg0 = ''; } var code = 'Serial' + arg0 + '.available()'; return [code, Blockly.Arduino.ORDER_ATOMIC]; }; Blockly.Arduino['arduino_serial_multiSerialReadAByte'] = function(block) { var arg0 = block.getFieldValue('NO') || '0'; var code; if(arg0 === '0') { arg0 = ''; } var code = 'Serial' + arg0 + '.read()'; return [code, Blockly.Arduino.ORDER_ATOMIC]; }; Blockly.Arduino['arduino_sensor_runningTime'] = function() { var code = "millis()"; return [code, Blockly.Arduino.ORDER_ATOMIC]; }; Blockly.Arduino['arduino_data_dataMap'] = function(block) { var arg0 = Blockly.Arduino.valueToCode(block, 'DATA', Blockly.Arduino.ORDER_UNARY_POSTFIX) || 0; var arg1 = Blockly.Arduino.valueToCode(block, 'ARG0', Blockly.Arduino.ORDER_UNARY_POSTFIX) || 1; var arg2 = Blockly.Arduino.valueToCode(block, 'ARG1', Blockly.Arduino.ORDER_UNARY_POSTFIX) || 100; var arg3 = Blockly.Arduino.valueToCode(block, 'ARG2', 
Blockly.Arduino.ORDER_UNARY_POSTFIX) || 1; var arg4 = Blockly.Arduino.valueToCode(block, 'ARG3', Blockly.Arduino.ORDER_UNARY_POSTFIX) || 1000; var code = 'map(' + arg0 + ', ' + arg1 + ', ' + arg2 + ', ' + arg3 + ', ' + arg4 + ')'; return [code, Blockly.Arduino.ORDER_ATOMIC]; }; Blockly.Arduino['arduino_data_dataConstrain'] = function(block) { var arg0 = Blockly.Arduino.valueToCode(block, 'DATA', Blockly.Arduino.ORDER_UNARY_POSTFIX) || 0; var arg1 = Blockly.Arduino.valueToCode(block, 'ARG0', Blockly.Arduino.ORDER_UNARY_POSTFIX) || 1; var arg2 = Blockly.Arduino.valueToCode(block, 'ARG1', Blockly.Arduino.ORDER_UNARY_POSTFIX) || 100; var code = 'constrain(' + arg0 + ', ' + arg1 + ', ' + arg2 + ')'; return [code, Blockly.Arduino.ORDER_ATOMIC]; }; Blockly.Arduino['arduino_data_dataConvert'] = function(block) { var arg0 = Blockly.Arduino.valueToCode(block, 'DATA', Blockly.Arduino.ORDER_UNARY_POSTFIX) || 0; var arg1 = block.getFieldValue('TYPE') || 'INTEGER'; var code; switch(arg1) { case 'INTEGER': code = 'String(' + arg0 + ').toInt()'; break; case 'DECIMAL': code = 'String(' + arg0 + ').toFloat()'; break; case 'STRING': code = 'String(' + arg0 + ')'; break; } return [code, Blockly.Arduino.ORDER_ATOMIC]; }; Blockly.Arduino['arduino_data_dataConvertASCIICharacter'] = function(block) { var arg0 = Blockly.Arduino.valueToCode(block, 'DATA', Blockly.Arduino.ORDER_UNARY_POSTFIX) || '0'; var code = 'String(char(' + arg0 + '))'; return [code, Blockly.Arduino.ORDER_ATOMIC]; }; Blockly.Arduino['arduino_data_dataConvertASCIINumber'] = function(block) { var arg0 = Blockly.Arduino.valueToCode(block, 'DATA', Blockly.Arduino.ORDER_UNARY_POSTFIX) || '0'; var code = 'toascii(String(' + arg0 + ')[0])'; return [code, Blockly.Arduino.ORDER_ATOMIC]; };
Blockly.Arduino['arduino_pin_readAnalogPin'] = function(block) { var arg0 = block.getFieldValue('PIN') || 'A1';
styling.rs
use lazy_static::lazy_static; use std::collections::HashMap; use std::sync::Arc; use std::sync::Mutex; use termion::color::*; lazy_static! { static ref STYLES: Arc<Mutex<HashMap<&'static str, Style>>> = Arc::new(Mutex::new(HashMap::new())); } pub fn set_default_styles() { let mut map = STYLES.lock().unwrap(); // TODO make this a &str, &str array and data-drive the arguments, ready for embedding in the // json config map.insert("git-modified", Style::new().fg(Yellow)); map.insert("git-added", Style::new().fg(Green)); map.insert("git-deleted", Style::new().fg(Red)); map.insert("git-unknown", Style::new().fg(Blue)); } #[derive(Default, Clone)] pub struct Style { fg: Option<String>, bg: Option<String>, } impl Style { pub fn id<T: AsRef<str>>(name: T) -> Self { let map = STYLES.lock().unwrap(); map.get(name.as_ref()).unwrap_or(&Style::new()).clone() } pub fn new() -> Self { Style { fg: None, bg: None } } pub fn fg<T: Color>(mut self, col: T) -> Self { self.fg = Some(Fg(col).to_string()); self } pub fn bg<T: Color>(mut self, col: T) -> Self { self.bg = Some(Bg(col).to_string()); self } pub fn before(&self) -> String { let mut out = String::new(); if let Some(fg) = &self.fg { out += fg; } if let Some(bg) = &self.bg { out += bg; } out } pub fn after(&self) -> String { let reset = termion::color::Reset; let mut out = reset.fg_str().to_string(); out.push_str(reset.bg_str()); out } pub fn output<T>(&self, input: T) -> String where T: AsRef<str> + std::fmt::Display, { format!("{}{}{}", self.before(), input, self.after()) }
}
eventLogsMapStateToProps.ts
import { connect, ConnectedProps } from 'react-redux'; import { MapStateToProps, Dispatch } from '../../../../../types'; import { RouterTriggerProps, EventTrigger } from '../../types'; import { NotFoundError } from '../../../../Error/PageNotFound'; import { getEventTriggerByName } from '../../../../../metadata/selector'; type PropsFromState = { triggerName: string; currentTrigger: EventTrigger; readOnlyMode: boolean; }; const mapStateToProps: MapStateToProps<PropsFromState, RouterTriggerProps> = ( state, ownProps ) => { const triggerName = ownProps.params.triggerName; const currentTrigger = getEventTriggerByName(state)(triggerName); if (!currentTrigger) { // throw a 404 exception throw new NotFoundError(); } return { triggerName, currentTrigger, readOnlyMode: state.main.readOnlyMode, }; }; const mapDispatchToProps = (dispatch: Dispatch) => ({ dispatch }); export const connector = connect(mapStateToProps, mapDispatchToProps);
export type EventsLogsInjectedProps = ConnectedProps<typeof connector>;
enemy-action.go
package assets const ( EnemyActionBullet = "bullet" EnemyActionGrenade = "grenade" )
// EnemyActions are repeated in an endless loop - until the enemy leaves the
// screen or dies.
type EnemyAction struct {
	// Action names what to do; see the EnemyAction* constants.
	Action string `json:"action"`
	// Delay before the action fires — units not shown here; presumably
	// milliseconds, confirm against the consumer of this struct.
	Delay int64 `json:"delay"`
}
// EnemyAction is something that an Enemy does. Each Enemy has a list of these actions. Elements of this list are executed in sequence.
node.go
package node import ( "fmt" "github.com/bpineau/kube-alert/config" "k8s.io/client-go/pkg/api/v1" ) // Handler implements handlers.Handler type Handler struct { conf *config.AlertConfig } var knownBadConditions = map[string]bool{ "OutOfDisk": true, "MemoryPressure": true, "DiskPressure": true, "NetworkUnavailable": true, } // Init initialize a new node handler func (n *Handler) Init(c *config.AlertConfig) error { c.Logger.Info("node handler initialized") n.conf = c return nil } // ObjectCreated inspect a node health func (n *Handler) ObjectCreated(obj interface{}) (bool, string) { node, _ := obj.(*v1.Node) for _, c := range node.Status.Conditions { if c.Status == "False" { continue } if knownBadConditions[string(c.Type)]
} return true, "" } // ObjectDeleted is notified on node deletion func (n *Handler) ObjectDeleted(obj interface{}) (bool, string) { return true, "" }
{ return false, fmt.Sprintf("Node %s is unhealthy: %s", node.Name, c.Message) }
sixflagshurricaneharborarlington.js
"use strict"; var SixFlagsPark = require("./index"); /** * Six Flags Hurricane Harbor, Arlington * @class * @extends SixFlagsPark */ class SixFlagsHurricaneHarborArlington extends SixFlagsPark {
constructor(options = {}) { options.name = options.name || "Six Flags Hurricane Harbor, Arlington"; options.timezone = options.timezone || "America/Chicago"; // set park's location as it's entrance options.latitude = options.latitude || 32.761064; options.longitude = options.longitude || -97.0829; options.park_id = options.park_id || "10"; // inherit from base class super(options); } } module.exports = SixFlagsHurricaneHarborArlington;
/** * Create a new SixFlagsHurricaneHarborArlington object */
worker-xml.js
"no use strict";(function(e){function t(e,t){var n=e,r="";while(n){var i=t[n];if(typeof i=="string")return i+r;if(i)return i.location.replace(/\/*$/,"/")+(r||i.main||i.name);if(i===!1)return"";var s=n.lastIndexOf("/");if(s===-1)break;r=n.substr(s)+r,n=n.slice(0,s)}return e}if(typeof e.window!="undefined"&&e.document)return;if(e.require&&e.define)return;e.console||(e.console=function(){var e=Array.prototype.slice.call(arguments,0);postMessage({type:"log",data:e})},e.console.error=e.console.warn=e.console.log=e.console.trace=e.console),e.window=e,e.ace=e,e.onerror=function(e,t,n,r,i){postMessage({type:"error",data:{message:e,data:i.data,file:t,line:n,col:r,stack:i.stack}})},e.normalizeModule=function(t,n){if(n.indexOf("!")!==-1){var r=n.split("!");return e.normalizeModule(t,r[0])+"!"+e.normalizeModule(t,r[1])}if(n.charAt(0)=="."){var i=t.split("/").slice(0,-1).join("/");n=(i?i+"/":"")+n;while(n.indexOf(".")!==-1&&s!=n){var s=n;n=n.replace(/^\.\//,"").replace(/\/\.\//,"/").replace(/[^\/]+\/\.\.\//,"")}}return n},e.require=function(r,i){i||(i=r,r=null);if(!i.charAt)throw new Error("worker.js require() accepts only (parentId, id) as arguments");i=e.normalizeModule(r,i);var s=e.require.modules[i];if(s)return s.initialized||(s.initialized=!0,s.exports=s.factory().exports),s.exports;if(!e.require.tlns)return console.log("unable to load "+i);var o=t(i,e.require.tlns);return o.slice(-3)!=".js"&&(o+=".js"),e.require.id=i,e.require.modules[i]={},importScripts(o),e.require(r,i)},e.require.modules={},e.require.tlns={},e.define=function(t,n,r){arguments.length==2?(r=n,typeof t!="string"&&(n=t,t=e.require.id)):arguments.length==1&&(r=t,n=[],t=e.require.id);if(typeof r!="function"){e.require.modules[t]={exports:r,initialized:!0};return}n.length||(n=["require","exports","module"]);var i=function(n){return e.require(t,n)};e.require.modules[t]={exports:{},factory:function(){var e=this,t=r.apply(this,n.map(function(t){switch(t){case"require":return i;case"exports":return 
e.exports;case"module":return e;default:return i(t)}}));return t&&(e.exports=t),e}}},e.define.amd={},require.tlns={},e.initBaseUrls=function(t){for(var n in t)require.tlns[n]=t[n]},e.initSender=function(){var n=e.require("ace/lib/event_emitter").EventEmitter,r=e.require("ace/lib/oop"),i=function(){};return function(){r.implement(this,n),this.callback=function(e,t){postMessage({type:"call",id:t,data:e})},this.emit=function(e,t){postMessage({type:"event",name:e,data:t})}}.call(i.prototype),new i};var n=e.main=null,r=e.sender=null;e.onmessage=function(t){var i=t.data;if(i.event&&r)r._signal(i.event,i.data);else if(i.command)if(n[i.command])n[i.command].apply(n,i.args);else{if(!e[i.command])throw new Error("Unknown command:"+i.command);e[i.command].apply(e,i.args)}else if(i.init){e.initBaseUrls(i.tlns),require("ace/lib/es5-shim"),r=e.sender=e.initSender();var s=require(i.module)[i.classname];n=e.main=new s(r)}}})(this),define("ace/lib/oop",["require","exports","module"],function(e,t,n){"use strict";t.inherits=function(e,t){e.super_=t,e.prototype=Object.create(t.prototype,{constructor:{value:e,enumerable:!1,writable:!0,configurable:!0}})},t.mixin=function(e,t){for(var n in t)e[n]=t[n];return e},t.implement=function(e,n){t.mixin(e,n)}}),define("ace/lib/lang",["require","exports","module"],function(e,t,n){"use strict";t.last=function(e){return e[e.length-1]},t.stringReverse=function(e){return e.split("").reverse().join("")},t.stringRepeat=function(e,t){var n="";while(t>0){t&1&&(n+=e);if(t>>=1)e+=e}return n};var r=/^\s\s*/,i=/\s\s*$/;t.stringTrimLeft=function(e){return e.replace(r,"")},t.stringTrimRight=function(e){return e.replace(i,"")},t.copyObject=function(e){var t={};for(var n in e)t[n]=e[n];return t},t.copyArray=function(e){var t=[];for(var n=0,r=e.length;n<r;n++)e[n]&&typeof e[n]=="object"?t[n]=this.copyObject(e[n]):t[n]=e[n];return t},t.deepCopy=function s(e){if(typeof e!="object"||!e)return e;var t;if(Array.isArray(e)){t=[];for(var 
n=0;n<e.length;n++)t[n]=s(e[n]);return t}var r=e.constructor;if(r===RegExp)return e;t=r();for(var n in e)t[n]=s(e[n]);return t},t.arrayToMap=function(e){var t={};for(var n=0;n<e.length;n++)t[e[n]]=1;return t},t.createMap=function(e){var t=Object.create(null);for(var n in e)t[n]=e[n];return t},t.arrayRemove=function(e,t){for(var n=0;n<=e.length;n++)t===e[n]&&e.splice(n,1)},t.escapeRegExp=function(e){return e.replace(/([.*+?^${}()|[\]\/\\])/g,"\\$1")},t.escapeHTML=function(e){return e.replace(/&/g,"&#38;").replace(/"/g,"&#34;").replace(/'/g,"&#39;").replace(/</g,"&#60;")},t.getMatchOffsets=function(e,t){var n=[];return e.replace(t,function(e){n.push({offset:arguments[arguments.length-2],length:e.length})}),n},t.deferredCall=function(e){var t=null,n=function(){t=null,e()},r=function(e){return r.cancel(),t=setTimeout(n,e||0),r};return r.schedule=r,r.call=function(){return this.cancel(),e(),r},r.cancel=function(){return clearTimeout(t),t=null,r},r.isPending=function(){return t},r},t.delayedCall=function(e,t){var n=null,r=function(){n=null,e()},i=function(e){n==null&&(n=setTimeout(r,e||t))};return i.delay=function(e){n&&clearTimeout(n),n=setTimeout(r,e||t)},i.schedule=i,i.call=function(){this.cancel(),e()},i.cancel=function(){n&&clearTimeout(n),n=null},i.isPending=function(){return n},i}}),define("ace/range",["require","exports","module"],function(e,t,n){"use strict";var r=function(e,t){return e.row-t.row||e.column-t.column},i=function(e,t,n,r){this.start={row:e,column:t},this.end={row:n,column:r}};(function(){this.isEqual=function(e){return this.start.row===e.start.row&&this.end.row===e.end.row&&this.start.column===e.start.column&&this.end.column===e.end.column},this.toString=function(){return"Range: ["+this.start.row+"/"+this.start.column+"] -> ["+this.end.row+"/"+this.end.column+"]"},this.contains=function(e,t){return this.compare(e,t)==0},this.compareRange=function(e){var t,n=e.end,r=e.start;return 
t=this.compare(n.row,n.column),t==1?(t=this.compare(r.row,r.column),t==1?2:t==0?1:0):t==-1?-2:(t=this.compare(r.row,r.column),t==-1?-1:t==1?42:0)},this.comparePoint=function(e){return this.compare(e.row,e.column)},this.containsRange=function(e){return this.comparePoint(e.start)==0&&this.comparePoint(e.end)==0},this.intersects=function(e){var t=this.compareRange(e);return t==-1||t==0||t==1},this.isEnd=function(e,t){return this.end.row==e&&this.end.column==t},this.isStart=function(e,t){return this.start.row==e&&this.start.column==t},this.setStart=function(e,t){typeof e=="object"?(this.start.column=e.column,this.start.row=e.row):(this.start.row=e,this.start.column=t)},this.setEnd=function(e,t){typeof e=="object"?(this.end.column=e.column,this.end.row=e.row):(this.end.row=e,this.end.column=t)},this.inside=function(e,t){return this.compare(e,t)==0?this.isEnd(e,t)||this.isStart(e,t)?!1:!0:!1},this.insideStart=function(e,t){return this.compare(e,t)==0?this.isEnd(e,t)?!1:!0:!1},this.insideEnd=function(e,t){return this.compare(e,t)==0?this.isStart(e,t)?!1:!0:!1},this.compare=function(e,t){return!this.isMultiLine()&&e===this.start.row?t<this.start.column?-1:t>this.end.column?1:0:e<this.start.row?-1:e>this.end.row?1:this.start.row===e?t>=this.start.column?0:-1:this.end.row===e?t<=this.end.column?0:1:0},this.compareStart=function(e,t){return this.start.row==e&&this.start.column==t?-1:this.compare(e,t)},this.compareEnd=function(e,t){return this.end.row==e&&this.end.column==t?1:this.compare(e,t)},this.compareInside=function(e,t){return this.end.row==e&&this.end.column==t?1:this.start.row==e&&this.start.column==t?-1:this.compare(e,t)},this.clipRows=function(e,t){if(this.end.row>t)var n={row:t+1,column:0};else if(this.end.row<e)var n={row:e,column:0};if(this.start.row>t)var r={row:t+1,column:0};else if(this.start.row<e)var r={row:e,column:0};return i.fromPoints(r||this.start,n||this.end)},this.extend=function(e,t){var n=this.compare(e,t);if(n==0)return this;if(n==-1)var 
r={row:e,column:t};else var s={row:e,column:t};return i.fromPoints(r||this.start,s||this.end)},this.isEmpty=function(){return this.start.row===this.end.row&&this.start.column===this.end.column},this.isMultiLine=function(){return this.start.row!==this.end.row},this.clone=function(){return i.fromPoints(this.start,this.end)},this.collapseRows=function(){return this.end.column==0?new i(this.start.row,0,Math.max(this.start.row,this.end.row-1),0):new i(this.start.row,0,this.end.row,0)},this.toScreenRange=function(e){var t=e.documentToScreenPosition(this.start),n=e.documentToScreenPosition(this.end);return new i(t.row,t.column,n.row,n.column)},this.moveBy=function(e,t){this.start.row+=e,this.start.column+=t,this.end.row+=e,this.end.column+=t}}).call(i.prototype),i.fromPoints=function(e,t){return new i(e.row,e.column,t.row,t.column)},i.comparePoints=r,i.comparePoints=function(e,t){return e.row-t.row||e.column-t.column},t.Range=i}),define("ace/apply_delta",["require","exports","module"],function(e,t,n){"use strict";function r(e,t){throw console.log("Invalid Delta:",e),"Invalid Delta: "+t}function i(e,t){return t.row>=0&&t.row<e.length&&t.column>=0&&t.column<=e[t.row].length}function s(e,t){t.action!="insert"&&t.action!="remove"&&r(t,"delta.action must be 'insert' or 'remove'"),t.lines instanceof Array||r(t,"delta.lines must be an Array"),(!t.start||!t.end)&&r(t,"delta.start/end must be an present");var n=t.start;i(e,t.start)||r(t,"delta.start must be contained in document");var s=t.end;t.action=="remove"&&!i(e,s)&&r(t,"delta.end must contained in document for 'remove' actions");var o=s.row-n.row,u=s.column-(o==0?n.column:0);(o!=t.lines.length-1||t.lines[o].length!=u)&&r(t,"delta.range must match delta lines")}t.applyDelta=function(e,t,n){var r=t.start.row,i=t.start.column,s=e[r]||"";switch(t.action){case"insert":var o=t.lines;if(o.length===1)e[r]=s.substring(0,i)+t.lines[0]+s.substring(i);else{var 
u=[r,1].concat(t.lines);e.splice.apply(e,u),e[r]=s.substring(0,i)+e[r],e[r+t.lines.length-1]+=s.substring(i)}break;case"remove":var a=t.end.column,f=t.end.row;r===f?e[r]=s.substring(0,i)+s.substring(a):e.splice(r,f-r+1,s.substring(0,i)+e[f].substring(a))}}}),define("ace/lib/event_emitter",["require","exports","module"],function(e,t,n){"use strict";var r={},i=function(){this.propagationStopped=!0},s=function(){this.defaultPrevented=!0};r._emit=r._dispatchEvent=function(e,t){this._eventRegistry||(this._eventRegistry={}),this._defaultHandlers||(this._defaultHandlers={});var n=this._eventRegistry[e]||[],r=this._defaultHandlers[e];if(!n.length&&!r)return;if(typeof t!="object"||!t)t={};t.type||(t.type=e),t.stopPropagation||(t.stopPropagation=i),t.preventDefault||(t.preventDefault=s),n=n.slice();for(var o=0;o<n.length;o++){n[o](t,this);if(t.propagationStopped)break}if(r&&!t.defaultPrevented)return r(t,this)},r._signal=function(e,t){var n=(this._eventRegistry||{})[e];if(!n)return;n=n.slice();for(var r=0;r<n.length;r++)n[r](t,this)},r.once=function(e,t){var n=this;t&&this.addEventListener(e,function r(){n.removeEventListener(e,r),t.apply(null,arguments)})},r.setDefaultHandler=function(e,t){var n=this._defaultHandlers;n||(n=this._defaultHandlers={_disabled_:{}});if(n[e]){var r=n[e],i=n._disabled_[e];i||(n._disabled_[e]=i=[]),i.push(r);var s=i.indexOf(t);s!=-1&&i.splice(s,1)}n[e]=t},r.removeDefaultHandler=function(e,t){var n=this._defaultHandlers;if(!n)return;var r=n._disabled_[e];if(n[e]==t){var i=n[e];r&&this.setDefaultHandler(e,r.pop())}else if(r){var s=r.indexOf(t);s!=-1&&r.splice(s,1)}},r.on=r.addEventListener=function(e,t,n){this._eventRegistry=this._eventRegistry||{};var r=this._eventRegistry[e];return r||(r=this._eventRegistry[e]=[]),r.indexOf(t)==-1&&r[n?"unshift":"push"](t),t},r.off=r.removeListener=r.removeEventListener=function(e,t){this._eventRegistry=this._eventRegistry||{};var n=this._eventRegistry[e];if(!n)return;var 
r=n.indexOf(t);r!==-1&&n.splice(r,1)},r.removeAllListeners=function(e){this._eventRegistry&&(this._eventRegistry[e]=[])},t.EventEmitter=r}),define("ace/anchor",["require","exports","module","ace/lib/oop","ace/lib/event_emitter"],function(e,t,n){"use strict";var r=e("./lib/oop"),i=e("./lib/event_emitter").EventEmitter,s=t.Anchor=function(e,t,n){this.$onChange=this.onChange.bind(this),this.attach(e),typeof n=="undefined"?this.setPosition(t.row,t.column):this.setPosition(t,n)};(function(){function e(e,t,n){var r=n?e.column<=t.column:e.column<t.column;return e.row<t.row||e.row==t.row&&r}function t(t,n,r){var i=t.action=="insert",s=(i?1:-1)*(t.end.row-t.start.row),o=(i?1:-1)*(t.end.column-t.start.column),u=t.start,a=i?u:t.end;return e(n,u,r)?{row:n.row,column:n.column}:e(a,n,!r)?{row:n.row+s,column:n.column+(n.row==a.row?o:0)}:{row:u.row,column:u.column}}r.implement(this,i),this.getPosition=function(){return this.$clipPositionToDocument(this.row,this.column)},this.getDocument=function(){return this.document},this.$insertRight=!1,this.onChange=function(e){if(e.start.row==e.end.row&&e.start.row!=this.row)return;if(e.start.row>this.row)return;var n=t(e,{row:this.row,column:this.column},this.$insertRight);this.setPosition(n.row,n.column,!0)},this.setPosition=function(e,t,n){var r;n?r={row:e,column:t}:r=this.$clipPositionToDocument(e,t);if(this.row==r.row&&this.column==r.column)return;var i={row:this.row,column:this.column};this.row=r.row,this.column=r.column,this._signal("change",{old:i,value:r})},this.detach=function(){this.document.removeEventListener("change",this.$onChange)},this.attach=function(e){this.document=e||this.document,this.document.on("change",this.$onChange)},this.$clipPositionToDocument=function(e,t){var n={};return 
e>=this.document.getLength()?(n.row=Math.max(0,this.document.getLength()-1),n.column=this.document.getLine(n.row).length):e<0?(n.row=0,n.column=0):(n.row=e,n.column=Math.min(this.document.getLine(n.row).length,Math.max(0,t))),t<0&&(n.column=0),n}}).call(s.prototype)}),define("ace/document",["require","exports","module","ace/lib/oop","ace/apply_delta","ace/lib/event_emitter","ace/range","ace/anchor"],function(e,t,n){"use strict";var r=e("./lib/oop"),i=e("./apply_delta").applyDelta,s=e("./lib/event_emitter").EventEmitter,o=e("./range").Range,u=e("./anchor").Anchor,a=function(e){this.$lines=[""],e.length===0?this.$lines=[""]:Array.isArray(e)?this.insertMergedLines({row:0,column:0},e):this.insert({row:0,column:0},e)};(function(){r.implement(this,s),this.setValue=function(e){var t=this.getLength()-1;this.remove(new o(0,0,t,this.getLine(t).length)),this.insert({row:0,column:0},e)},this.getValue=function(){return this.getAllLines().join(this.getNewLineCharacter())},this.createAnchor=function(e,t){return new u(this,e,t)},"aaa".split(/a/).length===0?this.$split=function(e){return e.replace(/\r\n|\r/g,"\n").split("\n")}:this.$split=function(e){return e.split(/\r\n|\r|\n/)},this.$detectNewLine=function(e){var t=e.match(/^.*?(\r\n|\r|\n)/m);this.$autoNewLine=t?t[1]:"\n",this._signal("changeNewLineMode")},this.getNewLineCharacter=function(){switch(this.$newLineMode){case"windows":return"\r\n";case"unix":return"\n";default:return this.$autoNewLine||"\n"}},this.$autoNewLine="",this.$newLineMode="auto",this.setNewLineMode=function(e){if(this.$newLineMode===e)return;this.$newLineMode=e,this._signal("changeNewLineMode")},this.getNewLineMode=function(){return this.$newLineMode},this.isNewLine=function(e){return e=="\r\n"||e=="\r"||e=="\n"},this.getLine=function(e){return this.$lines[e]||""},this.getLines=function(e,t){return this.$lines.slice(e,t+1)},this.getAllLines=function(){return this.getLines(0,this.getLength())},this.getLength=function(){return 
this.$lines.length},this.getTextRange=function(e){return this.getLinesForRange(e).join(this.getNewLineCharacter())},this.getLinesForRange=function(e){var t;if(e.start.row===e.end.row)t=[this.getLine(e.start.row).substring(e.start.column,e.end.column)];else{t=this.getLines(e.start.row,e.end.row),t[0]=(t[0]||"").substring(e.start.column);var n=t.length-1;e.end.row-e.start.row==n&&(t[n]=t[n].substring(0,e.end.column))}return t},this.insertLines=function(e,t){return console.warn("Use of document.insertLines is deprecated. Use the insertFullLines method instead."),this.insertFullLines(e,t)},this.removeLines=function(e,t){return console.warn("Use of document.removeLines is deprecated. Use the removeFullLines method instead."),this.removeFullLines(e,t)},this.insertNewLine=function(e){return console.warn("Use of document.insertNewLine is deprecated. Use insertMergedLines(position, ['', '']) instead."),this.insertMergedLines(e,["",""])},this.insert=function(e,t){return this.getLength()<=1&&this.$detectNewLine(t),this.insertMergedLines(e,this.$split(t))},this.insertInLine=function(e,t){var n=this.clippedPos(e.row,e.column),r=this.pos(e.row,e.column+t.length);return this.applyDelta({start:n,end:r,action:"insert",lines:[t]},!0),this.clonePos(r)},this.clippedPos=function(e,t){var n=this.getLength();e===undefined?e=n:e<0?e=0:e>=n&&(e=n-1,t=undefined);var r=this.getLine(e);return t==undefined&&(t=r.length),t=Math.min(Math.max(t,0),r.length),{row:e,column:t}},this.clonePos=function(e){return{row:e.row,column:e.column}},this.pos=function(e,t){return{row:e,column:t}},this.$clipPosition=function(e){var t=this.getLength();return e.row>=t?(e.row=Math.max(0,t-1),e.column=this.getLine(t-1).length):(e.row=Math.max(0,e.row),e.column=Math.min(Math.max(e.column,0),this.getLine(e.row).length)),e},this.insertFullLines=function(e,t){e=Math.min(Math.max(e,0),this.getLength());var 
n=0;e<this.getLength()?(t=t.concat([""]),n=0):(t=[""].concat(t),e--,n=this.$lines[e].length),this.insertMergedLines({row:e,column:n},t)},this.insertMergedLines=function(e,t){var n=this.clippedPos(e.row,e.column),r={row:n.row+t.length-1,column:(t.length==1?n.column:0)+t[t.length-1].length};return this.applyDelta({start:n,end:r,action:"insert",lines:t}),this.clonePos(r)},this.remove=function(e){var t=this.clippedPos(e.start.row,e.start.column),n=this.clippedPos(e.end.row,e.end.column);return this.applyDelta({start:t,end:n,action:"remove",lines:this.getLinesForRange({start:t,end:n})}),this.clonePos(t)},this.removeInLine=function(e,t,n){var r=this.clippedPos(e,t),i=this.clippedPos(e,n);return this.applyDelta({start:r,end:i,action:"remove",lines:this.getLinesForRange({start:r,end:i})},!0),this.clonePos(r)},this.removeFullLines=function(e,t){e=Math.min(Math.max(0,e),this.getLength()-1),t=Math.min(Math.max(0,t),this.getLength()-1);var n=t==this.getLength()-1&&e>0,r=t<this.getLength()-1,i=n?e-1:e,s=n?this.getLine(i).length:0,u=r?t+1:t,a=r?0:this.getLine(u).length,f=new o(i,s,u,a),l=this.$lines.slice(e,t+1);return this.applyDelta({start:f.start,end:f.end,action:"remove",lines:this.getLinesForRange(f)}),l},this.removeNewLine=function(e){e<this.getLength()-1&&e>=0&&this.applyDelta({start:this.pos(e,this.getLine(e).length),end:this.pos(e+1,0),action:"remove",lines:["",""]})},this.replace=function(e,t){!e instanceof o&&(e=o.fromPoints(e.start,e.end));if(t.length===0&&e.isEmpty())return e.start;if(t==this.getTextRange(e))return e.end;this.remove(e);var n;return t?n=this.insert(e.start,t):n=e.start,n},this.applyDeltas=function(e){for(var t=0;t<e.length;t++)this.applyDelta(e[t])},this.revertDeltas=function(e){for(var t=e.length-1;t>=0;t--)this.revertDelta(e[t])},this.applyDelta=function(e,t){var 
n=e.action=="insert";if(n?e.lines.length<=1&&!e.lines[0]:!o.comparePoints(e.start,e.end))return;n&&e.lines.length>2e4&&this.$splitAndapplyLargeDelta(e,2e4),i(this.$lines,e,t),this._signal("change",e)},this.$splitAndapplyLargeDelta=function(e,t){var n=e.lines,r=n.length,i=e.start.row,s=e.start.column,o=0,u=0;do{o=u,u+=t-1;var a=n.slice(o,u);if(u>r){e.lines=a,e.start.row=i+o,e.start.column=s;break}a.push(""),this.applyDelta({start:this.pos(i+o,s),end:this.pos(i+u,s=0),action:e.action,lines:a},!0)}while(!0)},this.revertDelta=function(e){this.applyDelta({start:this.clonePos(e.start),end:this.clonePos(e.end),action:e.action=="insert"?"remove":"insert",lines:e.lines.slice()})},this.indexToPosition=function(e,t){var n=this.$lines||this.getAllLines(),r=this.getNewLineCharacter().length;for(var i=t||0,s=n.length;i<s;i++){e-=n[i].length+r;if(e<0)return{row:i,column:e+n[i].length+r}}return{row:s-1,column:n[s-1].length}},this.positionToIndex=function(e,t){var n=this.$lines||this.getAllLines(),r=this.getNewLineCharacter().length,i=0,s=Math.min(e.row,n.length);for(var o=t||0;o<s;++o)i+=n[o].length+r;return i+e.column}}).call(a.prototype),t.Document=a}),define("ace/worker/mirror",["require","exports","module","ace/range","ace/document","ace/lib/lang"],function(e,t,n){"use strict";var r=e("../range").Range,i=e("../document").Document,s=e("../lib/lang"),o=t.Mirror=function(e){this.sender=e;var t=this.doc=new i(""),n=this.deferredUpdate=s.delayedCall(this.onUpdate.bind(this)),r=this;e.on("change",function(e){var i=e.data;if(i[0].start)t.applyDeltas(i);else for(var s=0;s<i.length;s+=2){if(Array.isArray(i[s+1]))var o={action:"insert",start:i[s],lines:i[s+1]};else var o={action:"remove",start:i[s],end:i[s+1]};t.applyDelta(o,!0)}if(r.$timeout)return 
n.schedule(r.$timeout);r.onUpdate()})};(function(){this.$timeout=500,this.setTimeout=function(e){this.$timeout=e},this.setValue=function(e){this.doc.setValue(e),this.deferredUpdate.schedule(this.$timeout)},this.getValue=function(e){this.sender.callback(this.doc.getValue(),e)},this.onUpdate=function(){},this.isPending=function(){return this.deferredUpdate.isPending()}}).call(o.prototype)}),define("ace/mode/xml/sax",["require","exports","module"],function(e,t,n){function d(){}function v(e,t,n,r,i){function s(e){if(e>65535){e-=65536;var t=55296+(e>>10),n=56320+(e&1023);return String.fromCharCode(t,n)}return String.fromCharCode(e)}function o(e){var t=e.slice(1,-1);return t in n?n[t]:t.charAt(0)==="#"?s(parseInt(t.substr(1).replace("x","0x"))):(i.error("entity not found:"+e),e)}function u(t){var n=e.substring(v,t).replace(/&#?\w+;/g,o);h&&a(v),r.characters(n,0,t-v),v=t}function a(t,n){while(t>=l&&(n=c.exec(e)))f=n.index,l=f+n[0].length,h.lineNumber++;h.columnNumber=t-f+1}var f=0,l=0,c=/.+(?:\r\n?|\n)|.*$/g,h=r.locator,p=[{currentNSMap:t}],d={},v=0;for(;;){var E=e.indexOf("<",v);if(E<0){if(!e.substr(v).match(/^\s*$/)){var N=r.document,C=N.createTextNode(e.substr(v));N.appendChild(C),r.currentElement=C}return}E>v&&u(E);switch(e.charAt(E+1)){case"/":var k=e.indexOf(">",E+3),L=e.substring(E+2,k),A;if(!(p.length>1)){i.fatalError("end tag name not found for: "+L);break}A=p.pop();var O=A.localNSMap;A.tagName!=L&&i.fatalError("end tag name: "+L+" does not match the current start tagName: "+A.tagName),r.endElement(A.uri,A.localName,L);if(O)for(var M in O)r.endPrefixMapping(M);k++;break;case"?":h&&a(E),k=x(e,E,r);break;case"!":h&&a(E),k=S(e,E,r,i);break;default:try{h&&a(E);var _=new T,k=g(e,E,_,o,i),D=_.length;if(D&&h){var P=m(h,{});for(var E=0;E<D;E++){var H=_[E];a(H.offset),H.offset=m(h,{})}m(P,h)}!_.closed&&w(e,k,_.tagName,d)&&(_.closed=!0,n.nbsp||i.warning("unclosed xml 
attribute")),y(_,r,p),_.uri==="http://www.w3.org/1999/xhtml"&&!_.closed?k=b(e,k,_.tagName,o,r):k++}catch(B){i.error("element parse error: "+B),k=-1}}k<0?u(E+1):v=k}}function m(e,t){return t.lineNumber=e.lineNumber,t.columnNumber=e.columnNumber,t}function g(e,t,n,r,i){var s,d,v=++t,m=o;for(;;){var g=e.charAt(v);switch(g){case"=":if(m===u)s=e.slice(t,v),m=f;else{if(m!==a)throw new Error("attribute equal must after attrName");m=f}break;case"'":case'"':if(m===f){t=v+1,v=e.indexOf(g,t);if(!(v>0))throw new Error("attribute value no end '"+g+"' match");d=e.slice(t,v).replace(/&#?\w+;/g,r),n.add(s,d,t-1),m=c}else{if(m!=l)throw new Error('attribute value must after "="');d=e.slice(t,v).replace(/&#?\w+;/g,r),n.add(s,d,t),i.warning('attribute "'+s+'" missed start quot('+g+")!!"),t=v+1,m=c}break;case"/":switch(m){case o:n.setTagName(e.slice(t,v));case c:case h:case p:m=p,n.closed=!0;case l:case u:case a:break;default:throw new Error("attribute invalid close char('/')")}break;case"":i.error("unexpected end of input");case">":switch(m){case o:n.setTagName(e.slice(t,v));case c:case h:case p:break;case l:case u:d=e.slice(t,v),d.slice(-1)==="/"&&(n.closed=!0,d=d.slice(0,-1));case a:m===a&&(d=s),m==l?(i.warning('attribute "'+d+'" missed quot(")!!'),n.add(s,d.replace(/&#?\w+;/g,r),t)):(i.warning('attribute "'+d+'" missed value!! "'+d+'" instead!!'),n.add(d,d,t));break;case f:throw new Error("attribute value missed!!")}return v;case"\u0080":g=" ";default:if(g<=" ")switch(m){case o:n.setTagName(e.slice(t,v)),m=h;break;case u:s=e.slice(t,v),m=a;break;case l:var d=e.slice(t,v).replace(/&#?\w+;/g,r);i.warning('attribute "'+d+'" missed quot(")!!'),n.add(s,d,t);case c:m=h}else switch(m){case a:i.warning('attribute "'+s+'" missed value!! 
"'+s+'" instead!!'),n.add(s,s,t),t=v,m=u;break;case c:i.warning('attribute space is required"'+s+'"!!');case h:m=u,t=v;break;case f:m=l,t=v;break;case p:throw new Error("elements closed character '/' and '>' must be connected to")}}v++}}function y(e,t,n){var r=e.tagName,i=null,s=n[n.length-1].currentNSMap,o=e.length;while(o--){var u=e[o],a=u.qName,f=u.value,l=a.indexOf(":");if(l>0)var c=u.prefix=a.slice(0,l),h=a.slice(l+1),p=c==="xmlns"&&h;else h=a,c=null,p=a==="xmlns"&&"";u.localName=h,p!==!1&&(i==null&&(i={},E(s,s={})),s[p]=i[p]=f,u.uri="http://www.w3.org/2000/xmlns/",t.startPrefixMapping(p,f))}var o=e.length;while(o--){u=e[o];var c=u.prefix;c&&(c==="xml"&&(u.uri="http://www.w3.org/XML/1998/namespace"),c!=="xmlns"&&(u.uri=s[c]))}var l=r.indexOf(":");l>0?(c=e.prefix=r.slice(0,l),h=e.localName=r.slice(l+1)):(c=null,h=e.localName=r);var d=e.uri=s[c||""];t.startElement(d,h,r,e);if(e.closed){t.endElement(d,h,r);if(i)for(c in i)t.endPrefixMapping(c)}else e.currentNSMap=s,e.localNSMap=i,n.push(e)}function b(e,t,n,r,i){if(/^(?:script|textarea)$/i.test(n)){var s=e.indexOf("</"+n+">",t),o=e.substring(t+1,s);if(/[&<]/.test(o))return/^script$/i.test(n)?(i.characters(o,0,o.length),s):(o=o.replace(/&#?\w+;/g,r),i.characters(o,0,o.length),s)}return t+1}function w(e,t,n,r){var i=r[n];return i==null&&(i=r[n]=e.lastIndexOf("</"+n+">")),i<t}function E(e,t){for(var n in e)t[n]=e[n]}function S(e,t,n,r){var i=e.charAt(t+2);switch(i){case"-":if(e.charAt(t+3)==="-"){var s=e.indexOf("-->",t+4);return s>t?(n.comment(e,t+4,s-t-4),s+3):(r.error("Unclosed comment"),-1)}return-1;default:if(e.substr(t+3,6)=="CDATA["){var s=e.indexOf("]]>",t+9);return n.startCDATA(),n.characters(e,t+9,s-t-9),n.endCDATA(),s+3}var o=C(e,t),u=o.length;if(u>1&&/!doctype/i.test(o[0][0])){var a=o[1][0],f=u>3&&/^public$/i.test(o[2][0])&&o[3][0],l=u>4&&o[4][0],c=o[u-1];return n.startDTD(a,f&&f.replace(/^(['"])(.*?)\1$/,"$2"),l&&l.replace(/^(['"])(.*?)\1$/,"$2")),n.endDTD(),c.index+c[0].length}}return-1}function 
x(e,t,n){var r=e.indexOf("?>",t);if(r){var i=e.substring(t,r).match(/^<\?(\S*)\s*([\s\S]*?)\s*$/);if(i){var s=i[0].length;return n.processingInstruction(i[1],i[2]),r+2}return-1}return-1}function T(e){}function N(e,t){return e.__proto__=t,e}function C(e,t){var n,r=[],i=/'[^']+'|"[^"]+"|[^\s<>\/=]+=?|(\/?\s*>|<)/g;i.lastIndex=t,i.exec(e);while(n=i.exec(e)){r.push(n);if(n[1])return r}}var r=/[A-Z_a-z\xC0-\xD6\xD8-\xF6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/,i=new RegExp("[\\-\\.0-9"+r.source.slice(1,-1)+"\u00b7\u0300-\u036f\\ux203F-\u2040]"),s=new RegExp("^"+r.source+i.source+"*(?::"+r.source+i.source+"*)?$"),o=0,u=1,a=2,f=3,l=4,c=5,h=6,p=7;return d.prototype={parse:function(e,t,n){var r=this.domBuilder;r.startDocument(),E(t,t={}),v(e,t,n,r,this.errorHandler),r.endDocument()}},T.prototype={setTagName:function(e){if(!s.test(e))throw new Error("invalid tagName:"+e);this.tagName=e},add:function(e,t,n){if(!s.test(e))throw new Error("invalid attribute:"+e);this[this.length++]={qName:e,value:t,offset:n}},length:0,getLocalName:function(e){return this[e].localName},getOffset:function(e){return this[e].offset},getQName:function(e){return this[e].qName},getURI:function(e){return this[e].uri},getValue:function(e){return this[e].value}},N({},N.prototype)instanceof N||(N=function(e,t){function n(){}n.prototype=t,n=new n;for(t in e)n[t]=e[t];return n}),d}),define("ace/mode/xml/dom",["require","exports","module"],function(e,t,n){function r(e,t){for(var n in e)t[n]=e[n]}function i(e,t){var n=e.prototype;if(Object.create){var i=Object.create(t.prototype);n.__proto__=i}if(!(n instanceof t)){function s(){}s.prototype=t.prototype,s=new s,r(n,s),e.prototype=n=s}n.constructor!=e&&(typeof e!="function"&&console.error("unknow Class:"+e),n.constructor=e)}function B(e,t){if(t instanceof Error)var n=t;else n=this,Error.call(this,w[e]),this.message=w[e],Error.captureStackTrace&&Error.captureStackTrace(this,B);return 
n.code=e,t&&(this.message=this.message+": "+t),n}function j(){}function F(e,t){this._node=e,this._refresh=t,I(this)}function I(e){var t=e._node._inc||e._node.ownerDocument._inc;if(e._inc!=t){var n=e._refresh(e._node);gt(e,"length",n.length),r(n,e),e._inc=t}}function q(){}function R(e,t){var n=e.length;while(n--)if(e[n]===t)return n}function U(e,t,n,r){r?t[R(t,r)]=n:t[t.length++]=n;if(e){n.ownerElement=e;var i=e.ownerDocument;i&&(r&&Q(i,e,r),K(i,e,n))}}function z(e,t,n){var r=R(t,n);if(!(r>=0))throw B(L,new Error);var i=t.length-1;while(r<i)t[r]=t[++r];t.length=i;if(e){var s=e.ownerDocument;s&&(Q(s,e,n),n.ownerElement=null)}}function W(e){this._features={};if(e)for(var t in e)this._features=e[t]}function X(){}function V(e){return e=="<"&&"&lt;"||e==">"&&"&gt;"||e=="&"&&"&amp;"||e=='"'&&"&quot;"||"&#"+e.charCodeAt()+";"}function $(e,t){if(t(e))return!0;if(e=e.firstChild)do if($(e,t))return!0;while(e=e.nextSibling)}function J(){}function K(e,t,n){e&&e._inc++;var r=n.namespaceURI;r=="http://www.w3.org/2000/xmlns/"&&(t._nsMap[n.prefix?n.localName:""]=n.value)}function Q(e,t,n,r){e&&e._inc++;var i=n.namespaceURI;i=="http://www.w3.org/2000/xmlns/"&&delete t._nsMap[n.prefix?n.localName:""]}function G(e,t,n){if(e&&e._inc){e._inc++;var r=t.childNodes;if(n)r[r.length++]=n;else{var i=t.firstChild,s=0;while(i)r[s++]=i,i=i.nextSibling;r.length=s}}}function Y(e,t){var n=t.previousSibling,r=t.nextSibling;return n?n.nextSibling=r:e.firstChild=r,r?r.previousSibling=n:e.lastChild=n,G(e.ownerDocument,e),t}function Z(e,t,n){var r=t.parentNode;r&&r.removeChild(t);if(t.nodeType===g){var i=t.firstChild;if(i==null)return t;var s=t.lastChild}else i=s=t;var o=n?n.previousSibling:e.lastChild;i.previousSibling=o,s.nextSibling=n,o?o.nextSibling=i:e.firstChild=i,n==null?e.lastChild=s:n.previousSibling=s;do i.parentNode=e;while(i!==s&&(i=i.nextSibling));return G(e.ownerDocument||e,e),t.nodeType==g&&(t.firstChild=t.lastChild=null),t}function et(e,t){var n=t.parentNode;if(n){var 
r=e.lastChild;n.removeChild(t);var r=e.lastChild}var r=e.lastChild;return t.parentNode=e,t.previousSibling=r,t.nextSibling=null,r?r.nextSibling=t:e.firstChild=t,e.lastChild=t,G(e.ownerDocument,e,t),t}function tt(){this._nsMap={}}function nt(){}function rt(){}function it(){}function st(){}function ot(){}function
(){}function at(){}function ft(){}function lt(){}function ct(){}function ht(){}function pt(){}function dt(e,t){switch(e.nodeType){case u:var n=e.attributes,r=n.length,i=e.firstChild,o=e.tagName,h=s===e.namespaceURI;t.push("<",o);for(var y=0;y<r;y++)dt(n.item(y),t,h);if(i||h&&!/^(?:meta|link|img|br|hr|input|button)$/i.test(o)){t.push(">");if(h&&/^script$/i.test(o))i&&t.push(i.data);else while(i)dt(i,t),i=i.nextSibling;t.push("</",o,">")}else t.push("/>");return;case v:case g:var i=e.firstChild;while(i)dt(i,t),i=i.nextSibling;return;case a:return t.push(" ",e.name,'="',e.value.replace(/[<&"]/g,V),'"');case f:return t.push(e.data.replace(/[<&]/g,V));case l:return t.push("<![CDATA[",e.data,"]]>");case d:return t.push("<!--",e.data,"-->");case m:var b=e.publicId,w=e.systemId;t.push("<!DOCTYPE ",e.name);if(b)t.push(' PUBLIC "',b),w&&w!="."&&t.push('" "',w),t.push('">');else if(w&&w!=".")t.push(' SYSTEM "',w,'">');else{var E=e.internalSubset;E&&t.push(" [",E,"]"),t.push(">")}return;case p:return t.push("<?",e.target," ",e.data,"?>");case c:return t.push("&",e.nodeName,";");default:t.push("??",e.nodeName)}}function vt(e,t,n){var r;switch(t.nodeType){case u:r=t.cloneNode(!1),r.ownerDocument=e;case g:break;case a:n=!0}r||(r=t.cloneNode(!1)),r.ownerDocument=e,r.parentNode=null;if(n){var i=t.firstChild;while(i)r.appendChild(vt(e,i,n)),i=i.nextSibling}return r}function mt(e,t,n){var r=new t.constructor;for(var i in t){var s=t[i];typeof s!="object"&&s!=r[i]&&(r[i]=s)}t.childNodes&&(r.childNodes=new j),r.ownerDocument=e;switch(r.nodeType){case u:var o=t.attributes,f=r.attributes=new q,l=o.length;f._ownerElement=r;for(var c=0;c<l;c++)r.setAttributeNode(mt(e,o.item(c),!0));break;case a:n=!0}if(n){var h=t.firstChild;while(h)r.appendChild(mt(e,h,n)),h=h.nextSibling}return r}function gt(e,t,n){e[t]=n}var 
s="http://www.w3.org/1999/xhtml",o={},u=o.ELEMENT_NODE=1,a=o.ATTRIBUTE_NODE=2,f=o.TEXT_NODE=3,l=o.CDATA_SECTION_NODE=4,c=o.ENTITY_REFERENCE_NODE=5,h=o.ENTITY_NODE=6,p=o.PROCESSING_INSTRUCTION_NODE=7,d=o.COMMENT_NODE=8,v=o.DOCUMENT_NODE=9,m=o.DOCUMENT_TYPE_NODE=10,g=o.DOCUMENT_FRAGMENT_NODE=11,y=o.NOTATION_NODE=12,b={},w={},E=b.INDEX_SIZE_ERR=(w[1]="Index size error",1),S=b.DOMSTRING_SIZE_ERR=(w[2]="DOMString size error",2),x=b.HIERARCHY_REQUEST_ERR=(w[3]="Hierarchy request error",3),T=b.WRONG_DOCUMENT_ERR=(w[4]="Wrong document",4),N=b.INVALID_CHARACTER_ERR=(w[5]="Invalid character",5),C=b.NO_DATA_ALLOWED_ERR=(w[6]="No data allowed",6),k=b.NO_MODIFICATION_ALLOWED_ERR=(w[7]="No modification allowed",7),L=b.NOT_FOUND_ERR=(w[8]="Not found",8),A=b.NOT_SUPPORTED_ERR=(w[9]="Not supported",9),O=b.INUSE_ATTRIBUTE_ERR=(w[10]="Attribute in use",10),M=b.INVALID_STATE_ERR=(w[11]="Invalid state",11),_=b.SYNTAX_ERR=(w[12]="Syntax error",12),D=b.INVALID_MODIFICATION_ERR=(w[13]="Invalid modification",13),P=b.NAMESPACE_ERR=(w[14]="Invalid namespace",14),H=b.INVALID_ACCESS_ERR=(w[15]="Invalid access",15);B.prototype=Error.prototype,r(b,B),j.prototype={length:0,item:function(e){return this[e]||null}},F.prototype.item=function(e){return I(this),this[e]},i(F,j),q.prototype={length:0,item:j.prototype.item,getNamedItem:function(e){var t=this.length;while(t--){var n=this[t];if(n.nodeName==e)return n}},setNamedItem:function(e){var t=e.ownerElement;if(t&&t!=this._ownerElement)throw new B(O);var n=this.getNamedItem(e.nodeName);return U(this._ownerElement,this,e,n),n},setNamedItemNS:function(e){var t=e.ownerElement,n;if(t&&t!=this._ownerElement)throw new B(O);return n=this.getNamedItemNS(e.namespaceURI,e.localName),U(this._ownerElement,this,e,n),n},removeNamedItem:function(e){var t=this.getNamedItem(e);return z(this._ownerElement,this,t),t},removeNamedItemNS:function(e,t){var n=this.getNamedItemNS(e,t);return z(this._ownerElement,this,n),n},getNamedItemNS:function(e,t){var 
n=this.length;while(n--){var r=this[n];if(r.localName==t&&r.namespaceURI==e)return r}return null}},W.prototype={hasFeature:function(e,t){var n=this._features[e.toLowerCase()];return n&&(!t||t in n)?!0:!1},createDocument:function(e,t,n){var r=new J;r.implementation=this,r.childNodes=new j,r.doctype=n,n&&r.appendChild(n);if(t){var i=r.createElementNS(e,t);r.appendChild(i)}return r},createDocumentType:function(e,t,n){var r=new ut;return r.name=e,r.nodeName=e,r.publicId=t,r.systemId=n,r}},X.prototype={firstChild:null,lastChild:null,previousSibling:null,nextSibling:null,attributes:null,parentNode:null,childNodes:null,ownerDocument:null,nodeValue:null,namespaceURI:null,prefix:null,localName:null,insertBefore:function(e,t){return Z(this,e,t)},replaceChild:function(e,t){this.insertBefore(e,t),t&&this.removeChild(t)},removeChild:function(e){return Y(this,e)},appendChild:function(e){return this.insertBefore(e,null)},hasChildNodes:function(){return this.firstChild!=null},cloneNode:function(e){return mt(this.ownerDocument||this,this,e)},normalize:function(){var e=this.firstChild;while(e){var t=e.nextSibling;t&&t.nodeType==f&&e.nodeType==f?(this.removeChild(t),e.appendData(t.data)):(e.normalize(),e=t)}},isSupported:function(e,t){return this.ownerDocument.implementation.hasFeature(e,t)},hasAttributes:function(){return this.attributes.length>0},lookupPrefix:function(e){var t=this;while(t){var n=t._nsMap;if(n)for(var r in n)if(n[r]==e)return r;t=t.nodeType==2?t.ownerDocument:t.parentNode}return null},lookupNamespaceURI:function(e){var t=this;while(t){var n=t._nsMap;if(n&&e in n)return n[e];t=t.nodeType==2?t.ownerDocument:t.parentNode}return null},isDefaultNamespace:function(e){var t=this.lookupPrefix(e);return t==null}},r(o,X),r(o,X.prototype),J.prototype={nodeName:"#document",nodeType:v,doctype:null,documentElement:null,_inc:1,insertBefore:function(e,t){if(e.nodeType==g){var n=e.firstChild;while(n){var r=n.nextSibling;this.insertBefore(n,t),n=r}return e}return 
this.documentElement==null&&e.nodeType==1&&(this.documentElement=e),Z(this,e,t),e.ownerDocument=this,e},removeChild:function(e){return this.documentElement==e&&(this.documentElement=null),Y(this,e)},importNode:function(e,t){return vt(this,e,t)},getElementById:function(e){var t=null;return $(this.documentElement,function(n){if(n.nodeType==1&&n.getAttribute("id")==e)return t=n,!0}),t},createElement:function(e){var t=new tt;t.ownerDocument=this,t.nodeName=e,t.tagName=e,t.childNodes=new j;var n=t.attributes=new q;return n._ownerElement=t,t},createDocumentFragment:function(){var e=new ct;return e.ownerDocument=this,e.childNodes=new j,e},createTextNode:function(e){var t=new it;return t.ownerDocument=this,t.appendData(e),t},createComment:function(e){var t=new st;return t.ownerDocument=this,t.appendData(e),t},createCDATASection:function(e){var t=new ot;return t.ownerDocument=this,t.appendData(e),t},createProcessingInstruction:function(e,t){var n=new ht;return n.ownerDocument=this,n.tagName=n.target=e,n.nodeValue=n.data=t,n},createAttribute:function(e){var t=new nt;return t.ownerDocument=this,t.name=e,t.nodeName=e,t.localName=e,t.specified=!0,t},createEntityReference:function(e){var t=new lt;return t.ownerDocument=this,t.nodeName=e,t},createElementNS:function(e,t){var n=new tt,r=t.split(":"),i=n.attributes=new q;return n.childNodes=new j,n.ownerDocument=this,n.nodeName=t,n.tagName=t,n.namespaceURI=e,r.length==2?(n.prefix=r[0],n.localName=r[1]):n.localName=t,i._ownerElement=n,n},createAttributeNS:function(e,t){var n=new nt,r=t.split(":");return n.ownerDocument=this,n.nodeName=t,n.name=t,n.namespaceURI=e,n.specified=!0,r.length==2?(n.prefix=r[0],n.localName=r[1]):n.localName=t,n}},i(J,X),tt.prototype={nodeType:u,hasAttribute:function(e){return this.getAttributeNode(e)!=null},getAttribute:function(e){var t=this.getAttributeNode(e);return t&&t.value||""},getAttributeNode:function(e){return this.attributes.getNamedItem(e)},setAttribute:function(e,t){var 
n=this.ownerDocument.createAttribute(e);n.value=n.nodeValue=""+t,this.setAttributeNode(n)},removeAttribute:function(e){var t=this.getAttributeNode(e);t&&this.removeAttributeNode(t)},appendChild:function(e){return e.nodeType===g?this.insertBefore(e,null):et(this,e)},setAttributeNode:function(e){return this.attributes.setNamedItem(e)},setAttributeNodeNS:function(e){return this.attributes.setNamedItemNS(e)},removeAttributeNode:function(e){return this.attributes.removeNamedItem(e.nodeName)},removeAttributeNS:function(e,t){var n=this.getAttributeNodeNS(e,t);n&&this.removeAttributeNode(n)},hasAttributeNS:function(e,t){return this.getAttributeNodeNS(e,t)!=null},getAttributeNS:function(e,t){var n=this.getAttributeNodeNS(e,t);return n&&n.value||""},setAttributeNS:function(e,t,n){var r=this.ownerDocument.createAttributeNS(e,t);r.value=r.nodeValue=""+n,this.setAttributeNode(r)},getAttributeNodeNS:function(e,t){return this.attributes.getNamedItemNS(e,t)},getElementsByTagName:function(e){return new F(this,function(t){var n=[];return $(t,function(r){r!==t&&r.nodeType==u&&(e==="*"||r.tagName==e)&&n.push(r)}),n})},getElementsByTagNameNS:function(e,t){return new F(this,function(n){var r=[];return $(n,function(i){i!==n&&i.nodeType===u&&(e==="*"||i.namespaceURI===e)&&(t==="*"||i.localName==t)&&r.push(i)}),r})}},J.prototype.getElementsByTagName=tt.prototype.getElementsByTagName,J.prototype.getElementsByTagNameNS=tt.prototype.getElementsByTagNameNS,i(tt,X),nt.prototype.nodeType=a,i(nt,X),rt.prototype={data:"",substringData:function(e,t){return this.data.substring(e,e+t)},appendData:function(e){e=this.data+e,this.nodeValue=this.data=e,this.length=e.length},insertData:function(e,t){this.replaceData(e,0,t)},appendChild:function(e){throw new Error(w[3])},deleteData:function(e,t){this.replaceData(e,t,"")},replaceData:function(e,t,n){var 
r=this.data.substring(0,e),i=this.data.substring(e+t);n=r+n+i,this.nodeValue=this.data=n,this.length=n.length}},i(rt,X),it.prototype={nodeName:"#text",nodeType:f,splitText:function(e){var t=this.data,n=t.substring(e);t=t.substring(0,e),this.data=this.nodeValue=t,this.length=t.length;var r=this.ownerDocument.createTextNode(n);return this.parentNode&&this.parentNode.insertBefore(r,this.nextSibling),r}},i(it,rt),st.prototype={nodeName:"#comment",nodeType:d},i(st,rt),ot.prototype={nodeName:"#cdata-section",nodeType:l},i(ot,rt),ut.prototype.nodeType=m,i(ut,X),at.prototype.nodeType=y,i(at,X),ft.prototype.nodeType=h,i(ft,X),lt.prototype.nodeType=c,i(lt,X),ct.prototype.nodeName="#document-fragment",ct.prototype.nodeType=g,i(ct,X),ht.prototype.nodeType=p,i(ht,X),pt.prototype.serializeToString=function(e){var t=[];return dt(e,t),t.join("")},X.prototype.toString=function(){return pt.prototype.serializeToString(this)};try{if(Object.defineProperty){Object.defineProperty(F.prototype,"length",{get:function(){return I(this),this.$$length}}),Object.defineProperty(X.prototype,"textContent",{get:function(){return yt(this)},set:function(e){switch(this.nodeType){case 1:case 11:while(this.firstChild)this.removeChild(this.firstChild);(e||String(e))&&this.appendChild(this.ownerDocument.createTextNode(e));break;default:this.data=e,this.value=value,this.nodeValue=e}}});function yt(e){switch(e.nodeType){case 1:case 11:var t=[];e=e.firstChild;while(e)e.nodeType!==7&&e.nodeType!==8&&t.push(yt(e)),e=e.nextSibling;return t.join("");default:return e.nodeValue}}gt=function(e,t,n){e["$$"+t]=n}}}catch(bt){}return W}),define("ace/mode/xml/dom-parser",["require","exports","module","ace/mode/xml/sax","ace/mode/xml/dom"],function(e,t,n){"use strict";function s(e){this.options=e||{locator:{}}}function o(e,t,n){function s(t){var s=e[t];if(!s)if(i)s=e.length==2?function(n){e(t,n)}:e;else{var o=arguments.length;while(--o)if(s=e[arguments[o]])break}r[t]=s&&function(e){s(e+f(n),e,n)}||function(){}}if(!e){if(t 
instanceof u)return t;e=t}var r={},i=e instanceof Function;return n=n||{},s("warning","warn"),s("error","warn","warning"),s("fatalError","warn","warning","error"),r}function u(){this.cdata=!1}function a(e,t){t.lineNumber=e.lineNumber,t.columnNumber=e.columnNumber}function f(e){if(e)return"\n@"+(e.systemId||"")+"#[line:"+e.lineNumber+",col:"+e.columnNumber+"]"}function l(e,t,n){return typeof e=="string"?e.substr(t,n):e.length>=t+n||t?new java.lang.String(e,t,n)+"":e}function c(e,t){e.currentElement?e.currentElement.appendChild(t):e.document.appendChild(t)}var r=e("./sax"),i=e("./dom");return s.prototype.parseFromString=function(e,t){var n=this.options,i=new r,s=n.domBuilder||new u,a=n.errorHandler,f=n.locator,l=n.xmlns||{},c={lt:"<",gt:">",amp:"&",quot:'"',apos:"'"};return f&&s.setDocumentLocator(f),i.errorHandler=o(a,s,f),i.domBuilder=n.domBuilder||s,/\/x?html?$/.test(t)&&(c.nbsp="\u00a0",c.copy="\u00a9",l[""]="http://www.w3.org/1999/xhtml"),e?i.parse(e,l,c):i.errorHandler.error("invalid document source"),s.document},u.prototype={startDocument:function(){this.document=(new i).createDocument(null,null,null),this.locator&&(this.document.documentURI=this.locator.systemId)},startElement:function(e,t,n,r){var i=this.document,s=i.createElementNS(e,n||t),o=r.length;c(this,s),this.currentElement=s,this.locator&&a(this.locator,s);for(var u=0;u<o;u++){var e=r.getURI(u),f=r.getValue(u),n=r.getQName(u),l=i.createAttributeNS(e,n);l.getOffset&&a(l.getOffset(1),l),l.value=l.nodeValue=f,s.setAttributeNode(l)}},endElement:function(e,t,n){var r=this.currentElement,i=r.tagName;this.currentElement=r.parentNode},startPrefixMapping:function(e,t){},endPrefixMapping:function(e){},processingInstruction:function(e,t){var n=this.document.createProcessingInstruction(e,t);this.locator&&a(this.locator,n),c(this,n)},ignorableWhitespace:function(e,t,n){},characters:function(e,t,n){e=l.apply(this,arguments);if(this.currentElement&&e){if(this.cdata){var 
r=this.document.createCDATASection(e);this.currentElement.appendChild(r)}else{var r=this.document.createTextNode(e);this.currentElement.appendChild(r)}this.locator&&a(this.locator,r)}},skippedEntity:function(e){},endDocument:function(){this.document.normalize()},setDocumentLocator:function(e){if(this.locator=e)e.lineNumber=0},comment:function(e,t,n){e=l.apply(this,arguments);var r=this.document.createComment(e);this.locator&&a(this.locator,r),c(this,r)},startCDATA:function(){this.cdata=!0},endCDATA:function(){this.cdata=!1},startDTD:function(e,t,n){var r=this.document.implementation;if(r&&r.createDocumentType){var i=r.createDocumentType(e,t,n);this.locator&&a(this.locator,i),c(this,i)}},warning:function(e){console.warn(e,f(this.locator))},error:function(e){console.error(e,f(this.locator))},fatalError:function(e){throw console.error(e,f(this.locator)),e}},"endDTD,startEntity,endEntity,attributeDecl,elementDecl,externalEntityDecl,internalEntityDecl,resolveEntity,getExternalSubset,notationDecl,unparsedEntityDecl".replace(/\w+/g,function(e){u.prototype[e]=function(){return null}}),{DOMParser:s}}),define("ace/mode/xml_worker",["require","exports","module","ace/lib/oop","ace/lib/lang","ace/worker/mirror","ace/mode/xml/dom-parser"],function(e,t,n){"use strict";var r=e("../lib/oop"),i=e("../lib/lang"),s=e("../worker/mirror").Mirror,o=e("./xml/dom-parser").DOMParser,u=t.Worker=function(e){s.call(this,e),this.setTimeout(400),this.context=null};r.inherits(u,s),function(){this.setOptions=function(e){this.context=e.context},this.onUpdate=function(){var e=this.doc.getValue();if(!e)return;var t=new 
o,n=[];t.options.errorHandler={fatalError:function(e,t,r){n.push({row:r.lineNumber,column:r.columnNumber,text:t,type:"error"})},error:function(e,t,r){n.push({row:r.lineNumber,column:r.columnNumber,text:t,type:"error"})},warning:function(e,t,r){n.push({row:r.lineNumber,column:r.columnNumber,text:t,type:"warning"})}},t.parseFromString(e),this.sender.emit("error",n)}}.call(u.prototype)}),define("ace/lib/es5-shim",["require","exports","module"],function(e,t,n){function r(){}function w(e){try{return Object.defineProperty(e,"sentinel",{}),"sentinel"in e}catch(t){}}function H(e){return e=+e,e!==e?e=0:e!==0&&e!==1/0&&e!==-1/0&&(e=(e>0||-1)*Math.floor(Math.abs(e))),e}function B(e){var t=typeof e;return e===null||t==="undefined"||t==="boolean"||t==="number"||t==="string"}function j(e){var t,n,r;if(B(e))return e;n=e.valueOf;if(typeof n=="function"){t=n.call(e);if(B(t))return t}r=e.toString;if(typeof r=="function"){t=r.call(e);if(B(t))return t}throw new TypeError}Function.prototype.bind||(Function.prototype.bind=function(t){var n=this;if(typeof n!="function")throw new TypeError("Function.prototype.bind called on incompatible "+n);var i=u.call(arguments,1),s=function(){if(this instanceof s){var e=n.apply(this,i.concat(u.call(arguments)));return Object(e)===e?e:this}return n.apply(t,i.concat(u.call(arguments)))};return n.prototype&&(r.prototype=n.prototype,s.prototype=new r,r.prototype=null),s});var i=Function.prototype.call,s=Array.prototype,o=Object.prototype,u=s.slice,a=i.bind(o.toString),f=i.bind(o.hasOwnProperty),l,c,h,p,d;if(d=f(o,"__defineGetter__"))l=i.bind(o.__defineGetter__),c=i.bind(o.__defineSetter__),h=i.bind(o.__lookupGetter__),p=i.bind(o.__lookupSetter__);if([1,2].splice(0).length!=2)if(!function(){function e(e){var t=new Array(e+2);return t[0]=t[1]=0,t}var t=[],n;t.splice.apply(t,e(20)),t.splice.apply(t,e(26)),n=t.length,t.splice(5,0,"XXX"),n+1==t.length;if(n+1==t.length)return!0}())Array.prototype.splice=function(e,t){var n=this.length;e>0?e>n&&(e=n):e==void 
0?e=0:e<0&&(e=Math.max(n+e,0)),e+t<n||(t=n-e);var r=this.slice(e,e+t),i=u.call(arguments,2),s=i.length;if(e===n)s&&this.push.apply(this,i);else{var o=Math.min(t,n-e),a=e+o,f=a+s-o,l=n-a,c=n-o;if(f<a)for(var h=0;h<l;++h)this[f+h]=this[a+h];else if(f>a)for(h=l;h--;)this[f+h]=this[a+h];if(s&&e===c)this.length=c,this.push.apply(this,i);else{this.length=c+s;for(h=0;h<s;++h)this[e+h]=i[h]}}return r};else{var v=Array.prototype.splice;Array.prototype.splice=function(e,t){return arguments.length?v.apply(this,[e===void 0?0:e,t===void 0?this.length-e:t].concat(u.call(arguments,2))):[]}}Array.isArray||(Array.isArray=function(t){return a(t)=="[object Array]"});var m=Object("a"),g=m[0]!="a"||!(0 in m);Array.prototype.forEach||(Array.prototype.forEach=function(t){var n=F(this),r=g&&a(this)=="[object String]"?this.split(""):n,i=arguments[1],s=-1,o=r.length>>>0;if(a(t)!="[object Function]")throw new TypeError;while(++s<o)s in r&&t.call(i,r[s],s,n)}),Array.prototype.map||(Array.prototype.map=function(t){var n=F(this),r=g&&a(this)=="[object String]"?this.split(""):n,i=r.length>>>0,s=Array(i),o=arguments[1];if(a(t)!="[object Function]")throw new TypeError(t+" is not a function");for(var u=0;u<i;u++)u in r&&(s[u]=t.call(o,r[u],u,n));return s}),Array.prototype.filter||(Array.prototype.filter=function(t){var n=F(this),r=g&&a(this)=="[object String]"?this.split(""):n,i=r.length>>>0,s=[],o,u=arguments[1];if(a(t)!="[object Function]")throw new TypeError(t+" is not a function");for(var f=0;f<i;f++)f in r&&(o=r[f],t.call(u,o,f,n)&&s.push(o));return s}),Array.prototype.every||(Array.prototype.every=function(t){var n=F(this),r=g&&a(this)=="[object String]"?this.split(""):n,i=r.length>>>0,s=arguments[1];if(a(t)!="[object Function]")throw new TypeError(t+" is not a function");for(var o=0;o<i;o++)if(o in r&&!t.call(s,r[o],o,n))return!1;return!0}),Array.prototype.some||(Array.prototype.some=function(t){var n=F(this),r=g&&a(this)=="[object 
String]"?this.split(""):n,i=r.length>>>0,s=arguments[1];if(a(t)!="[object Function]")throw new TypeError(t+" is not a function");for(var o=0;o<i;o++)if(o in r&&t.call(s,r[o],o,n))return!0;return!1}),Array.prototype.reduce||(Array.prototype.reduce=function(t){var n=F(this),r=g&&a(this)=="[object String]"?this.split(""):n,i=r.length>>>0;if(a(t)!="[object Function]")throw new TypeError(t+" is not a function");if(!i&&arguments.length==1)throw new TypeError("reduce of empty array with no initial value");var s=0,o;if(arguments.length>=2)o=arguments[1];else do{if(s in r){o=r[s++];break}if(++s>=i)throw new TypeError("reduce of empty array with no initial value")}while(!0);for(;s<i;s++)s in r&&(o=t.call(void 0,o,r[s],s,n));return o}),Array.prototype.reduceRight||(Array.prototype.reduceRight=function(t){var n=F(this),r=g&&a(this)=="[object String]"?this.split(""):n,i=r.length>>>0;if(a(t)!="[object Function]")throw new TypeError(t+" is not a function");if(!i&&arguments.length==1)throw new TypeError("reduceRight of empty array with no initial value");var s,o=i-1;if(arguments.length>=2)s=arguments[1];else do{if(o in r){s=r[o--];break}if(--o<0)throw new TypeError("reduceRight of empty array with no initial value")}while(!0);do o in this&&(s=t.call(void 0,s,r[o],o,n));while(o--);return s});if(!Array.prototype.indexOf||[0,1].indexOf(1,2)!=-1)Array.prototype.indexOf=function(t){var n=g&&a(this)=="[object String]"?this.split(""):F(this),r=n.length>>>0;if(!r)return-1;var i=0;arguments.length>1&&(i=H(arguments[1])),i=i>=0?i:Math.max(0,r+i);for(;i<r;i++)if(i in n&&n[i]===t)return i;return-1};if(!Array.prototype.lastIndexOf||[0,1].lastIndexOf(0,-3)!=-1)Array.prototype.lastIndexOf=function(t){var n=g&&a(this)=="[object String]"?this.split(""):F(this),r=n.length>>>0;if(!r)return-1;var i=r-1;arguments.length>1&&(i=Math.min(i,H(arguments[1]))),i=i>=0?i:r-Math.abs(i);for(;i>=0;i--)if(i in n&&t===n[i])return i;return-1};Object.getPrototypeOf||(Object.getPrototypeOf=function(t){return 
t.__proto__||(t.constructor?t.constructor.prototype:o)});if(!Object.getOwnPropertyDescriptor){var y="Object.getOwnPropertyDescriptor called on a non-object: ";Object.getOwnPropertyDescriptor=function(t,n){if(typeof t!="object"&&typeof t!="function"||t===null)throw new TypeError(y+t);if(!f(t,n))return;var r,i,s;r={enumerable:!0,configurable:!0};if(d){var u=t.__proto__;t.__proto__=o;var i=h(t,n),s=p(t,n);t.__proto__=u;if(i||s)return i&&(r.get=i),s&&(r.set=s),r}return r.value=t[n],r}}Object.getOwnPropertyNames||(Object.getOwnPropertyNames=function(t){return Object.keys(t)});if(!Object.create){var b;Object.prototype.__proto__===null?b=function(){return{__proto__:null}}:b=function(){var e={};for(var t in e)e[t]=null;return e.constructor=e.hasOwnProperty=e.propertyIsEnumerable=e.isPrototypeOf=e.toLocaleString=e.toString=e.valueOf=e.__proto__=null,e},Object.create=function(t,n){var r;if(t===null)r=b();else{if(typeof t!="object")throw new TypeError("typeof prototype["+typeof t+"] != 'object'");var i=function(){};i.prototype=t,r=new i,r.__proto__=t}return n!==void 0&&Object.defineProperties(r,n),r}}if(Object.defineProperty){var E=w({}),S=typeof document=="undefined"||w(document.createElement("div"));if(!E||!S)var x=Object.defineProperty}if(!Object.defineProperty||x){var T="Property description must be an object: ",N="Object.defineProperty called on non-object: ",C="getters & setters can not be defined on this javascript engine";Object.defineProperty=function(t,n,r){if(typeof t!="object"&&typeof t!="function"||t===null)throw new TypeError(N+t);if(typeof r!="object"&&typeof r!="function"||r===null)throw new TypeError(T+r);if(x)try{return x.call(Object,t,n,r)}catch(i){}if(f(r,"value"))if(d&&(h(t,n)||p(t,n))){var s=t.__proto__;t.__proto__=o,delete t[n],t[n]=r.value,t.__proto__=s}else t[n]=r.value;else{if(!d)throw new TypeError(C);f(r,"get")&&l(t,n,r.get),f(r,"set")&&c(t,n,r.set)}return t}}Object.defineProperties||(Object.defineProperties=function(t,n){for(var r in 
n)f(n,r)&&Object.defineProperty(t,r,n[r]);return t}),Object.seal||(Object.seal=function(t){return t}),Object.freeze||(Object.freeze=function(t){return t});try{Object.freeze(function(){})}catch(k){Object.freeze=function(t){return function(n){return typeof n=="function"?n:t(n)}}(Object.freeze)}Object.preventExtensions||(Object.preventExtensions=function(t){return t}),Object.isSealed||(Object.isSealed=function(t){return!1}),Object.isFrozen||(Object.isFrozen=function(t){return!1}),Object.isExtensible||(Object.isExtensible=function(t){if(Object(t)===t)throw new TypeError;var n="";while(f(t,n))n+="?";t[n]=!0;var r=f(t,n);return delete t[n],r});if(!Object.keys){var L=!0,A=["toString","toLocaleString","valueOf","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","constructor"],O=A.length;for(var M in{toString:null})L=!1;Object.keys=function I(e){if(typeof e!="object"&&typeof e!="function"||e===null)throw new TypeError("Object.keys called on a non-object");var I=[];for(var t in e)f(e,t)&&I.push(t);if(L)for(var n=0,r=O;n<r;n++){var i=A[n];f(e,i)&&I.push(i)}return I}}Date.now||(Date.now=function(){return(new Date).getTime()});var _=" \n \f\r \u00a0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029\ufeff";if(!String.prototype.trim||_.trim()){_="["+_+"]";var D=new RegExp("^"+_+_+"*"),P=new RegExp(_+_+"*$");String.prototype.trim=function(){return String(this).replace(D,"").replace(P,"")}}var F=function(e){if(e==null)throw new TypeError("can't convert "+e+" to object");return Object(e)}})
ut
TestTemplate.ts
import { NotFoundError } from '@apextoaster/js-utils'; import { expect } from 'chai'; import { findByBaseId } from '../../../src/util/template';
describe('template helpers', () => { describe('find template by id', () => { it('should throw when no item is found', async () => { expect(() => findByBaseId([], 'foo')).to.throw(NotFoundError); }); }); });
DistanceDisplayCondition.js
define([ './defaultValue', './defined', './defineProperties', './DeveloperError' ], function( defaultValue, defined, defineProperties, DeveloperError) { 'use strict'; /** * Determines visibility based on the distance to the camera. * * @alias DistanceDisplayCondition * @constructor * * @param {Number} [near=0.0] The smallest distance in the interval where the object is visible. * @param {Number} [far=Number.MAX_VALUE] The largest distance in the interval where the object is visible. * * @example * // Make a billboard that is only visible when the distance to the camera is between 10 and 20 meters. * billboard.distanceDisplayCondition = new Cesium.DistanceDisplayCondition(10.0, 20.0); */ function
(near, far) { near = defaultValue(near, 0.0); this._near = near; far = defaultValue(far, Number.MAX_VALUE); this._far = far; } defineProperties(DistanceDisplayCondition.prototype, { /** * The smallest distance in the interval where the object is visible. * @memberof DistanceDisplayCondition.prototype * @type {Number} * @default 0.0 */ near: { get: function() { return this._near; }, set: function(value) { this._near = value; } }, /** * The largest distance in the interval where the object is visible. * @memberof DistanceDisplayCondition.prototype * @type {Number} * @default Number.MAX_VALUE */ far: { get: function() { return this._far; }, set: function(value) { this._far = value; } } }); /** * The number of elements used to pack the object into an array. * @type {Number} */ DistanceDisplayCondition.packedLength = 2; /** * Stores the provided instance into the provided array. * * @param {DistanceDisplayCondition} value The value to pack. * @param {Number[]} array The array to pack into. * @param {Number} [startingIndex=0] The index into the array at which to start packing the elements. * * @returns {Number[]} The array that was packed into */ DistanceDisplayCondition.pack = function(value, array, startingIndex) { //>>includeStart('debug', pragmas.debug); if (!defined(value)) { throw new DeveloperError('value is required'); } if (!defined(array)) { throw new DeveloperError('array is required'); } //>>includeEnd('debug'); startingIndex = defaultValue(startingIndex, 0); array[startingIndex++] = value.near; array[startingIndex] = value.far; return array; }; /** * Retrieves an instance from a packed array. * * @param {Number[]} array The packed array. * @param {Number} [startingIndex=0] The starting index of the element to be unpacked. * @param {DistanceDisplayCondition} [result] The object into which to store the result. * @returns {DistanceDisplayCondition} The modified result parameter or a new DistanceDisplayCondition instance if one was not provided. 
*/ DistanceDisplayCondition.unpack = function(array, startingIndex, result) { //>>includeStart('debug', pragmas.debug); if (!defined(array)) { throw new DeveloperError('array is required'); } //>>includeEnd('debug'); startingIndex = defaultValue(startingIndex, 0); if (!defined(result)) { result = new DistanceDisplayCondition(); } result.near = array[startingIndex++]; result.far = array[startingIndex]; return result; }; /** * Determines if two distance display conditions are equal. * * @param {DistanceDisplayCondition} left A distance display condition. * @param {DistanceDisplayCondition} right Another distance display condition. * @return {Boolean} Whether the two distance display conditions are equal. */ DistanceDisplayCondition.equals = function(left, right) { return left === right || (defined(left) && defined(right) && left.near === right.near && left.far === right.far); }; /** * Duplicates a distance display condition instance. * * @param {DistanceDisplayCondition} [value] The distance display condition to duplicate. * @param {DistanceDisplayCondition} [result] The result onto which to store the result. * @return {DistanceDisplayCondition} The duplicated instance. */ DistanceDisplayCondition.clone = function(value, result) { if (!defined(value)) { return undefined; } if (!defined(result)) { result = new DistanceDisplayCondition(); } result.near = value.near; result.far = value.far; return result; }; /** * Duplicates this instance. * * @param {DistanceDisplayCondition} [result] The result onto which to store the result. * @return {DistanceDisplayCondition} The duplicated instance. */ DistanceDisplayCondition.prototype.clone = function(result) { return DistanceDisplayCondition.clone(this, result); }; /** * Determines if this distance display condition is equal to another. * * @param {DistanceDisplayCondition} other Another distance display condition. * @return {Boolean} Whether this distance display condition is equal to the other. 
*/ DistanceDisplayCondition.prototype.equals = function(other) { return DistanceDisplayCondition.equals(this, other); }; return DistanceDisplayCondition; });
DistanceDisplayCondition
366414300.py
from part1 import ( gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new, ) """ scenario: test_random_actions uuid: 366414300 """
assert board is not None assert gamma_move(board, 1, 2, 0) == 1 assert gamma_free_fields(board, 1) == 3 assert gamma_golden_possible(board, 1) == 0 assert gamma_move(board, 2, 2, 1) == 1 assert gamma_move(board, 3, 2, 1) == 0 assert gamma_move(board, 3, 1, 1) == 1 assert gamma_move(board, 4, 2, 3) == 1 assert gamma_move(board, 2, 2, 0) == 0 assert gamma_move(board, 2, 3, 0) == 0 assert gamma_move(board, 3, 0, 4) == 0 assert gamma_move(board, 4, 4, 2) == 0 assert gamma_move(board, 4, 1, 1) == 0 assert gamma_move(board, 1, 0, 4) == 0 assert gamma_busy_fields(board, 1) == 1 assert gamma_move(board, 2, 1, 1) == 0 assert gamma_move(board, 3, 1, 0) == 1 assert gamma_move(board, 1, 1, 3) == 0 assert gamma_move(board, 2, 2, 2) == 1 assert gamma_move(board, 3, 1, 0) == 0 assert gamma_move(board, 3, 4, 0) == 0 assert gamma_move(board, 4, 3, 4) == 0 assert gamma_move(board, 4, 0, 1) == 0 assert gamma_move(board, 1, 2, 4) == 0 assert gamma_move(board, 1, 4, 3) == 0 board162686102 = gamma_board(board) assert board162686102 is not None assert board162686102 == ("..4..\n" "..2..\n" ".32..\n" ".31..\n") del board162686102 board162686102 = None assert gamma_move(board, 2, 1, 3) == 0 assert gamma_move(board, 2, 0, 3) == 0 assert gamma_free_fields(board, 2) == 3 assert gamma_move(board, 3, 1, 1) == 0 assert gamma_move(board, 4, 3, 1) == 0 assert gamma_move(board, 1, 3, 4) == 0 assert gamma_move(board, 2, 1, 4) == 0 assert gamma_move(board, 2, 3, 0) == 0 assert gamma_busy_fields(board, 2) == 2 assert gamma_move(board, 3, 2, 0) == 0 assert gamma_move(board, 3, 1, 1) == 0 assert gamma_move(board, 4, 0, 0) == 0 assert gamma_move(board, 1, 0, 3) == 0 assert gamma_move(board, 1, 1, 0) == 0 assert gamma_move(board, 2, 2, 1) == 0 assert gamma_move(board, 2, 4, 0) == 0 assert gamma_move(board, 3, 0, 0) == 1 assert gamma_move(board, 4, 2, 3) == 0 assert gamma_move(board, 2, 2, 0) == 0 assert gamma_move(board, 3, 1, 0) == 0 assert gamma_golden_possible(board, 3) == 1 assert gamma_move(board, 4, 
4, 0) == 0 assert gamma_move(board, 1, 0, 2) == 0 assert gamma_move(board, 1, 3, 0) == 1 assert gamma_move(board, 2, 3, 0) == 0 assert gamma_move(board, 3, 3, 3) == 0 assert gamma_move(board, 3, 4, 1) == 0 assert gamma_golden_possible(board, 3) == 1 assert gamma_move(board, 4, 3, 1) == 0 assert gamma_move(board, 4, 3, 2) == 0 assert gamma_move(board, 1, 3, 1) == 1 assert gamma_move(board, 3, 2, 1) == 0 assert gamma_move(board, 4, 3, 0) == 0 gamma_delete(board)
""" random actions, total chaos """ board = gamma_new(5, 4, 4, 1)
0001_initial.py
# Generated by Django 3.2.12 on 2022-03-04 13:16 from django.conf import settings from django.db import migrations, models import django.db.models.deletion import tasks.models class
(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Task', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=100)), ('description', models.TextField()), ('completed', models.BooleanField(default=False)), ('created_date', models.DateTimeField(auto_now_add=True)), ('deleted', models.BooleanField(default=False)), ('priority', models.IntegerField(default=0)), ('status', models.CharField(choices=[('PENDING', 'PENDING'), ('IN_PROGRESS', 'IN_PROGRESS'), ('COMPLETED', 'COMPLETED'), ('CANCELLED', 'CANCELLED')], default='PENDING', max_length=100)), ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( name='ReportSchedule', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('report_at', models.TimeField(default=tasks.models.default_start_time)), ('last_run_at', models.DateTimeField(default=tasks.models.default_last_runtime)), ('email', models.EmailField(max_length=254)), ('user', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( name='History', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('old_status', models.CharField(choices=[('PENDING', 'PENDING'), ('IN_PROGRESS', 'IN_PROGRESS'), ('COMPLETED', 'COMPLETED'), ('CANCELLED', 'CANCELLED')], default='PENDING', max_length=100)), ('new_status', models.CharField(choices=[('PENDING', 'PENDING'), ('IN_PROGRESS', 'IN_PROGRESS'), ('COMPLETED', 'COMPLETED'), ('CANCELLED', 'CANCELLED')], default='PENDING', max_length=100)), ('updated_at', models.DateTimeField(auto_now=True)), ('task', 
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasks.task')), ], ), ]
Migration
__init__.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import (division, print_function, absolute_import, unicode_literals) from .sampler import * from .mh import * from .ensemble import * from .ptsampler import * from . import utils from . import autocorr __version__ = "2.1.0" def test():
from inspect import getmembers, ismethod from .tests import Tests print("Starting tests...") failures = 0 tests = Tests() for o in getmembers(tests): tests.setUp() if ismethod(o[1]) and o[0].startswith("test"): print("{0} ...".format(o[0])) try: o[1]() except Exception as e: print("Failed with:\n {0.__class__.__name__}: {0}" .format(e)) failures += 1 else: print(" Passed.") print("{0} tests failed".format(failures))
webinar-created.js
const zoomAdmin = { type: "app", app: "zoom_admin", }; module.exports = { name: "Webinar Created", version: "0.0.1", dedupe: "unique", // Dedupe based on webinar ID props: { zoomAdmin, zoomApphook: { type: "$.interface.apphook", appProp: "zoomAdmin", eventNames: ["webinar.created"], }, }, async run(event) { const { payload } = event; const { object } = payload; this.$emit( { event: "webinar.created", payload }, { summary: `Webinar ${object.topic} created`, id: object.uuid, ts: +new Date(object.start_time), } ); },
};
action.model.ts
export interface Action { name?: string; icon?: string; } export class Power implements Action{ name?: string;
export class Height implements Action{ name?: string; icon?: string; height: number; } export class Open implements Action{ name?: string; icon?: string; open: boolean; } export class Lock implements Action{ name?: string; icon?: string; locked: boolean; }
icon?: string; power: boolean; }
gitlab.go
/** * Copyright (C) 2015 Red Hat, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package gitlab import ( "encoding/json" "errors" "fmt" "io/ioutil" "net/http" "net/url" "path" "github.com/RangelReale/osincli" "github.com/golang/glog" authapi "github.com/openshift/origin/pkg/oauthserver/api" "github.com/openshift/origin/pkg/oauthserver/oauth/external" ) const ( // Uses the GitLab User-API (http://doc.gitlab.com/ce/api/users.html#current-user) // and OAuth-Provider (http://doc.gitlab.com/ce/integration/oauth_provider.html) // with default OAuth scope (http://doc.gitlab.com/ce/api/users.html#current-user) // Requires GitLab 7.7.0 or higher gitlabAuthorizePath = "/oauth/authorize" gitlabTokenPath = "/oauth/token" gitlabUserAPIPath = "/api/v3/user" gitlabOAuthScope = "api" ) type provider struct { providerName string transport http.RoundTripper authorizeURL string tokenURL string userAPIURL string clientID string clientSecret string } type gitlabUser struct { ID uint64 Username string Email string Name string } func NewProvider(providerName string, transport http.RoundTripper, URL, clientID, clientSecret string) (external.Provider, error) { // Create service URLs u, err := url.Parse(URL) if err != nil { return nil, errors.New("Host URL is invalid") } return &provider{ providerName: providerName, transport: transport, authorizeURL: appendPath(*u, gitlabAuthorizePath), tokenURL: appendPath(*u, gitlabTokenPath), userAPIURL: appendPath(*u, gitlabUserAPIPath), clientID: clientID, 
clientSecret: clientSecret, }, nil } func appendPath(u url.URL, subpath string) string
func (p *provider) GetTransport() (http.RoundTripper, error) { return p.transport, nil } // NewConfig implements external/interfaces/Provider.NewConfig func (p *provider) NewConfig() (*osincli.ClientConfig, error) { config := &osincli.ClientConfig{ ClientId: p.clientID, ClientSecret: p.clientSecret, ErrorsInStatusCode: true, SendClientSecretInParams: true, AuthorizeUrl: p.authorizeURL, TokenUrl: p.tokenURL, Scope: gitlabOAuthScope, } return config, nil } // AddCustomParameters implements external/interfaces/Provider.AddCustomParameters func (p *provider) AddCustomParameters(req *osincli.AuthorizeRequest) { } // GetUserIdentity implements external/interfaces/Provider.GetUserIdentity func (p *provider) GetUserIdentity(data *osincli.AccessData) (authapi.UserIdentityInfo, bool, error) { req, _ := http.NewRequest("GET", p.userAPIURL, nil) req.Header.Set("Authorization", fmt.Sprintf("bearer %s", data.AccessToken)) client := http.DefaultClient if p.transport != nil { client = &http.Client{Transport: p.transport} } res, err := client.Do(req) if err != nil { return nil, false, err } defer res.Body.Close() body, err := ioutil.ReadAll(res.Body) if err != nil { return nil, false, err } userdata := gitlabUser{} err = json.Unmarshal(body, &userdata) if err != nil { return nil, false, err } if userdata.ID == 0 { return nil, false, errors.New("Could not retrieve GitLab id") } identity := authapi.NewDefaultUserIdentityInfo(p.providerName, fmt.Sprintf("%d", userdata.ID)) if len(userdata.Name) > 0 { identity.Extra[authapi.IdentityDisplayNameKey] = userdata.Name } if len(userdata.Username) > 0 { identity.Extra[authapi.IdentityPreferredUsernameKey] = userdata.Username } if len(userdata.Email) > 0 { identity.Extra[authapi.IdentityEmailKey] = userdata.Email } glog.V(4).Infof("Got identity=%#v", identity) return identity, true, nil }
{ u.Path = path.Join(u.Path, subpath) return u.String() }
base.go
/* Copyright 2018 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package testsuites import ( "context" "flag" "fmt" "regexp" "strings" "time" "github.com/onsi/ginkgo" v1 "k8s.io/api/core/v1" storagev1 "k8s.io/api/storage/v1" apierrs "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/api/resource" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" utilerrors "k8s.io/apimachinery/pkg/util/errors" "k8s.io/apimachinery/pkg/util/sets" clientset "k8s.io/client-go/kubernetes" csilib "k8s.io/csi-translation-lib" "k8s.io/kubernetes/test/e2e/framework" e2elog "k8s.io/kubernetes/test/e2e/framework/log" "k8s.io/kubernetes/test/e2e/framework/metrics" "k8s.io/kubernetes/test/e2e/framework/podlogs" "k8s.io/kubernetes/test/e2e/framework/volume" "k8s.io/kubernetes/test/e2e/storage/testpatterns" ) var ( migratedPlugins *string ) func init() { migratedPlugins = flag.String("storage.migratedPlugins", "", "comma separated list of in-tree plugin names of form 'kubernetes.io/{pluginName}' migrated to CSI") } type opCounts map[string]int64 // TestSuite represents an interface for a set of tests which works with TestDriver type TestSuite interface { // getTestSuiteInfo returns the TestSuiteInfo for this TestSuite getTestSuiteInfo() TestSuiteInfo // defineTest defines tests of the testpattern for the driver. 
// Called inside a Ginkgo context that reflects the current driver and test pattern, // so the test suite can define tests directly with ginkgo.It. defineTests(TestDriver, testpatterns.TestPattern) } // TestSuiteInfo represents a set of parameters for TestSuite type TestSuiteInfo struct { name string // name of the TestSuite featureTag string // featureTag for the TestSuite testPatterns []testpatterns.TestPattern // Slice of TestPattern for the TestSuite } // TestResource represents an interface for resources that is used by TestSuite type TestResource interface { // cleanupResource cleans up the test resources created when setting up the resource cleanupResource() } func getTestNameStr(suite TestSuite, pattern testpatterns.TestPattern) string
// DefineTestSuite defines tests for all testpatterns and all testSuites for a driver func DefineTestSuite(driver TestDriver, tsInits []func() TestSuite) { for _, testSuiteInit := range tsInits { suite := testSuiteInit() for _, pattern := range suite.getTestSuiteInfo().testPatterns { p := pattern ginkgo.Context(getTestNameStr(suite, p), func() { ginkgo.BeforeEach(func() { // Skip unsupported tests to avoid unnecessary resource initialization skipUnsupportedTest(driver, p) }) suite.defineTests(driver, p) }) } } } // skipUnsupportedTest will skip tests if the combination of driver, and testpattern // is not suitable to be tested. // Whether it needs to be skipped is checked by following steps: // 1. Check if Whether SnapshotType is supported by driver from its interface // 2. Check if Whether volType is supported by driver from its interface // 3. Check if fsType is supported // 4. Check with driver specific logic // // Test suites can also skip tests inside their own defineTests function or in // individual tests. func skipUnsupportedTest(driver TestDriver, pattern testpatterns.TestPattern) { dInfo := driver.GetDriverInfo() var isSupported bool // 1. Check if Whether SnapshotType is supported by driver from its interface // if isSupported, we still execute the driver and suite tests if len(pattern.SnapshotType) > 0 { switch pattern.SnapshotType { case testpatterns.DynamicCreatedSnapshot: _, isSupported = driver.(SnapshottableTestDriver) default: isSupported = false } if !isSupported { framework.Skipf("Driver %s doesn't support snapshot type %v -- skipping", dInfo.Name, pattern.SnapshotType) } } else { // 2. 
Check if Whether volType is supported by driver from its interface switch pattern.VolType { case testpatterns.InlineVolume: _, isSupported = driver.(InlineVolumeTestDriver) case testpatterns.PreprovisionedPV: _, isSupported = driver.(PreprovisionedPVTestDriver) case testpatterns.DynamicPV: _, isSupported = driver.(DynamicPVTestDriver) default: isSupported = false } if !isSupported { framework.Skipf("Driver %s doesn't support %v -- skipping", dInfo.Name, pattern.VolType) } // 3. Check if fsType is supported if !dInfo.SupportedFsType.Has(pattern.FsType) { framework.Skipf("Driver %s doesn't support %v -- skipping", dInfo.Name, pattern.FsType) } if pattern.FsType == "xfs" && framework.NodeOSDistroIs("gci") { framework.Skipf("Distro doesn't support xfs -- skipping") } if pattern.FsType == "ntfs" && !framework.NodeOSDistroIs("windows") { framework.Skipf("Distro %s doesn't support ntfs -- skipping", framework.TestContext.NodeOSDistro) } } // 4. Check with driver specific logic driver.SkipUnsupportedTest(pattern) } // genericVolumeTestResource is a generic implementation of TestResource that wil be able to // be used in most of TestSuites. // See volume_io.go or volumes.go in test/e2e/storage/testsuites/ for how to use this resource. // Also, see subpath.go in the same directory for how to extend and use it. 
type genericVolumeTestResource struct { driver TestDriver config *PerTestConfig pattern testpatterns.TestPattern volType string volSource *v1.VolumeSource pvc *v1.PersistentVolumeClaim pv *v1.PersistentVolume sc *storagev1.StorageClass volume TestVolume } var _ TestResource = &genericVolumeTestResource{} func createGenericVolumeTestResource(driver TestDriver, config *PerTestConfig, pattern testpatterns.TestPattern) *genericVolumeTestResource { r := genericVolumeTestResource{ driver: driver, config: config, pattern: pattern, } dInfo := driver.GetDriverInfo() f := config.Framework cs := f.ClientSet fsType := pattern.FsType volType := pattern.VolType // Create volume for pre-provisioned volume tests r.volume = CreateVolume(driver, config, volType) switch volType { case testpatterns.InlineVolume: e2elog.Logf("Creating resource for inline volume") if iDriver, ok := driver.(InlineVolumeTestDriver); ok { r.volSource = iDriver.GetVolumeSource(false, fsType, r.volume) r.volType = dInfo.Name } case testpatterns.PreprovisionedPV: e2elog.Logf("Creating resource for pre-provisioned PV") if pDriver, ok := driver.(PreprovisionedPVTestDriver); ok { pvSource, volumeNodeAffinity := pDriver.GetPersistentVolumeSource(false, fsType, r.volume) if pvSource != nil { r.volSource, r.pv, r.pvc = createVolumeSourceWithPVCPV(f, dInfo.Name, pvSource, volumeNodeAffinity, false, pattern.VolMode) } r.volType = fmt.Sprintf("%s-preprovisionedPV", dInfo.Name) } case testpatterns.DynamicPV: e2elog.Logf("Creating resource for dynamic PV") if dDriver, ok := driver.(DynamicPVTestDriver); ok { claimSize := dDriver.GetClaimSize() r.sc = dDriver.GetDynamicProvisionStorageClass(r.config, fsType) ginkgo.By("creating a StorageClass " + r.sc.Name) var err error r.sc, err = cs.StorageV1().StorageClasses().Create(r.sc) framework.ExpectNoError(err) if r.sc != nil { r.volSource, r.pv, r.pvc = createVolumeSourceWithPVCPVFromDynamicProvisionSC( f, dInfo.Name, claimSize, r.sc, false, pattern.VolMode) } r.volType = 
fmt.Sprintf("%s-dynamicPV", dInfo.Name) } default: framework.Failf("genericVolumeTestResource doesn't support: %s", volType) } if r.volSource == nil { framework.Skipf("Driver %s doesn't support %v -- skipping", dInfo.Name, volType) } return &r } // cleanupResource cleans up genericVolumeTestResource func (r *genericVolumeTestResource) cleanupResource() { f := r.config.Framework volType := r.pattern.VolType if r.pvc != nil || r.pv != nil { switch volType { case testpatterns.PreprovisionedPV: ginkgo.By("Deleting pv and pvc") if errs := framework.PVPVCCleanup(f.ClientSet, f.Namespace.Name, r.pv, r.pvc); len(errs) != 0 { framework.Failf("Failed to delete PVC or PV: %v", utilerrors.NewAggregate(errs)) } case testpatterns.DynamicPV: ginkgo.By("Deleting pvc") // We only delete the PVC so that PV (and disk) can be cleaned up by dynamic provisioner if r.pv != nil && r.pv.Spec.PersistentVolumeReclaimPolicy != v1.PersistentVolumeReclaimDelete { framework.Failf("Test framework does not currently support Dynamically Provisioned Persistent Volume %v specified with reclaim policy that isnt %v", r.pv.Name, v1.PersistentVolumeReclaimDelete) } if r.pvc != nil { err := framework.DeletePersistentVolumeClaim(f.ClientSet, r.pvc.Name, f.Namespace.Name) framework.ExpectNoError(err, "Failed to delete PVC %v", r.pvc.Name) if r.pv != nil { err = framework.WaitForPersistentVolumeDeleted(f.ClientSet, r.pv.Name, 5*time.Second, 5*time.Minute) framework.ExpectNoError(err, "Persistent Volume %v not deleted by dynamic provisioner", r.pv.Name) } } default: framework.Failf("Found PVC (%v) or PV (%v) but not running Preprovisioned or Dynamic test pattern", r.pvc, r.pv) } } if r.sc != nil { ginkgo.By("Deleting sc") deleteStorageClass(f.ClientSet, r.sc.Name) } // Cleanup volume for pre-provisioned volume tests if r.volume != nil { r.volume.DeleteVolume() } } func createVolumeSourceWithPVCPV( f *framework.Framework, name string, pvSource *v1.PersistentVolumeSource, volumeNodeAffinity 
*v1.VolumeNodeAffinity, readOnly bool, volMode v1.PersistentVolumeMode, ) (*v1.VolumeSource, *v1.PersistentVolume, *v1.PersistentVolumeClaim) { pvConfig := framework.PersistentVolumeConfig{ NamePrefix: fmt.Sprintf("%s-", name), StorageClassName: f.Namespace.Name, PVSource: *pvSource, NodeAffinity: volumeNodeAffinity, } pvcConfig := framework.PersistentVolumeClaimConfig{ StorageClassName: &f.Namespace.Name, } if volMode != "" { pvConfig.VolumeMode = &volMode pvcConfig.VolumeMode = &volMode } e2elog.Logf("Creating PVC and PV") pv, pvc, err := framework.CreatePVCPV(f.ClientSet, pvConfig, pvcConfig, f.Namespace.Name, false) framework.ExpectNoError(err, "PVC, PV creation failed") err = framework.WaitOnPVandPVC(f.ClientSet, f.Namespace.Name, pv, pvc) framework.ExpectNoError(err, "PVC, PV failed to bind") volSource := &v1.VolumeSource{ PersistentVolumeClaim: &v1.PersistentVolumeClaimVolumeSource{ ClaimName: pvc.Name, ReadOnly: readOnly, }, } return volSource, pv, pvc } func createVolumeSourceWithPVCPVFromDynamicProvisionSC( f *framework.Framework, name string, claimSize string, sc *storagev1.StorageClass, readOnly bool, volMode v1.PersistentVolumeMode, ) (*v1.VolumeSource, *v1.PersistentVolume, *v1.PersistentVolumeClaim) { cs := f.ClientSet ns := f.Namespace.Name ginkgo.By("creating a claim") pvc := getClaim(claimSize, ns) pvc.Spec.StorageClassName = &sc.Name if volMode != "" { pvc.Spec.VolumeMode = &volMode } var err error pvc, err = cs.CoreV1().PersistentVolumeClaims(ns).Create(pvc) framework.ExpectNoError(err) if !isDelayedBinding(sc) { err = framework.WaitForPersistentVolumeClaimPhase(v1.ClaimBound, cs, pvc.Namespace, pvc.Name, framework.Poll, framework.ClaimProvisionTimeout) framework.ExpectNoError(err) } pvc, err = cs.CoreV1().PersistentVolumeClaims(pvc.Namespace).Get(pvc.Name, metav1.GetOptions{}) framework.ExpectNoError(err) var pv *v1.PersistentVolume if !isDelayedBinding(sc) { pv, err = cs.CoreV1().PersistentVolumes().Get(pvc.Spec.VolumeName, 
metav1.GetOptions{}) framework.ExpectNoError(err) } volSource := &v1.VolumeSource{ PersistentVolumeClaim: &v1.PersistentVolumeClaimVolumeSource{ ClaimName: pvc.Name, ReadOnly: readOnly, }, } return volSource, pv, pvc } func isDelayedBinding(sc *storagev1.StorageClass) bool { if sc.VolumeBindingMode != nil { return *sc.VolumeBindingMode == storagev1.VolumeBindingWaitForFirstConsumer } return false } func getClaim(claimSize string, ns string) *v1.PersistentVolumeClaim { claim := v1.PersistentVolumeClaim{ ObjectMeta: metav1.ObjectMeta{ GenerateName: "pvc-", Namespace: ns, }, Spec: v1.PersistentVolumeClaimSpec{ AccessModes: []v1.PersistentVolumeAccessMode{ v1.ReadWriteOnce, }, Resources: v1.ResourceRequirements{ Requests: v1.ResourceList{ v1.ResourceName(v1.ResourceStorage): resource.MustParse(claimSize), }, }, }, } return &claim } // deleteStorageClass deletes the passed in StorageClass and catches errors other than "Not Found" func deleteStorageClass(cs clientset.Interface, className string) { err := cs.StorageV1().StorageClasses().Delete(className, nil) if err != nil && !apierrs.IsNotFound(err) { framework.ExpectNoError(err) } } // convertTestConfig returns a framework test config with the // parameters specified for the testsuite or (if available) the // dynamically created config for the volume server. // // This is done because TestConfig is the public API for // the testsuites package whereas volume.TestConfig is merely // an implementation detail. It contains fields that have no effect, // which makes it unsuitable for use in the testsuits public API. 
func convertTestConfig(in *PerTestConfig) volume.TestConfig { if in.ServerConfig != nil { return *in.ServerConfig } return volume.TestConfig{ Namespace: in.Framework.Namespace.Name, Prefix: in.Prefix, ClientNodeName: in.ClientNodeName, NodeSelector: in.ClientNodeSelector, } } func getSnapshot(claimName string, ns, snapshotClassName string) *unstructured.Unstructured { snapshot := &unstructured.Unstructured{ Object: map[string]interface{}{ "kind": "VolumeSnapshot", "apiVersion": snapshotAPIVersion, "metadata": map[string]interface{}{ "generateName": "snapshot-", "namespace": ns, }, "spec": map[string]interface{}{ "snapshotClassName": snapshotClassName, "source": map[string]interface{}{ "name": claimName, "kind": "PersistentVolumeClaim", }, }, }, } return snapshot } // StartPodLogs begins capturing log output and events from current // and future pods running in the namespace of the framework. That // ends when the returned cleanup function is called. // // The output goes to log files (when using --report-dir, as in the // CI) or the output stream (otherwise). func StartPodLogs(f *framework.Framework) func() { ctx, cancel := context.WithCancel(context.Background()) cs := f.ClientSet ns := f.Namespace to := podlogs.LogOutput{ StatusWriter: ginkgo.GinkgoWriter, } if framework.TestContext.ReportDir == "" { to.LogWriter = ginkgo.GinkgoWriter } else { test := ginkgo.CurrentGinkgoTestDescription() reg := regexp.MustCompile("[^a-zA-Z0-9_-]+") // We end the prefix with a slash to ensure that all logs // end up in a directory named after the current test. // // TODO: use a deeper directory hierarchy once gubernator // supports that (https://github.com/kubernetes/test-infra/issues/10289). to.LogPathPrefix = framework.TestContext.ReportDir + "/" + reg.ReplaceAllString(test.FullTestText, "_") + "/" } podlogs.CopyAllLogs(ctx, cs, ns.Name, to) // pod events are something that the framework already collects itself // after a failed test. 
Logging them live is only useful for interactive // debugging, not when we collect reports. if framework.TestContext.ReportDir == "" { podlogs.WatchPods(ctx, cs, ns.Name, ginkgo.GinkgoWriter) } return cancel } func getVolumeOpsFromMetricsForPlugin(ms metrics.Metrics, pluginName string) opCounts { totOps := opCounts{} for method, samples := range ms { switch method { case "storage_operation_status_count": for _, sample := range samples { plugin := string(sample.Metric["volume_plugin"]) if pluginName != plugin { continue } opName := string(sample.Metric["operation_name"]) if opName == "verify_controller_attached_volume" { // We ignore verify_controller_attached_volume because it does not call into // the plugin. It only watches Node API and updates Actual State of World cache continue } totOps[opName] = totOps[opName] + int64(sample.Value) } } } return totOps } func getVolumeOpCounts(c clientset.Interface, pluginName string) opCounts { nodeLimit := 25 metricsGrabber, err := metrics.NewMetricsGrabber(c, nil, true, false, true, false, false) if err != nil { framework.Failf("Error creating metrics grabber : %v", err) } if !metricsGrabber.HasRegisteredMaster() { framework.Skipf("Environment does not support getting controller-manager metrics - skipping") } controllerMetrics, err := metricsGrabber.GrabFromControllerManager() framework.ExpectNoError(err, "Error getting c-m metrics : %v", err) totOps := getVolumeOpsFromMetricsForPlugin(metrics.Metrics(controllerMetrics), pluginName) e2elog.Logf("Node name not specified for getVolumeOpCounts, falling back to listing nodes from API Server") nodes, err := c.CoreV1().Nodes().List(metav1.ListOptions{}) framework.ExpectNoError(err, "Error listing nodes: %v", err) if len(nodes.Items) <= nodeLimit { // For large clusters with > nodeLimit nodes it is too time consuming to // gather metrics from all nodes. 
We just ignore the node metrics // for those clusters for _, node := range nodes.Items { nodeMetrics, err := metricsGrabber.GrabFromKubelet(node.GetName()) framework.ExpectNoError(err, "Error getting Kubelet %v metrics: %v", node.GetName(), err) totOps = addOpCounts(totOps, getVolumeOpsFromMetricsForPlugin(metrics.Metrics(nodeMetrics), pluginName)) } } else { e2elog.Logf("Skipping operation metrics gathering from nodes in getVolumeOpCounts, greater than %v nodes", nodeLimit) } return totOps } func addOpCounts(o1 opCounts, o2 opCounts) opCounts { totOps := opCounts{} seen := sets.NewString() for op, count := range o1 { seen.Insert(op) totOps[op] = totOps[op] + count + o2[op] } for op, count := range o2 { if !seen.Has(op) { totOps[op] = totOps[op] + count } } return totOps } func getMigrationVolumeOpCounts(cs clientset.Interface, pluginName string) (opCounts, opCounts) { if len(pluginName) > 0 { var migratedOps opCounts csiName, err := csilib.GetCSINameFromInTreeName(pluginName) if err != nil { e2elog.Logf("Could not find CSI Name for in-tree plugin %v", pluginName) migratedOps = opCounts{} } else { csiName = "kubernetes.io/csi:" + csiName migratedOps = getVolumeOpCounts(cs, csiName) } return getVolumeOpCounts(cs, pluginName), migratedOps } else { // Not an in-tree driver e2elog.Logf("Test running for native CSI Driver, not checking metrics") return opCounts{}, opCounts{} } } func getTotOps(ops opCounts) int64 { var tot int64 = 0 for _, count := range ops { tot += count } return tot } func validateMigrationVolumeOpCounts(cs clientset.Interface, pluginName string, oldInTreeOps, oldMigratedOps opCounts) { if len(pluginName) == 0 { // This is a native CSI Driver and we don't check ops return } if sets.NewString(strings.Split(*migratedPlugins, ",")...).Has(pluginName) { // If this plugin is migrated based on the test flag storage.migratedPlugins newInTreeOps, _ := getMigrationVolumeOpCounts(cs, pluginName) for op, count := range newInTreeOps { if count != 
oldInTreeOps[op] { framework.Failf("In-tree plugin %v migrated to CSI Driver, however found %v %v metrics for in-tree plugin", pluginName, count-oldInTreeOps[op], op) } } // We don't check for migrated metrics because some negative test cases // may not do any volume operations and therefore not emit any metrics } else { // In-tree plugin is not migrated e2elog.Logf("In-tree plugin %v is not migrated, not validating any metrics", pluginName) // We don't check in-tree plugin metrics because some negative test // cases may not do any volume operations and therefore not emit any // metrics // We don't check counts for the Migrated version of the driver because // if tests are running in parallel a test could be using the CSI Driver // natively and increase the metrics count // TODO(dyzz): Add a dimension to OperationGenerator metrics for // "migrated"->true/false so that we can disambiguate migrated metrics // and native CSI Driver metrics. This way we can check the counts for // migrated version of the driver for stronger negative test case // guarantees (as well as more informative metrics). } }
{ tsInfo := suite.getTestSuiteInfo() return fmt.Sprintf("[Testpattern: %s]%s %s%s", pattern.Name, pattern.FeatureTag, tsInfo.name, tsInfo.featureTag) }
SmartFactoryJSONRPC.py
"""SmartFactory code generator for JSONRPC format. Defines JSONRPC format specific code generation rules. """ import string from generator.generators import SmartFactoryBase from model.enum_element import EnumElement class CodeGenerator(SmartFactoryBase.CodeGenerator): """JSONRPC SmartFactory generator. Defines special cases that affects base code generation to make JSONRPC format-friendly code. """ def __init__(self): """Construct new object.""" SmartFactoryBase.CodeGenerator.__init__(self) def _gen_pre_function_schemas(self, functions): """Generate specific code that goes before schema initialization. JSON RPC generator generates code that adds specific schema for the error_response and adds this schema for every available response. Keyword arguments: functions -- list of functions to generate code for. Returns: Source code with error_response schema initialization and adding to the base SmartFactory.. """ code = u"" for function in functions: if function.message_type.primary_name == u"response": code = u"".join( [code, self._error_response_insert_template.substitute( function_id=function.function_id.primary_name)]) if code: return self._indent_code( u"".join([self._error_response_schema_template, code]), 1) return u"" def
(self, message_type): """Preprocess message_type enum. JSON RPC generator needs to add new message_type "error_response" in case if at least one response available. Keyword arguments: message_type -- message_type enum to preprocess. Returns: Preprocessed message_type enum. """ if "response" in message_type.elements: message_type.elements[u"error_response"] = EnumElement( name=u"error_response") return message_type def _gen_schema_params_fill(self, message_type_name): """Generate schema params fill code. Provides constant set of params for the function in accordance to the JSONRPC format. Keyword arguments: message_type_name -- Name of the messageType enum element. Returns: String with function schema params fill code. """ return u"".join( [self._base_params, self._correlation_id_param if message_type_name != u"notification" else u"", self._additional_response_params if message_type_name == u"response" else u""]) _error_response_insert_template = string.Template( u'''functions_schemes_.insert(std::make_pair(''' u'''ns_smart_device_link::ns_json_handler::''' u'''SmartSchemaKey<FunctionID::eType, messageType::eType>(''' u'''FunctionID::${function_id}, messageType::error_response), ''' u'''error_response_schema));\n''') _error_response_schema_template = ( u'''Members ''' u'''params_members;\n''' u'''params_members[ns_smart_device_link::ns_json_handler::''' u'''strings::S_FUNCTION_ID] = SMember(''' u'''TEnumSchemaItem<FunctionID::eType>::create(''' u'''function_id_items), true);\n''' u'''params_members[ns_smart_device_link::ns_json_handler::''' u'''strings::S_MESSAGE_TYPE] = SMember(''' u'''TEnumSchemaItem<messageType::eType>::create(''' u'''message_type_items), true);\n''' u'''params_members[ns_smart_device_link::ns_json_handler::''' u'''strings::S_PROTOCOL_VERSION] = SMember(''' u'''TNumberSchemaItem<int>::create(), true);\n''' u'''params_members[ns_smart_device_link::ns_json_handler::''' u'''strings::S_PROTOCOL_TYPE] = SMember(''' 
u'''TNumberSchemaItem<int>::create(), true);\n''' u'''params_members[ns_smart_device_link::ns_json_handler::''' u'''strings::S_CORRELATION_ID] = SMember(''' u'''TNumberSchemaItem<int>::create(), true);\n''' u'''params_members[ns_smart_device_link::ns_json_handler::''' u'''strings::kCode] = SMember(''' u'''TNumberSchemaItem<int>::create(), true);\n''' u'''params_members[ns_smart_device_link::ns_json_handler::''' u'''strings::kMessage] = SMember(''' u'''CStringSchemaItem::create(), true);\n''' u'''\n''' u'''Members root_members_map;\n''' u'''root_members_map[ns_smart_device_link::ns_json_handler::''' u'''strings::S_PARAMS] = SMember(''' u'''CObjectSchemaItem::create(params_members), true);\n''' u'''\n''' u'''CSmartSchema error_response_schema(''' u'''CObjectSchemaItem::create(root_members_map));\n''' u'''\n''') _base_params = ( u'''params_members[ns_smart_device_link::ns_json_handler::''' u'''strings::S_FUNCTION_ID] = SMember(TEnumSchemaItem<FunctionID::eType>::''' u'''create(function_id_items), true);\n''' u'''params_members[ns_smart_device_link::ns_json_handler::''' u'''strings::S_MESSAGE_TYPE] = SMember(TEnumSchemaItem<messageType::eType>::''' u'''create(message_type_items), true);\n''' u'''params_members[ns_smart_device_link::ns_json_handler::''' u'''strings::S_PROTOCOL_VERSION] = SMember(TNumberSchemaItem<int>::create(), true);\n''' u'''params_members[ns_smart_device_link::ns_json_handler::''' u'''strings::S_PROTOCOL_TYPE] = SMember(TNumberSchemaItem<int>::create(), true);\n''' ) _correlation_id_param = ( u'''params_members[ns_smart_device_link::ns_json_handler::''' u'''strings::S_CORRELATION_ID] = SMember(TNumberSchemaItem<int>::create(), true);\n''' ) _additional_response_params = ( u'''params_members[ns_smart_device_link::ns_json_handler::''' u'''strings::kCode] = SMember(TNumberSchemaItem<int>::create(), true);\n''' )
_preprocess_message_type
results.py
from __future__ import unicode_literals try: from collections import OrderedDict except ImportError: from django.utils.datastructures import SortedDict as OrderedDict from tablib import Dataset class Error(object): def __init__(self, error, traceback=None, row=None): self.error = error self.traceback = traceback self.row = row class RowResult(object): IMPORT_TYPE_UPDATE = 'update' IMPORT_TYPE_NEW = 'new' IMPORT_TYPE_DELETE = 'delete' IMPORT_TYPE_SKIP = 'skip' IMPORT_TYPE_ERROR = 'error' def __init__(self): self.errors = [] self.diff = None self.import_type = None class Result(object): def __init__(self, *args, **kwargs): super(Result, self).__init__() self.base_errors = [] self.diff_headers = [] self.rows = [] # RowResults self.failed_dataset = Dataset() self.totals = OrderedDict([(RowResult.IMPORT_TYPE_NEW, 0), (RowResult.IMPORT_TYPE_UPDATE, 0), (RowResult.IMPORT_TYPE_DELETE, 0), (RowResult.IMPORT_TYPE_SKIP, 0), (RowResult.IMPORT_TYPE_ERROR, 0)]) self.total_rows = 0 def append_row_result(self, row_result): self.rows.append(row_result) def append_base_error(self, error): self.base_errors.append(error) def
(self, headers): self.failed_dataset.headers = headers + ["Error"] def append_failed_row(self, row, error): row_values = [v for (k, v) in row.items()] row_values.append(error.error.message) self.failed_dataset.append(row_values) def increment_row_result_total(self, row_result): if row_result.import_type: self.totals[row_result.import_type] += 1 def row_errors(self): return [(i + 1, row.errors) for i, row in enumerate(self.rows) if row.errors] def has_errors(self): return bool(self.base_errors or self.row_errors()) def __iter__(self): return iter(self.rows)
add_dataset_headers
structs_field.go
// Copyright GoFrame Author(https://goframe.org). All Rights Reserved. // // This Source Code Form is subject to the terms of the MIT License. // If a copy of the MIT was not distributed with this file, // You can obtain one at https://github.com/gogf/gf. package structs import "reflect" // Tag returns the value associated with key in the tag string. If there is no // such key in the tag, Tag returns the empty string. func (f *Field) Tag(key string) string { return f.Field.Tag.Get(key) } // TagLookup returns the value associated with key in the tag string. // If the key is present in the tag the value (which may be empty) // is returned. Otherwise, the returned value will be the empty string. // The ok return value reports whether the value was explicitly set in // the tag string. If the tag does not have the conventional format, // the value returned by Lookup is unspecified. func (f *Field) TagLookup(key string) (value string, ok bool) { return f.Field.Tag.Lookup(key) } // IsEmbedded returns true if the given field is an anonymous field (embedded) func (f *Field) IsEmbedded() bool { return f.Field.Anonymous } // TagStr returns the tag string of the field. func (f *Field) TagStr() string { return string(f.Field.Tag) } // IsExported returns true if the given field is exported. func (f *Field) IsExported() bool { return f.Field.PkgPath == "" } // Name returns the name of the given field func (f *Field) Name() string { return f.Field.Name } // Type returns the type of the given field func (f *Field) Type() Type { return Type{ Type: f.Field.Type, } } // Kind returns the reflect.Kind for Value of Field `f`. func (f *Field) Kind() reflect.Kind { return f.Value.Kind() } // OriginalKind retrieves and returns the original reflect.Kind for Value of Field `f`. 
func (f *Field) OriginalKind() reflect.Kind { var ( kind = f.Value.Kind() value = f.Value ) for kind == reflect.Ptr { value = value.Elem() kind = value.Kind() } return kind } const ( RecursiveOptionNone = 0 // No recursively retrieving fields as map if the field is an embedded struct. RecursiveOptionEmbedded = 1 // Recursively retrieving fields as map if the field is an embedded struct. RecursiveOptionEmbeddedNoTag = 2 // Recursively retrieving fields as map if the field is an embedded struct and the field has no tag. ) type FieldMapInput struct { // Pointer should be type of struct/*struct. Pointer interface{} // PriorityTagArray specifies the priority tag array for retrieving from high to low. // If it's given `nil`, it returns map[name]*Field, of which the `name` is attribute name. PriorityTagArray []string // RecursiveOption specifies the way retrieving the fields recursively if the attribute // is an embedded struct. It is RecursiveOptionNone in default. RecursiveOption int } // FieldMap retrieves and returns struct field as map[name/tag]*Field from `pointer`. // // The parameter `pointer` should be type of struct/*struct. // // The parameter `priority` specifies the priority tag array for retrieving from high to low. // If it's given `nil`, it returns map[name]*Field, of which the `name` is attribute name. // // The parameter `recursive` specifies the whether retrieving the fields recursively if the attribute // is an embedded struct. // // Note that it only retrieves the exported attributes with first letter up-case from struct. func
(input FieldMapInput) (map[string]*Field, error) { fields, err := getFieldValues(input.Pointer) if err != nil { return nil, err } var ( tagValue = "" mapField = make(map[string]*Field) ) for _, field := range fields { // Only retrieve exported attributes. if !field.IsExported() { continue } tagValue = "" for _, p := range input.PriorityTagArray { tagValue = field.Tag(p) if tagValue != "" && tagValue != "-" { break } } tempField := field tempField.TagValue = tagValue if tagValue != "" { mapField[tagValue] = tempField } else { if input.RecursiveOption != RecursiveOptionNone && field.IsEmbedded() { switch input.RecursiveOption { case RecursiveOptionEmbeddedNoTag: if field.TagStr() != "" { mapField[field.Name()] = tempField break } fallthrough case RecursiveOptionEmbedded: m, err := FieldMap(FieldMapInput{ Pointer: field.Value, PriorityTagArray: input.PriorityTagArray, RecursiveOption: input.RecursiveOption, }) if err != nil { return nil, err } for k, v := range m { if _, ok := mapField[k]; !ok { tempV := v mapField[k] = tempV } } } } else { mapField[field.Name()] = tempField } } } return mapField, nil }
FieldMap
parseGeoJson.js
/* * Licensed to the Apache Software Foundation (ASF) under one
* to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /** * AUTO-GENERATED FILE. DO NOT MODIFY. */ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. 
*/ /** * Parse and decode geo json */ import * as zrUtil from 'zrender/lib/core/util'; import { GeoJSONRegion } from './Region'; function decode(json) { if (!json.UTF8Encoding) { return json; } var jsonCompressed = json; var encodeScale = jsonCompressed.UTF8Scale; if (encodeScale == null) { encodeScale = 1024; } var features = jsonCompressed.features; for (var f = 0; f < features.length; f++) { var feature = features[f]; var geometry = feature.geometry; if (geometry.type === 'Polygon') { var coordinates = geometry.coordinates; for (var c = 0; c < coordinates.length; c++) { coordinates[c] = decodePolygon(coordinates[c], geometry.encodeOffsets[c], encodeScale); } } else if (geometry.type === 'MultiPolygon') { var coordinates = geometry.coordinates; for (var c = 0; c < coordinates.length; c++) { var coordinate = coordinates[c]; for (var c2 = 0; c2 < coordinate.length; c2++) { coordinate[c2] = decodePolygon(coordinate[c2], geometry.encodeOffsets[c][c2], encodeScale); } } } } // Has been decoded jsonCompressed.UTF8Encoding = false; return jsonCompressed; } function decodePolygon(coordinate, encodeOffsets, encodeScale) { var result = []; var prevX = encodeOffsets[0]; var prevY = encodeOffsets[1]; for (var i = 0; i < coordinate.length; i += 2) { var x = coordinate.charCodeAt(i) - 64; var y = coordinate.charCodeAt(i + 1) - 64; // ZigZag decoding x = x >> 1 ^ -(x & 1); y = y >> 1 ^ -(y & 1); // Delta deocding x += prevX; y += prevY; prevX = x; prevY = y; // Dequantize result.push([x / encodeScale, y / encodeScale]); } return result; } export default function parseGeoJSON(geoJson, nameProperty) { geoJson = decode(geoJson); return zrUtil.map(zrUtil.filter(geoJson.features, function (featureObj) { // Output of mapshaper may have geometry null return featureObj.geometry && featureObj.properties && featureObj.geometry.coordinates.length > 0; }), function (featureObj) { var properties = featureObj.properties; var geo = featureObj.geometry; var geometries = []; if (geo.type === 
'Polygon') { var coordinates = geo.coordinates; geometries.push({ type: 'polygon', // According to the GeoJSON specification. // First must be exterior, and the rest are all interior(holes). exterior: coordinates[0], interiors: coordinates.slice(1) }); } if (geo.type === 'MultiPolygon') { var coordinates = geo.coordinates; zrUtil.each(coordinates, function (item) { if (item[0]) { geometries.push({ type: 'polygon', exterior: item[0], interiors: item.slice(1) }); } }); } var region = new GeoJSONRegion(properties[nameProperty || 'name'], geometries, properties.cp); region.properties = properties; return region; }); }
* or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file
saq.js
// GENERATED FILE
export default [{"locale":"saq","pluralRuleFunction":function (n,ord){if(ord)return"other";return n==1?"one":"other"},"fields":{"year":{"displayName":"Lari","relative":{"0":"this year","1":"next year","-1":"last year"},"relativeTime":{"future":{"other":"+{0} y"},"past":{"other":"-{0} y"}}},"month":{"displayName":"Lapa","relative":{"0":"this month","1":"next month","-1":"last month"},"relativeTime":{"future":{"other":"+{0} m"},"past":{"other":"-{0} m"}}},"day":{"displayName":"Mpari","relative":{"0":"Duo","1":"Taisere","-1":"Ng’ole"},"relativeTime":{"future":{"other":"+{0} d"},"past":{"other":"-{0} d"}}},"hour":{"displayName":"Saai","relativeTime":{"future":{"other":"+{0} h"},"past":{"other":"-{0} h"}}},"minute":{"displayName":"Idakika","relativeTime":{"future":{"other":"+{0} min"},"past":{"other":"-{0} min"}}},"second":{"displayName":"Isekondi","relative":{"0":"now"},"relativeTime":{"future":{"other":"+{0} s"},"past":{"other":"-{0} s"}}}}},{"locale":"saq-KE","parentLocale":"saq"}];
autocomplete.py
# -*- coding: utf-8 -*- """ Contains the AutoCompleteMode """ import logging from pyqode.qt import QtCore, QtGui from pyqode.core.api import TextHelper from pyqode.core.api.mode import Mode class AutoCompleteMode(Mode): """ Automatically complete quotes and parentheses Generic auto complete mode that automatically completes the following symbols: - " -> " - ' -> ' - ( -> ) - [ -> ] - { -> } """ #: Auto complete mapping, maps input key with completion text. MAPPING = {'"': '"', "'": "'", "(": ")", "{": "}", "[": "]"} #: The format to use for each symbol in mapping when there is a selection SELECTED_QUOTES_FORMATS = {key: '%s%s%s' for key in MAPPING.keys()} #: The format to use for each symbol in mapping when there is no selection QUOTES_FORMATS = {key: '%s' for key in MAPPING.keys()} def
(self): super(AutoCompleteMode, self).__init__() self.logger = logging.getLogger(__name__) self._ignore_post = False def on_state_changed(self, state): if state: self.editor.post_key_pressed.connect(self._on_post_key_pressed) self.editor.key_pressed.connect(self._on_key_pressed) else: self.editor.post_key_pressed.disconnect(self._on_post_key_pressed) self.editor.key_pressed.disconnect(self._on_key_pressed) def _on_post_key_pressed(self, event): if not event.isAccepted() and not self._ignore_post: txt = event.text() trav = self.editor.textCursor() assert isinstance(trav, QtGui.QTextCursor) trav.movePosition(trav.Left, trav.MoveAnchor, 2) literal = TextHelper(self.editor).is_comment_or_string(trav) if not literal: next_char = TextHelper(self.editor).get_right_character() if txt in self.MAPPING: to_insert = self.MAPPING[txt] if (not next_char or next_char in self.MAPPING.keys() or next_char in self.MAPPING.values() or next_char.isspace()): TextHelper(self.editor).insert_text( self.QUOTES_FORMATS[txt] % to_insert) self._ignore_post = False def _on_key_pressed(self, event): txt = event.text() cursor = self.editor.textCursor() from pyqode.qt import QtGui assert isinstance(cursor, QtGui.QTextCursor) if cursor.hasSelection(): # quoting of selected text if event.text() in self.MAPPING.keys(): first = event.text() last = self.MAPPING[event.text()] cursor.insertText( self.SELECTED_QUOTES_FORMATS[event.text()] % ( first, cursor.selectedText(), last)) self.editor.setTextCursor(cursor) event.accept() else: self._ignore_post = True return next_char = TextHelper(self.editor).get_right_character() self.logger.debug('next char: %s', next_char) ignore = False if event.key() == QtCore.Qt.Key_Backspace: # get the character that will get deleted tc = self.editor.textCursor() pos = tc.position() tc.movePosition(tc.Left) tc.movePosition(tc.Right, tc.KeepAnchor) del_char = tc.selectedText() if del_char in self.MAPPING and \ self.MAPPING[del_char] == next_char: tc.beginEditBlock() 
tc.movePosition(tc.Right, tc.KeepAnchor) tc.insertText('') tc.setPosition(pos - 2) tc.endEditBlock() self.editor.setTextCursor(tc) ignore = True elif txt and next_char == txt and next_char in self.MAPPING: ignore = True elif event.text() == ')' or event.text() == ']' or event.text() == '}': # if typing the same symbol twice, the symbol should not be written # and the cursor moved just after the char # e.g. if you type ) just before ), the cursor will just move after # the existing ) if next_char == event.text(): ignore = True if ignore: event.accept() TextHelper(self.editor).clear_selection() TextHelper(self.editor).move_right()
__init__
lib.rs
use proc_macro::{Group, Span, TokenStream, TokenTree};
#[proc_macro] pub fn reedition(t: TokenStream) -> TokenStream { let mut t = t.into_iter(); let dummy = t.next().unwrap(); let stream = t.next().unwrap(); match stream { TokenTree::Group(g) => respan(dummy.span(), g.stream()), _ => unreachable!(), } } fn respan(span: Span, tokens: TokenStream) -> TokenStream { tokens .into_iter() .map(|token| match token { TokenTree::Group(group) => { let mut fixed = Group::new(group.delimiter(), respan(span, group.stream())); fixed.set_span(group.span().resolved_at(span)); TokenTree::Group(fixed) } mut token => { token.set_span(token.span().resolved_at(span)); token } }) .collect() }
arch.py
## # @file # This file is part of SeisSol. # # @author Carsten Uphoff (c.uphoff AT tum.de, http://www5.in.tum.de/wiki/index.php/Carsten_Uphoff,_M.Sc.) # # @section LICENSE # Copyright (c) 2015-2018, SeisSol Group # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # 3. Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from this # software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # # @section DESCRIPTION # from .memory import DenseMemoryLayout class Architecture(object): def __init__(self, name, precision, alignment, enablePrefetch=False, sub_name=None, host_name=None): """ Args: name (str): name of the compute (main) architecture. sub_name (str): name of sub. 
architecture type e.g., a model of Nvidia streaming multiprocessor (sm_60, sm_61, etc). In case of CPU, the field is equal to None precision (str): either 'd' or 's' character which stands for 'double' or 'single' precision alignment (int): length of a vector register (unit) in bytes enablePrefetch (bool): indicates whether the compute (main) architecture supports data prefetching host_name (str): name of the host (CPU) architecture. If the code is intentend to be generated to CPU-like architecture then the field should be equal to None """ self.name = name self.sub_name = sub_name self.host_name = host_name self.precision = precision.upper() if self.precision == 'D': self.bytesPerReal = 8 self.typename = 'double' self.epsilon = 2.22e-16 elif self.precision == 'S': self.bytesPerReal = 4 self.typename = 'float' self.epsilon = 1.19e-7 else: raise ValueError(f'Unknown precision type {self.precision}') self.alignment = alignment assert self.alignment % self.bytesPerReal == 0 self.alignedReals = self.alignment // self.bytesPerReal self.enablePrefetch = enablePrefetch self.uintTypename = 'unsigned' self.ulongTypename = 'unsigned long' self._tmpStackLimit = 524288 def setTmpStackLimit(self, tmpStackLimit): self._tmpStackLimit = tmpStackLimit def alignedLower(self, index): return index - index % self.alignedReals def alignedUpper(self, index): return index + (self.alignedReals - index % self.alignedReals) % self.alignedReals def alignedShape(self, shape): return (self.alignedUpper(shape[0]),) + shape[1:] def
(self, offset): return offset % self.alignedReals == 0 def formatConstant(self, constant): return str(constant) + ('f' if self.precision == 'S' else '') def onHeap(self, numReals): return (numReals * self.bytesPerReal) > self._tmpStackLimit def _get_name_and_precision(ident): return ident[1:], ident[0].upper() def getArchitectureIdentifiedBy(ident): name, precision = _get_name_and_precision(ident) arch = { 'noarch': Architecture(name, precision, 16, False), 'wsm': Architecture(name, precision, 16, False), 'snb': Architecture(name, precision, 32, False), 'hsw': Architecture(name, precision, 32, False), 'skx': Architecture(name, precision, 64, True), 'knc': Architecture(name, precision, 64, False), 'knl': Architecture(name, precision, 64, True), # Libxsmm currently supports prefetch only for KNL kernels 'rome': Architecture(name, precision, 32, False), 'thunderx2t99': Architecture(name, precision, 16, False), 'power9': Architecture(name, precision, 16, False) } return arch[name] def getHeterogeneousArchitectureIdentifiedBy(compute_ident, compute_sub_arch=None, host_ident=None): compute_name, compute_precision = _get_name_and_precision(compute_ident) host_name, host_precision = _get_name_and_precision(host_ident) if (compute_precision != host_precision): raise ValueError(f'Precision of host and compute arch. must be the same. 
' f'Given: {host_ident}, {compute_ident}') arch = { 'nvidia': Architecture(compute_name, compute_precision, 64, False, compute_sub_arch, host_name) } return arch[compute_name] def useArchitectureIdentifiedBy(compute_ident, compute_sub_arch=None, host_ident=None): if not (compute_sub_arch or host_ident): arch = getArchitectureIdentifiedBy(compute_ident) elif (compute_sub_arch and host_ident): arch = getHeterogeneousArchitectureIdentifiedBy(compute_ident, compute_sub_arch, host_ident) else: raise ValueError(f'given an incomplete set of input parameters: ' f'{compute_ident}, {compute_sub_arch}, {host_ident}') DenseMemoryLayout.setAlignmentArch(arch) return arch
checkAlignment
static_and_dynamic_functions.rs
use criterion::{black_box, criterion_group, criterion_main, Criterion}; use wasmer::*; static BASIC_WAT: &str = r#"(module (func $multiply (import "env" "multiply") (param i32 i32) (result i32)) (func (export "add") (param i32 i32) (result i32) (i32.add (local.get 0) (local.get 1))) (func (export "add20") (param i32 i32 i32 i32 i32 i32 i32 i32 i32 i32 i32 i32 i32 i32 i32 i32 i32 i32 i32 i32) (result i32) (i32.add (i32.add (i32.add (i32.add (i32.add (local.get 0) (local.get 1)) (i32.add (local.get 2) (local.get 3))) (i32.add (i32.add (local.get 4) (local.get 5)) (i32.add (local.get 6) (local.get 7)))) (i32.add (i32.add (i32.add (local.get 8) (local.get 9)) (i32.add (local.get 10) (local.get 11))) (i32.add (i32.add (local.get 12) (local.get 13)) (i32.add (local.get 14) (local.get 15))))) (i32.add (i32.add (local.get 16) (local.get 17)) (i32.add (local.get 18) (local.get 19)))) ) (func (export "double_then_add") (param i32 i32) (result i32) (i32.add (call $multiply (local.get 0) (i32.const 2)) (call $multiply (local.get 1) (i32.const 2)))) )"#; pub fn run_basic_static_function(store: &Store, compiler_name: &str, c: &mut Criterion)
pub fn run_basic_dynamic_function(store: &Store, compiler_name: &str, c: &mut Criterion) { let module = Module::new(store, BASIC_WAT).unwrap(); let import_object = imports! { "env" => { "multiply" => Function::new_native(store, |a: i32, b: i32| a * b), }, }; let instance = Instance::new(&module, &import_object).unwrap(); let dyn_f: &Function = instance.exports.get("add").unwrap(); c.bench_function(&format!("basic dynfunc {}", compiler_name), |b| { b.iter(|| { let dyn_result = black_box(dyn_f.call(&[Val::I32(4), Val::I32(6)]).unwrap()); assert_eq!(dyn_result[0], Val::I32(10)); }) }); let dyn_f_many: &Function = instance.exports.get("add20").unwrap(); c.bench_function( &format!("basic dynfunc with many args {}", compiler_name), |b| { b.iter(|| { let dyn_result = black_box( dyn_f_many .call(&[ Val::I32(1), Val::I32(2), Val::I32(3), Val::I32(4), Val::I32(5), Val::I32(6), Val::I32(7), Val::I32(8), Val::I32(9), Val::I32(10), Val::I32(11), Val::I32(12), Val::I32(13), Val::I32(14), Val::I32(15), Val::I32(16), Val::I32(17), Val::I32(18), Val::I32(19), Val::I32(20), ]) .unwrap(), ); assert_eq!(dyn_result[0], Val::I32(210)); }) }, ); } fn run_static_benchmarks(_c: &mut Criterion) { #[cfg(feature = "llvm")] { let store = Store::new(&Universal::new(wasmer_compiler_llvm::LLVM::new()).engine()); run_basic_static_function(&store, "llvm", c); } #[cfg(feature = "cranelift")] { let store = Store::new(&Universal::new(wasmer_compiler_cranelift::Cranelift::new()).engine()); run_basic_static_function(&store, "cranelift", c); } #[cfg(feature = "singlepass")] { let store = Store::new(&Universal::new(wasmer_compiler_singlepass::Singlepass::new()).engine()); run_basic_static_function(&store, "singlepass", c); } } fn run_dynamic_benchmarks(_c: &mut Criterion) { #[cfg(feature = "llvm")] { let store = Store::new(&Universal::new(wasmer_compiler_llvm::LLVM::new()).engine()); run_basic_dynamic_function(&store, "llvm", c); } #[cfg(feature = "cranelift")] { let store = 
Store::new(&Universal::new(wasmer_compiler_cranelift::Cranelift::new()).engine()); run_basic_dynamic_function(&store, "cranelift", c); } #[cfg(feature = "singlepass")] { let store = Store::new(&Universal::new(wasmer_compiler_singlepass::Singlepass::new()).engine()); run_basic_dynamic_function(&store, "singlepass", c); } } criterion_group!(benches, run_static_benchmarks, run_dynamic_benchmarks); criterion_main!(benches);
{ let module = Module::new(store, BASIC_WAT).unwrap(); let import_object = imports! { "env" => { "multiply" => Function::new_native(store, |a: i32, b: i32| a * b), }, }; let instance = Instance::new(&module, &import_object).unwrap(); let dyn_f: &Function = instance.exports.get("add").unwrap(); let f: TypedFunction<(i32, i32), i32> = dyn_f.native().unwrap(); c.bench_function(&format!("basic static func {}", compiler_name), |b| { b.iter(|| { let result = black_box(f.call(4, 6).unwrap()); assert_eq!(result, 10); }) }); let dyn_f_many: &Function = instance.exports.get("add20").unwrap(); let f_many: TypedFunction< ( i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, ), i32, > = dyn_f_many.native().unwrap(); c.bench_function( &format!("basic static func with many args {}", compiler_name), |b| { b.iter(|| { let result = black_box( f_many .call( 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, ) .unwrap(), ); assert_eq!(result, 210); }) }, ); }
read_parquet.rs
use std::io::Read; use std::{fs, io::Cursor, path::PathBuf}; use criterion::{criterion_group, criterion_main, Criterion}; use arrow2::error::Result; use arrow2::io::parquet::read::page_iter_to_array; use parquet2::read::{get_page_iterator, read_metadata}; fn to_buffer(size: usize) -> Vec<u8> {
let path = PathBuf::from(dir).join(format!("fixtures/pyarrow3/v1/benches_{}.parquet", size)); let metadata = fs::metadata(&path).expect("unable to read metadata"); let mut file = fs::File::open(path).unwrap(); let mut buffer = vec![0; metadata.len() as usize]; file.read(&mut buffer).expect("buffer overflow"); buffer } fn read_decompressed_pages(buffer: &[u8], column: usize) -> Result<()> { let mut file = Cursor::new(buffer); let metadata = read_metadata(&mut file)?; let row_group = 0; let iter = get_page_iterator(&metadata, row_group, column, &mut file)?; let descriptor = &iter.descriptor().clone(); let _ = page_iter_to_array(iter, descriptor)?; Ok(()) } fn add_benchmark(c: &mut Criterion) { (10..=20).step_by(2).for_each(|i| { let buffer = to_buffer(2usize.pow(i)); let a = format!("read i64 2^{}", i); c.bench_function(&a, |b| { b.iter(|| read_decompressed_pages(&buffer, 0).unwrap()) }); }); (10..=20).step_by(2).for_each(|i| { let buffer = to_buffer(2usize.pow(i)); let a = format!("read utf8 2^{}", i); c.bench_function(&a, |b| { b.iter(|| read_decompressed_pages(&buffer, 2).unwrap()) }); }); (10..=20).step_by(2).for_each(|i| { let buffer = to_buffer(2usize.pow(i)); let a = format!("read bool 2^{}", i); c.bench_function(&a, |b| { b.iter(|| read_decompressed_pages(&buffer, 3).unwrap()) }); }); } criterion_group!(benches, add_benchmark); criterion_main!(benches);
let dir = env!("CARGO_MANIFEST_DIR");
db_owid.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import country_converter as coco import pandas as pd from covsirphy.util.term import Term from covsirphy.loading.db_base import _RemoteDatabase class
(_RemoteDatabase): """ Access "Our World In Data". https://github.com/owid/covid-19-data/tree/master/public/data https://ourworldindata.org/coronavirus Args: filename (str): CSV filename to save records """ # URL for vaccine data URL_V = "https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/" URL_V_REC = f"{URL_V}vaccinations.csv" URL_V_LOC = f"{URL_V}locations.csv" # URL for PCR data URL_P = "https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/testing/" URL_P_REC = f"{URL_P}covid-testing-all-observations.csv" # Citation CITATION = "Hasell, J., Mathieu, E., Beltekian, D. et al." \ " A cross-country database of COVID-19 testing. Sci Data 7, 345 (2020)." \ " https://doi.org/10.1038/s41597-020-00688-8" # Column names and data types # {"name in database": "name defined in Term class"} COL_DICT = { "date": Term.DATE, "location": Term.COUNTRY, Term.PROVINCE: Term.PROVINCE, "iso_code": Term.ISO3, "vaccines": Term.PRODUCT, "total_vaccinations": Term.VAC, "people_vaccinated": Term.V_ONCE, "people_fully_vaccinated": Term.V_FULL, "tests": Term.TESTS, } def download(self, verbose): """ Download the dataset from the server and set the list of primary sources. Args: verbose (int): level of verbosity Returns: pandas.DataFrame Index reset index Columns defined by the first values of self.COL_DICT.values() Note: If @verbose is equal to or over 1, how to show the list will be explained. 
""" # Download datasets if verbose: print("Retrieving datasets from Our World In Data https://github.com/owid/covid-19-data/") # Vaccinations v_rec_cols = [ "date", "location", "iso_code", "total_vaccinations", "people_vaccinated", "people_fully_vaccinated"] v_rec_df = pd.read_csv(self.URL_V_REC, usecols=v_rec_cols) v_loc_df = pd.read_csv(self.URL_V_LOC, usecols=["location", "vaccines"]) v_df = v_rec_df.merge(v_loc_df, how="left", on="location") # Tests pcr_rec_cols = ["ISO code", "Date", "Daily change in cumulative total", "Cumulative total"] pcr_df = pd.read_csv(self.URL_P_REC, usecols=pcr_rec_cols) pcr_df = pcr_df.rename(columns={"ISO code": "iso_code", "Date": "date"}) pcr_df["cumsum"] = pcr_df.groupby("iso_code")["Daily change in cumulative total"].cumsum() pcr_df = pcr_df.assign(tests=lambda x: x["Cumulative total"].fillna(x["cumsum"])) # Combine data (vaccinations/tests) df = v_df.set_index(["iso_code", "date"]) df = df.combine_first(pcr_df.set_index(["iso_code", "date"]).loc[:, ["tests"]]) df = df.reset_index() # Location (country/province) df["location"] = df["location"].replace( { # COG "Congo": "Republic of the Congo", } ) df = df.loc[~df["iso_code"].str.contains("OWID_")] df["location"] = df.groupby("iso_code")["location"].bfill() df.loc[df["location"] == df["iso_code"], "location"] = None df.loc[df["location"].isna(), "location"] = df.loc[df["location"].isna(), "iso_code"].apply( lambda x: coco.convert(x, to="name_short", not_found=None)) df[self.PROVINCE] = self.UNKNOWN return df
_OWID
download.go
package cmd import ( "fmt" "github.com/spf13/cobra" "golang.org/x/sync/errgroup" "github.com/Shopify/themekit/kit" ) var downloadCmd = &cobra.Command{ Use: "download <filenames>", Short: "Download one or all of the theme files", Long: `Download will download specific files from shopify servers if provided file names. If no filenames are provided then download will download every file in the project and write them to disk. For more documentation please see http://shopify.github.io/themekit/commands/#download `, PreRunE: arbiter.generateThemeClients, RunE: arbiter.forSingleClient(download), } func download(client kit.ThemeClient, filenames []string) error
func downloadFile(client kit.ThemeClient, filename string) error { if !arbiter.force && !arbiter.manifest.NeedsDownloading(filename, client.Config.Environment) { if arbiter.verbose { stdOut.Printf( "[%s] skipping %s", green(client.Config.Environment), blue(filename), ) } return nil } asset, err := client.Asset(filename) if err != nil { return fmt.Errorf("error downloading asset: %v", err) } if err := asset.Write(client.Config.Directory); err != nil { return fmt.Errorf("error writing asset: %v", err) } checksum, _ := asset.CheckSum() if err := arbiter.manifest.Set(asset.Key, client.Config.Environment, asset.UpdatedAt, checksum); err != nil { return fmt.Errorf("error updating manifest: %v", err) } if arbiter.verbose { stdOut.Printf( "[%s] Successfully wrote %s to disk", green(client.Config.Environment), blue(filename), ) } return nil }
{
	var downloadGroup errgroup.Group
	// Expand/filter the requested names into the files fetchable for this
	// environment (the manifest decides what is eligible).
	filenames = arbiter.manifest.FetchableFiles(filenames, client.Config.Environment)
	bar := arbiter.newProgressBar(len(filenames), client.Config.Environment)
	for _, filename := range filenames {
		filename := filename // capture the loop variable for the goroutine
		downloadGroup.Go(func() error {
			// Individual failures are reported on stderr but never abort
			// the remaining downloads: the goroutine always returns nil.
			if err := downloadFile(client, filename); err != nil {
				stdErr.Printf("[%s] %s", green(client.Config.Environment), err)
			}
			incBar(bar)
			return nil
		})
	}
	return downloadGroup.Wait()
}
client.go
package cli import ( "fmt" "os" "path/filepath" "strconv" "text/tabwriter" "github.com/ipfs/go-cid" "github.com/libp2p/go-libp2p-core/peer" "golang.org/x/xerrors" "gopkg.in/urfave/cli.v2" "github.com/filecoin-project/go-address" lapi "github.com/filecoin-project/lotus/api" actors "github.com/filecoin-project/lotus/chain/actors" "github.com/filecoin-project/lotus/chain/types" ) var clientCmd = &cli.Command{ Name: "client", Usage: "Make deals, store data, retrieve data", Subcommands: []*cli.Command{ clientImportCmd, clientLocalCmd, clientDealCmd, clientFindCmd, clientRetrieveCmd, clientQueryAskCmd, clientListDeals, }, } var clientImportCmd = &cli.Command{ Name: "import", Usage: "Import data", Action: func(cctx *cli.Context) error { api, closer, err := GetFullNodeAPI(cctx) if err != nil { return err } defer closer() ctx := ReqContext(cctx) absPath, err := filepath.Abs(cctx.Args().First()) if err != nil { return err } c, err := api.ClientImport(ctx, absPath) if err != nil { return err } fmt.Println(c.String()) return nil }, } var clientLocalCmd = &cli.Command{ Name: "local", Usage: "List locally imported data", Action: func(cctx *cli.Context) error { api, closer, err := GetFullNodeAPI(cctx) if err != nil { return err } defer closer() ctx := ReqContext(cctx) list, err := api.ClientListImports(ctx) if err != nil { return err } for _, v := range list { fmt.Printf("%s %s %d %s\n", v.Key, v.FilePath, v.Size, v.Status) } return nil }, } var clientDealCmd = &cli.Command{ Name: "deal", Usage: "Initialize storage deal with a miner", Action: func(cctx *cli.Context) error { api, closer, err := GetFullNodeAPI(cctx) if err != nil { return err } defer closer() ctx := ReqContext(cctx) if cctx.NArg() != 4 { return xerrors.New("expected 4 args: dataCid, miner, price, duration") } // [data, miner, dur] data, err := cid.Parse(cctx.Args().Get(0)) if err != nil { return err } miner, err := address.NewFromString(cctx.Args().Get(1)) if err != nil { return err } price, err := 
types.ParseFIL(cctx.Args().Get(2)) if err != nil { return err } dur, err := strconv.ParseInt(cctx.Args().Get(3), 10, 32) if err != nil { return err } a, err := api.WalletDefaultAddress(ctx) if err != nil { return err } proposal, err := api.ClientStartDeal(ctx, data, a, miner, types.BigInt(price), uint64(dur)) if err != nil { return err } fmt.Println(proposal) return nil }, } var clientFindCmd = &cli.Command{ Name: "find", Usage: "find data in the network", Action: func(cctx *cli.Context) error { if !cctx.Args().Present() { fmt.Println("Usage: find [CID]") return nil } file, err := cid.Parse(cctx.Args().First()) if err != nil { return err } api, closer, err := GetFullNodeAPI(cctx) if err != nil { return err } defer closer() ctx := ReqContext(cctx) // Check if we already have this data locally has, err := api.ClientHasLocal(ctx, file) if err != nil { return err } if has { fmt.Println("LOCAL") } offers, err := api.ClientFindData(ctx, file) if err != nil { return err } for _, offer := range offers { if offer.Err != "" { fmt.Printf("ERR %s@%s: %s\n", offer.Miner, offer.MinerPeerID, offer.Err) continue } fmt.Printf("RETRIEVAL %s@%s-%sfil-%db\n", offer.Miner, offer.MinerPeerID, types.FIL(offer.MinPrice), offer.Size) } return nil }, } var clientRetrieveCmd = &cli.Command{ Name: "retrieve", Usage: "retrieve data from network", Flags: []cli.Flag{ &cli.StringFlag{ Name: "address", Usage: "address to use for transactions", }, }, Action: func(cctx *cli.Context) error { if cctx.NArg() != 2 { fmt.Println("Usage: retrieve [CID] [outfile]") return nil } api, closer, err := GetFullNodeAPI(cctx) if err != nil { return err } defer closer() ctx := ReqContext(cctx) var payer address.Address if cctx.String("address") != "" { payer, err = address.NewFromString(cctx.String("address")) } else { payer, err = api.WalletDefaultAddress(ctx) } if err != nil { return err } file, err := cid.Parse(cctx.Args().Get(0)) if err != nil { return err } // Check if we already have this data locally /*has, 
err := api.ClientHasLocal(ctx, file) if err != nil { return err } if has { fmt.Println("Success: Already in local storage") return nil }*/ // TODO: fix offers, err := api.ClientFindData(ctx, file) if err != nil { return err } // TODO: parse offer strings from `client find`, make this smarter if len(offers) < 1 { fmt.Println("Failed to find file") return nil } if err := api.ClientRetrieve(ctx, offers[0].Order(payer), cctx.Args().Get(1)); err != nil { return xerrors.Errorf("Retrieval Failed: %w", err) } fmt.Println("Success") return nil }, } var clientQueryAskCmd = &cli.Command{ Name: "query-ask", Usage: "find a miners ask", Flags: []cli.Flag{ &cli.StringFlag{ Name: "peerid", Usage: "specify peer ID of node to make query against", }, &cli.Int64Flag{ Name: "size", Usage: "data size in bytes", }, &cli.Int64Flag{ Name: "duration", Usage: "deal duration", }, }, Action: func(cctx *cli.Context) error { if cctx.NArg() != 1 { fmt.Println("Usage: query-ask [address]") return nil } maddr, err := address.NewFromString(cctx.Args().First()) if err != nil { return err } api, closer, err := GetFullNodeAPI(cctx) if err != nil { return err } defer closer() ctx := ReqContext(cctx) var pid peer.ID if pidstr := cctx.String("peerid"); pidstr != "" { p, err := peer.Decode(pidstr) if err != nil { return err } pid = p } else { ret, err := api.StateCall(ctx, &types.Message{ To: maddr, From: maddr, Method: actors.MAMethods.GetPeerID, }, types.EmptyTSK) if err != nil { return xerrors.Errorf("failed to get peerID for miner: %w", err) } if ret.ExitCode != 0 { return fmt.Errorf("call to GetPeerID was unsuccesful (exit code %d)", ret.ExitCode) } if peer.ID(ret.Return) == peer.ID("SETME") { return fmt.Errorf("the miner hasn't initialized yet") } p, err := peer.IDFromBytes(ret.Return) if err != nil { return err } pid = p } ask, err := api.ClientQueryAsk(ctx, pid, maddr) if err != nil { return err } fmt.Printf("Ask: %s\n", maddr) fmt.Printf("Price per GiB: %s\n", types.FIL(ask.Ask.Price)) size := 
cctx.Int64("size") if size == 0 { return nil } perEpoch := types.BigDiv(types.BigMul(ask.Ask.Price, types.NewInt(uint64(size))), types.NewInt(1<<30)) fmt.Printf("Price per Block: %s\n", types.FIL(perEpoch)) duration := cctx.Int64("duration") if duration == 0
fmt.Printf("Total Price: %s\n", types.FIL(types.BigMul(perEpoch, types.NewInt(uint64(duration))))) return nil }, } var clientListDeals = &cli.Command{ Name: "list-deals", Usage: "List storage market deals", Action: func(cctx *cli.Context) error { api, closer, err := GetFullNodeAPI(cctx) if err != nil { return err } defer closer() ctx := ReqContext(cctx) deals, err := api.ClientListDeals(ctx) if err != nil { return err } w := tabwriter.NewWriter(os.Stdout, 2, 4, 2, ' ', 0) fmt.Fprintf(w, "DealCid\tProvider\tState\tPieceRef\tSize\tPrice\tDuration\n") for _, d := range deals { fmt.Fprintf(w, "%s\t%s\t%s\t%x\t%d\t%s\t%d\n", d.ProposalCid, d.Provider, lapi.DealStates[d.State], d.PieceRef, d.Size, d.PricePerEpoch, d.Duration) } return w.Flush() }, }
{
	// No deal duration supplied: the per-block price was already printed,
	// so there is no total price to compute.
	return nil
}
rhyme.py
#! /usr/bin/env python #-*- coding:utf-8 -*- from utils import * import pypinyin py_raw = os.path.join(DATA_RAW_DIR, 'pinyin.txt') _rhy_path = os.path.join(DATA_PROCESSED_DIR, 'rhy_dict.json') ''' Tonal and rhyming reference from: https://baike.baidu.com/item/绝句律诗格律 ''' ''' 类型一 ⊙平平仄仄,⊙仄仄平平。(韵)⊙仄平平仄,平平仄仄平。(韵) 例诗: 山中 王勃 长江悲已滞,万里念将归。况属高秋晚,山中黄叶飞。 ''' five_char_type_a = { 'tone': [ '*ppzz', '*zzpp', '*zppz', 'ppzzp' ], 'rhyme': [1, 3] } ''' 类型二 平平仄仄平,(韵)⊙仄仄平平。(韵)⊙仄⊙平仄,平平仄仄平。(韵) 例诗:壬辰元日试笔呈诸师友 陈忠远(即阿袁) 龙光绚九天,虎幄定三边。一守凤城道:“新年胜旧年!” ''' five_char_type_b = { 'tone': [ 'ppzzp', '*zzpp', '*z*pz', 'ppzzp' ], 'rhyme': [0, 1, 3] } ''' 类型三 ⊙仄平平仄,平平仄仄平。(韵)⊙平平仄仄,⊙仄仄平平。(韵) 例诗:南行别第 韦承庆 万里人南去,三春雁北飞。不知何岁月,得与尔同归。 ''' five_char_type_c = { 'tone': [ '*zppz', 'ppzzp', '*ppzz', '*zzpp' ], 'rhyme': [1, 3] } ''' 类型四 ⊙仄仄平平,(韵)平平仄仄平。(韵)⊙平平仄仄,⊙仄仄平平。(韵) 例诗: 塞下曲 卢纶 林暗草惊风,将军夜引弓。平明寻白羽,没在石棱中。 ''' five_char_type_d = { 'tone': [ '*zzpp', 'ppzzp', '*ppzz', '*zzpp' ], 'rhyme': [0, 1, 3] } five_char_tones = [ five_char_type_a, five_char_type_b, five_char_type_c, five_char_type_d ] ''' 类型一 平起、首句不押韵 ⊙平⊙仄平平仄, ⊙仄平平仄仄平。(韵) ⊙仄⊙平平仄仄, ⊙平⊙仄仄平平。(韵) 例诗:南游感兴 窦巩 伤心欲问前朝事, 惟见江流去不回。 日暮东风春草绿, 鹧鸪飞上越王台。 ''' seven_char_type_a = { 'tone': [ '*p*zppz', '*zppzzp', '*z*ppzz', '*p*zzpp' ], 'rhyme': [1, 3] } ''' 类型二 平起、首句押韵 ⊙平⊙仄仄平平,(韵) ⊙仄平平仄仄平。(韵) ⊙仄⊙平平仄仄, ⊙平⊙仄仄平平。(韵) 例诗:出塞 王昌龄 秦时明月汉时关, 万里长征人未还。 但使龙城飞将在, 不教胡马度阴山。 ''' seven_char_type_b = { 'tone': [ '*p*zzpp', '*zppzzp', '*z*ppzz', '*p*zzpp' ], 'rhyme': [0, 1, 3] } ''' 类型三 仄起、首句不押韵 ⊙仄⊙平平仄仄, ⊙平⊙仄仄平平。(韵) ⊙平⊙仄平平仄, ⊙仄平平仄仄平。(韵) 例诗:九月九日忆山东兄弟王维 独在异乡为异客, 每逢佳节倍思亲。 遥知兄弟登高处, 遍插茱萸少一人。 ''' seven_char_type_c = { 'tone': [ '*z*ppzz', '*p*zzpp', '*p*zppz', '*zppzzp' ], 'rhyme': [1, 3] } ''' 类型四 仄起、首句押韵 ⊙仄平平仄仄平,(韵) ⊙平⊙仄仄平平。(韵) ⊙平⊙仄平平仄, ⊙仄平平仄仄平。(韵) 例诗:从军行 王昌龄 青海长云暗雪山, 孤城遥望玉门关。 黄沙百战穿金甲, 不破楼兰终不还! 
''' seven_char_type_d = { 'tone': [ '*zppzzp', '*p*zzpp', '*p*zppz', '*zppzzp' ], 'rhyme': [0, 1, 3] } seven_char_tones = [ seven_char_type_a, seven_char_type_b, seven_char_type_c, seven_char_type_d ] tone_rules = { 5: five_char_tones, 7: seven_char_tones } class RhymeUtil: def get_rhyme_category(self, vowel): vowel = vowel.upper() if vowel in ['A', 'IA', 'UA']: return 1 elif vowel in ['O', 'E', 'UO']: return 2 elif vowel in ['IE', 'VE']: return 3 elif vowel in ['AI', 'UAI']: return 4 elif vowel in ['EI', 'UI']: return 5 elif vowel in ['AO', 'IAO']: return 6 elif vowel in ['OU', 'IU']: return 7 elif vowel in ['AN', 'IAN', 'UAN', 'VAN']: return 8 elif vowel in ['EN', 'IN', 'UN', 'VN']: return 9 elif vowel in ['ANG', 'IANG', 'UANG']: return 10 elif vowel in ['ENG', 'ING']: return 11 elif vowel in ['ONG', 'IONG']: return 12 # elif (vowels == 'I' and not pinyin[0] in ['Z', 'C', 'S', 'R']) \ # or vowels == 'V': # return 13 elif vowel == 'I': return 14 elif vowel == 'U': return 15 else: return None def has_char(self, ch): """ Args: ch: A unicode character Returns: bool: Whether rhyming information exists for this character """ return True def get_possible_tones(self, ch): """ Args: ch: A unicode character Returns: [int]: A list of possible tones """ final_tones = pypinyin.pinyin(ch, style=pypinyin.FINALS_TONE3, heteronym=True, errors=u'default')[0] # select results for first and only char tones = map(lambda final_tone: final_tone[-1], final_tones) filtered_tones = filter(unicode.isdigit, tones) tones_int = map(int, filtered_tones) # deduplication deduped_tones = [] for tone in tones_int: if tone not in deduped_tones: deduped_tones.append(tone) return deduped_tones def get_possible_vowels(self, ch): """ Args: ch: A unicode character Returns: [str]: A list of possible vowels """ vowels = pypinyin.pinyin(ch, style=pypinyin.FINALS, heteronym=True, errors=u'default')[0] # select results for first and only char return vowels def get_possible_tone_types(self, ch): """ Args: ch: 
A unicode character Returns: str: 'p' or 'z' or '*' representing possible tone types """ tones = self.get_possible_tones(ch) pin_tones = {1, 2} & set(tones) ze_tones = {3, 4} & set(tones) if pin_tones and ze_tones: return '*' elif pin_tones: return 'p' elif ze_tones: return 'z' else: raise Exception('No tones associated with the character') def get_possible_rhyme_categories(self, ch): """ Args: ch: A unicode character Returns: [int]: A list of possible rhyme categories """ vowels = self.get_possible_vowels(ch) rhyme_categories = map(self.get_rhyme_category, vowels) filtered_categories = filter(None, rhyme_categories) return filtered_categories def can_rhyme(self, ch_list): """
section(*rhyme_categories_list) result = True if common_categories else False return result class RhymeEvaluator: def __init__(self): self.rhyme_util = RhymeUtil() def score_tone(self, rule, sentences): tone_rule = rule['tone'] score = 0. max_score = float(len(sentences) * len(sentences[0])) for line_index, line in enumerate(sentences): for ch_index, ch in enumerate(line): expected_tone_type = tone_rule[line_index][ch_index] possible_tone_type = self.rhyme_util.get_possible_tone_types(ch) tone_type_set = {expected_tone_type, possible_tone_type} if '*' in tone_type_set or len(tone_type_set) == 1: score += 1. percentage_score = score / max_score return percentage_score def score_rhyme(self, rule, sentences): rhyme_rule = rule['rhyme'] rhyme_chars = [sentences[line_number][-1] for line_number in rhyme_rule] score = 1. if self.rhyme_util.can_rhyme(rhyme_chars) else 0. return score def score(self, rule, sentences, split=0.5, output_split=False): tone_score = self.score_tone(rule, sentences) rhyme_score = self.score_rhyme(rule, sentences) tone_weight = split rhyme_weight = 1. - split combined_score = tone_score * tone_weight + rhyme_score * rhyme_weight if output_split: return combined_score, tone_score, rhyme_score else: return combined_score def eval(self, sentences, output_all_scores=False, output_split=False): """ Args: sentences: A list of unicode strings Returns: float: A score from 0 to 1 """ # check 4 lines if len(sentences) != 4: return 0. # check all lines are either 5 or 7 characters and same number of characters sentence_lengths = set([len(sentence) for sentence in sentences]) sentence_length = list(sentence_lengths)[0] if len(sentence_lengths) != 1 or sentence_length not in [5, 7]: return 0. 
rules = tone_rules[sentence_length] scores = map(lambda rule: self.score(rule, sentences, output_split=output_split), rules) if output_split: max_combined = max([score[0] for score in scores]) max_tone = max([score[1] for score in scores]) max_rhyme = max([score[2] for score in scores]) max_score = (max_combined, max_tone, max_rhyme) else: max_score = max(scores) if output_all_scores: return max_score, scores else: return max_score
Args: ch_list: A list of unicode characters Returns: bool: Whether if a list of unicode characters can rhyme """ rhyme_categories_list = [set(self.get_possible_rhyme_categories(ch)) for ch in ch_list] common_categories = set.inter
ibacktest-result.ts
import { IBacktestDataPoint } from './ibacktest-data-point';
/**
 * Result of a single backtest run, as two parallel time series.
 */
export interface IBacktestResult {
    /** Data points for the rebalanced strategy. */
    rebalanceData: IBacktestDataPoint[];
    /** Data points for the holding (no-rebalance) strategy. */
    holdingData: IBacktestDataPoint[];
}
folder_permission_test.go
package api import ( "testing" "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/simplejson" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/dashboards" "github.com/grafana/grafana/pkg/services/guardian" . "github.com/smartystreets/goconvey/convey" ) func TestFolderPermissionApiEndpoint(t *testing.T) { Convey("Folder permissions test", t, func() { Convey("Given folder not exists", func() { mock := &fakeFolderService{ GetFolderByUIDError: m.ErrFolderNotFound, } origNewFolderService := dashboards.NewFolderService mockFolderService(mock) loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/folders/uid/permissions", "/api/folders/:uid/permissions", m.ROLE_EDITOR, func(sc *scenarioContext) { callGetFolderPermissions(sc) So(sc.resp.Code, ShouldEqual, 404) }) cmd := dtos.UpdateDashboardAclCommand{ Items: []dtos.DashboardAclUpdateItem{ {UserId: 1000, Permission: m.PERMISSION_ADMIN}, }, } updateFolderPermissionScenario("When calling POST on", "/api/folders/uid/permissions", "/api/folders/:uid/permissions", cmd, func(sc *scenarioContext) { callUpdateFolderPermissions(sc) So(sc.resp.Code, ShouldEqual, 404) }) Reset(func() { dashboards.NewFolderService = origNewFolderService }) }) Convey("Given user has no admin permissions", func() { origNewGuardian := guardian.New guardian.MockDashboardGuardian(&guardian.FakeDashboardGuardian{CanAdminValue: false}) mock := &fakeFolderService{ GetFolderByUIDResult: &m.Folder{ Id: 1, Uid: "uid", Title: "Folder", }, } origNewFolderService := dashboards.NewFolderService mockFolderService(mock) loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/folders/uid/permissions", "/api/folders/:uid/permissions", m.ROLE_EDITOR, func(sc *scenarioContext) { callGetFolderPermissions(sc) So(sc.resp.Code, ShouldEqual, 403) }) cmd := dtos.UpdateDashboardAclCommand{ Items: []dtos.DashboardAclUpdateItem{ {UserId: 1000, Permission: 
m.PERMISSION_ADMIN}, }, } updateFolderPermissionScenario("When calling POST on", "/api/folders/uid/permissions", "/api/folders/:uid/permissions", cmd, func(sc *scenarioContext) { callUpdateFolderPermissions(sc) So(sc.resp.Code, ShouldEqual, 403) }) Reset(func() { guardian.New = origNewGuardian dashboards.NewFolderService = origNewFolderService }) }) Convey("Given user has admin permissions and permissions to update", func() { origNewGuardian := guardian.New guardian.MockDashboardGuardian(&guardian.FakeDashboardGuardian{ CanAdminValue: true, CheckPermissionBeforeUpdateValue: true, GetAclValue: []*m.DashboardAclInfoDTO{ {OrgId: 1, DashboardId: 1, UserId: 2, Permission: m.PERMISSION_VIEW}, {OrgId: 1, DashboardId: 1, UserId: 3, Permission: m.PERMISSION_EDIT}, {OrgId: 1, DashboardId: 1, UserId: 4, Permission: m.PERMISSION_ADMIN}, {OrgId: 1, DashboardId: 1, TeamId: 1, Permission: m.PERMISSION_VIEW}, {OrgId: 1, DashboardId: 1, TeamId: 2, Permission: m.PERMISSION_ADMIN}, }, }) mock := &fakeFolderService{ GetFolderByUIDResult: &m.Folder{ Id: 1, Uid: "uid", Title: "Folder", }, } origNewFolderService := dashboards.NewFolderService mockFolderService(mock) loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/folders/uid/permissions", "/api/folders/:uid/permissions", m.ROLE_ADMIN, func(sc *scenarioContext) { callGetFolderPermissions(sc) So(sc.resp.Code, ShouldEqual, 200) respJSON, err := simplejson.NewJson(sc.resp.Body.Bytes()) So(err, ShouldBeNil) So(len(respJSON.MustArray()), ShouldEqual, 5) So(respJSON.GetIndex(0).Get("userId").MustInt(), ShouldEqual, 2) So(respJSON.GetIndex(0).Get("permission").MustInt(), ShouldEqual, m.PERMISSION_VIEW) }) cmd := dtos.UpdateDashboardAclCommand{ Items: []dtos.DashboardAclUpdateItem{ {UserId: 1000, Permission: m.PERMISSION_ADMIN}, }, } updateFolderPermissionScenario("When calling POST on", "/api/folders/uid/permissions", "/api/folders/:uid/permissions", cmd, func(sc *scenarioContext) { callUpdateFolderPermissions(sc) 
So(sc.resp.Code, ShouldEqual, 200) }) Reset(func() { guardian.New = origNewGuardian dashboards.NewFolderService = origNewFolderService }) }) Convey("When trying to update permissions with duplicate permissions", func() { origNewGuardian := guardian.New guardian.MockDashboardGuardian(&guardian.FakeDashboardGuardian{ CanAdminValue: true, CheckPermissionBeforeUpdateValue: false, CheckPermissionBeforeUpdateError: guardian.ErrGuardianPermissionExists, }) mock := &fakeFolderService{ GetFolderByUIDResult: &m.Folder{ Id: 1, Uid: "uid", Title: "Folder", }, } origNewFolderService := dashboards.NewFolderService mockFolderService(mock) cmd := dtos.UpdateDashboardAclCommand{ Items: []dtos.DashboardAclUpdateItem{ {UserId: 1000, Permission: m.PERMISSION_ADMIN}, }, } updateFolderPermissionScenario("When calling POST on", "/api/folders/uid/permissions", "/api/folders/:uid/permissions", cmd, func(sc *scenarioContext) { callUpdateFolderPermissions(sc) So(sc.resp.Code, ShouldEqual, 400) }) Reset(func() { guardian.New = origNewGuardian dashboards.NewFolderService = origNewFolderService }) }) Convey("When trying to override inherited permissions with lower presedence", func() { origNewGuardian := guardian.New guardian.MockDashboardGuardian(&guardian.FakeDashboardGuardian{ CanAdminValue: true, CheckPermissionBeforeUpdateValue: false, CheckPermissionBeforeUpdateError: guardian.ErrGuardianOverride}, ) mock := &fakeFolderService{ GetFolderByUIDResult: &m.Folder{ Id: 1, Uid: "uid", Title: "Folder", }, } origNewFolderService := dashboards.NewFolderService mockFolderService(mock) cmd := dtos.UpdateDashboardAclCommand{ Items: []dtos.DashboardAclUpdateItem{ {UserId: 1000, Permission: m.PERMISSION_ADMIN}, }, } updateFolderPermissionScenario("When calling POST on", "/api/folders/uid/permissions", "/api/folders/:uid/permissions", cmd, func(sc *scenarioContext) { callUpdateFolderPermissions(sc) So(sc.resp.Code, ShouldEqual, 400) }) Reset(func() { guardian.New = origNewGuardian 
dashboards.NewFolderService = origNewFolderService }) }) }) } func callGetFolderPermissions(sc *scenarioContext)
// callUpdateFolderPermissions registers a no-op bus handler for the ACL
// update command and performs the scenario's POST request.
func callUpdateFolderPermissions(sc *scenarioContext) {
	bus.AddHandler("test", func(cmd *m.UpdateDashboardAclCommand) error {
		return nil
	})

	sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec()
}

// updateFolderPermissionScenario wraps fn in a Convey scenario whose POST
// route dispatches the given ACL command to UpdateFolderPermissions with a
// fixed test org/user identity.
func updateFolderPermissionScenario(desc string, url string, routePattern string, cmd dtos.UpdateDashboardAclCommand, fn scenarioFunc) {
	Convey(desc+" "+url, func() {
		// Handlers registered by the scenario must not leak into other tests.
		defer bus.ClearBusHandlers()

		sc := setupScenarioContext(url)
		sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
			sc.context = c
			sc.context.OrgId = TestOrgID
			sc.context.UserId = TestUserID

			return UpdateFolderPermissions(c, cmd)
		})

		sc.m.Post(routePattern, sc.defaultHandler)

		fn(sc)
	})
}
{
	// Route the scenario's GET through the permission-list handler and
	// execute the fake request.
	sc.handlerFunc = GetFolderPermissionList
	sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec()
}
thread.rs
use std::ptr::NonNull; use std::collections::HashMap; use crate::page::{ DEFAULT_PAGE_CAP, ThreadsRef }; use crate::loom::sync::{ Arc, Mutex }; use crate::loom::cell::UnsafeCell; #[cfg(feature = "loom")] use loom::thread_local; pub struct ThreadHandle(Arc<Mutex<HashMap<ThreadsRef, Dtor>>>); #[cfg(all(feature = "parking_lot", not(feature = "loom")))] static THREAD_ID_POOL: Mutex<ThreadIdPool> = Mutex::new(ThreadIdPool::new()); #[cfg(not(feature = "parking_lot"))] static THREAD_ID_POOL: once_cell::sync::Lazy<Mutex<ThreadIdPool>> = once_cell::sync::Lazy::new(|| Mutex::new(ThreadIdPool::new())); thread_local!{ static THREAD_STATE: ThreadState = ThreadState::new(); } struct ThreadIdPool { max: usize, small_freelist: Vec<usize>, // TODO use heapless vec slow_freelist: Vec<usize> } struct ThreadState { id: usize, list: Arc<Mutex<HashMap<ThreadsRef, Dtor>>> } struct Dtor {
impl ThreadIdPool {
    const fn new() -> ThreadIdPool {
        ThreadIdPool {
            max: 0,
            small_freelist: Vec::new(),
            slow_freelist: Vec::new()
        }
    }

    /// Hand out a thread id, preferring recycled ids over minting new ones.
    fn alloc(&mut self) -> usize {
        if let Some(id) = self.small_freelist.pop()
            .or_else(|| self.slow_freelist.pop())
        {
            // Once the slow freelist has drained and the small freelist is
            // below half a page, give the slow list's spare capacity back
            // to the allocator.
            if self.slow_freelist.capacity() != 0
                && self.slow_freelist.is_empty()
                && self.small_freelist.len() < DEFAULT_PAGE_CAP / 2
            {
                self.slow_freelist.shrink_to_fit();
            }

            id
        } else {
            // No recycled id available: mint the next one, panicking on
            // (practically unreachable) overflow.
            let id = self.max;
            self.max = id.checked_add(1).expect("thread id overflow");
            id
        }
    }

    /// Return a thread id to the pool; small ids go onto the fast list.
    fn dealloc(&mut self, id: usize) {
        if id <= DEFAULT_PAGE_CAP {
            self.small_freelist.push(id);
        } else {
            self.slow_freelist.push(id)
        }
    }
}

impl ThreadState {
    /// Per-thread state: a pool-allocated id plus the map of registered
    /// destructors for this thread's slots.
    fn new() -> ThreadState {
        ThreadState {
            id: THREAD_ID_POOL.lock().unwrap().alloc(),
            list: Arc::new(Mutex::new(HashMap::new()))
        }
    }
}

impl Dtor {
    /// Build a type-erased destructor for the slot behind `ptr`.
    fn new<T: 'static>(ptr: NonNull<UnsafeCell<Option<T>>>) -> Dtor {
        // Monomorphized drop routine; `ptr` is cast back to its real type
        // and the value is taken out of the cell so it is dropped here.
        unsafe fn try_drop<T: 'static>(ptr: *mut ()) {
            let obj = &mut *ptr.cast::<UnsafeCell<Option<T>>>();
            obj.with_mut(|val| {
                &mut *val
            }.take());
        }

        Dtor {
            ptr: ptr.cast(),
            drop: try_drop::<T>
        }
    }

    /// Run the erased destructor.
    ///
    /// # Safety
    ///
    /// `self.ptr` must still point at a live slot of the type this `Dtor`
    /// was created for.
    unsafe fn drop(&self) {
        (self.drop)(self.ptr.as_ptr())
    }
}

impl Drop for ThreadState {
    fn drop(&mut self) {
        // On thread exit: run every registered destructor, unregister this
        // thread from each tracked page, then recycle the thread id.
        let mut list = self.list.lock().unwrap();
        for (tr, dtor) in list.drain() {
            unsafe {
                dtor.drop();

                // # Safety
                //
                // because storage will ensure that all tracked `ThreadsRef` are valid.
                tr.remove(self.id);
            }
        }

        THREAD_ID_POOL.lock().unwrap()
            .dealloc(self.id);
    }
}

impl ThreadHandle {
    /// Drop and untrack the value registered for `tr`, if any.
    ///
    /// # Safety
    ///
    /// The pointer captured by the stored `Dtor` must still be valid.
    pub unsafe fn release(&self, tr: &ThreadsRef) {
        // Take the dtor out while holding the lock, but run it after the
        // lock is released.
        let dtor = {
            self.0.lock()
                .unwrap()
                .remove(tr)
        };

        if let Some(dtor) = dtor {
            dtor.drop();
        }
    }
}

/// Current thread's pool-assigned id.
#[inline]
pub fn get() -> usize {
    THREAD_STATE.with(|state| state.id)
}

/// Register `ptr` to be dropped when the current thread exits, returning a
/// handle that can release it earlier.
///
/// # Safety
///
/// `ptr` must remain valid until it is released or the thread ends.
pub unsafe fn push<T: 'static>(tr: ThreadsRef, ptr: NonNull<UnsafeCell<Option<T>>>) -> ThreadHandle {
    let dtor = Dtor::new(ptr);

    THREAD_STATE.with(|state| {
        state.list.lock()
            .unwrap()
            .insert(tr, dtor);
        ThreadHandle(Arc::clone(&state.list))
    })
}
    // Type-erased pointer to the slot whose value must be dropped.
    ptr: NonNull<()>,
    // Monomorphized destructor invoked on `ptr` when the thread exits.
    drop: unsafe fn(*mut ())
}
monster_generated.rs
// automatically generated by the FlatBuffers compiler, do not modify use std::mem; use std::cmp::Ordering; extern crate flatbuffers; use self::flatbuffers::EndianScalar; #[allow(unused_imports, dead_code)] pub mod my_game { use std::mem; use std::cmp::Ordering; extern crate flatbuffers; use self::flatbuffers::EndianScalar; #[allow(unused_imports, dead_code)] pub mod sample { use std::mem; use std::cmp::Ordering; extern crate flatbuffers; use self::flatbuffers::EndianScalar; #[allow(non_camel_case_types)] #[repr(i8)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] pub enum Color { Red = 0, Green = 1, Blue = 2, } pub const ENUM_MIN_COLOR: i8 = 0; pub const ENUM_MAX_COLOR: i8 = 2; impl<'a> flatbuffers::Follow<'a> for Color { type Inner = Self; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { flatbuffers::read_scalar_at::<Self>(buf, loc) } } impl flatbuffers::EndianScalar for Color { #[inline] fn to_little_endian(self) -> Self { let n = i8::to_le(self as i8); let p = &n as *const i8 as *const Color; unsafe { *p } } #[inline] fn from_little_endian(self) -> Self { let n = i8::from_le(self as i8); let p = &n as *const i8 as *const Color; unsafe { *p } } } impl flatbuffers::Push for Color { type Output = Color; #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { flatbuffers::emplace_scalar::<Color>(dst, *self); } } #[allow(non_camel_case_types)] pub const ENUM_VALUES_COLOR: [Color; 3] = [ Color::Red, Color::Green, Color::Blue ]; #[allow(non_camel_case_types)] pub const ENUM_NAMES_COLOR: [&str; 3] = [ "Red", "Green", "Blue" ]; pub fn enum_name_color(e: Color) -> &'static str { let index = e as i8; ENUM_NAMES_COLOR[index as usize] } #[allow(non_camel_case_types)] #[repr(u8)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] pub enum Equipment { NONE = 0, Weapon = 1, } pub const ENUM_MIN_EQUIPMENT: u8 = 0; pub const ENUM_MAX_EQUIPMENT: u8 = 1; impl<'a> flatbuffers::Follow<'a> for Equipment { type Inner = Self; 
#[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { flatbuffers::read_scalar_at::<Self>(buf, loc) } } impl flatbuffers::EndianScalar for Equipment { #[inline] fn to_little_endian(self) -> Self { let n = u8::to_le(self as u8); let p = &n as *const u8 as *const Equipment; unsafe { *p } } #[inline] fn from_little_endian(self) -> Self { let n = u8::from_le(self as u8); let p = &n as *const u8 as *const Equipment; unsafe { *p } } } impl flatbuffers::Push for Equipment { type Output = Equipment; #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { flatbuffers::emplace_scalar::<Equipment>(dst, *self); } } #[allow(non_camel_case_types)] pub const ENUM_VALUES_EQUIPMENT: [Equipment; 2] = [ Equipment::NONE, Equipment::Weapon ]; #[allow(non_camel_case_types)] pub const ENUM_NAMES_EQUIPMENT: [&str; 2] = [ "NONE", "Weapon" ]; pub fn
(e: Equipment) -> &'static str { let index = e as u8; ENUM_NAMES_EQUIPMENT[index as usize] } pub struct EquipmentUnionTableOffset {} // struct Vec3, aligned to 4 #[repr(C, align(4))] #[derive(Clone, Copy, Debug, PartialEq)] pub struct Vec3 { x_: f32, y_: f32, z_: f32, } // pub struct Vec3 impl flatbuffers::SafeSliceAccess for Vec3 {} impl<'a> flatbuffers::Follow<'a> for Vec3 { type Inner = &'a Vec3; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { <&'a Vec3>::follow(buf, loc) } } impl<'a> flatbuffers::Follow<'a> for &'a Vec3 { type Inner = &'a Vec3; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { flatbuffers::follow_cast_ref::<Vec3>(buf, loc) } } impl<'b> flatbuffers::Push for Vec3 { type Output = Vec3; #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { let src = unsafe { ::std::slice::from_raw_parts(self as *const Vec3 as *const u8, Self::size()) }; dst.copy_from_slice(src); } } impl<'b> flatbuffers::Push for &'b Vec3 { type Output = Vec3; #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { let src = unsafe { ::std::slice::from_raw_parts(*self as *const Vec3 as *const u8, Self::size()) }; dst.copy_from_slice(src); } } impl Vec3 { pub fn new(_x: f32, _y: f32, _z: f32) -> Self { Vec3 { x_: _x.to_little_endian(), y_: _y.to_little_endian(), z_: _z.to_little_endian(), } } pub fn x(&self) -> f32 { self.x_.from_little_endian() } pub fn y(&self) -> f32 { self.y_.from_little_endian() } pub fn z(&self) -> f32 { self.z_.from_little_endian() } } pub enum MonsterOffset {} #[derive(Copy, Clone, Debug, PartialEq)] pub struct Monster<'a> { pub _tab: flatbuffers::Table<'a>, } impl<'a> flatbuffers::Follow<'a> for Monster<'a> { type Inner = Monster<'a>; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { Self { _tab: flatbuffers::Table { buf, loc } } } } impl<'a> Monster<'a> { #[inline] pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { Monster { _tab: table, } } #[allow(unused_mut)] pub fn create<'bldr: 'args, 
'args: 'mut_bldr, 'mut_bldr>( _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, args: &'args MonsterArgs<'args>) -> flatbuffers::WIPOffset<Monster<'bldr>> { let mut builder = MonsterBuilder::new(_fbb); if let Some(x) = args.path { builder.add_path(x); } if let Some(x) = args.equipped { builder.add_equipped(x); } if let Some(x) = args.weapons { builder.add_weapons(x); } if let Some(x) = args.inventory { builder.add_inventory(x); } if let Some(x) = args.name { builder.add_name(x); } if let Some(x) = args.pos { builder.add_pos(x); } builder.add_hp(args.hp); builder.add_mana(args.mana); builder.add_equipped_type(args.equipped_type); builder.add_color(args.color); builder.finish() } pub const VT_POS: flatbuffers::VOffsetT = 4; pub const VT_MANA: flatbuffers::VOffsetT = 6; pub const VT_HP: flatbuffers::VOffsetT = 8; pub const VT_NAME: flatbuffers::VOffsetT = 10; pub const VT_INVENTORY: flatbuffers::VOffsetT = 14; pub const VT_COLOR: flatbuffers::VOffsetT = 16; pub const VT_WEAPONS: flatbuffers::VOffsetT = 18; pub const VT_EQUIPPED_TYPE: flatbuffers::VOffsetT = 20; pub const VT_EQUIPPED: flatbuffers::VOffsetT = 22; pub const VT_PATH: flatbuffers::VOffsetT = 24; #[inline] pub fn pos(&self) -> Option<&'a Vec3> { self._tab.get::<Vec3>(Monster::VT_POS, None) } #[inline] pub fn mana(&self) -> i16 { self._tab.get::<i16>(Monster::VT_MANA, Some(150)).unwrap() } #[inline] pub fn hp(&self) -> i16 { self._tab.get::<i16>(Monster::VT_HP, Some(100)).unwrap() } #[inline] pub fn name(&self) -> Option<&'a str> { self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(Monster::VT_NAME, None) } #[inline] pub fn inventory(&self) -> Option<&'a [u8]> { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u8>>>(Monster::VT_INVENTORY, None).map(|v| v.safe_slice()) } #[inline] pub fn color(&self) -> Color { self._tab.get::<Color>(Monster::VT_COLOR, Some(Color::Blue)).unwrap() } #[inline] pub fn weapons(&self) -> Option<flatbuffers::Vector<'a, 
flatbuffers::ForwardsUOffset<Weapon<'a>>>> { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<flatbuffers::ForwardsUOffset<Weapon<'a>>>>>(Monster::VT_WEAPONS, None) } #[inline] pub fn equipped_type(&self) -> Equipment { self._tab.get::<Equipment>(Monster::VT_EQUIPPED_TYPE, Some(Equipment::NONE)).unwrap() } #[inline] pub fn equipped(&self) -> Option<flatbuffers::Table<'a>> { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Table<'a>>>(Monster::VT_EQUIPPED, None) } #[inline] pub fn path(&self) -> Option<&'a [Vec3]> { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<Vec3>>>(Monster::VT_PATH, None).map(|v| v.safe_slice() ) } #[inline] #[allow(non_snake_case)] pub fn equipped_as_weapon(&self) -> Option<Weapon<'a>> { if self.equipped_type() == Equipment::Weapon { self.equipped().map(Weapon::init_from_table) } else { None } } } pub struct MonsterArgs<'a> { pub pos: Option<&'a Vec3>, pub mana: i16, pub hp: i16, pub name: Option<flatbuffers::WIPOffset<&'a str>>, pub inventory: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, u8>>>, pub color: Color, pub weapons: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Weapon<'a>>>>>, pub equipped_type: Equipment, pub equipped: Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>>, pub path: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, Vec3>>>, } impl<'a> Default for MonsterArgs<'a> { #[inline] fn default() -> Self { MonsterArgs { pos: None, mana: 150, hp: 100, name: None, inventory: None, color: Color::Blue, weapons: None, equipped_type: Equipment::NONE, equipped: None, path: None, } } } pub struct MonsterBuilder<'a: 'b, 'b> { fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>, } impl<'a: 'b, 'b> MonsterBuilder<'a, 'b> { #[inline] pub fn add_pos(&mut self, pos: &Vec3) { self.fbb_.push_slot_always::<&Vec3>(Monster::VT_POS, pos); } #[inline] pub fn add_mana(&mut self, 
mana: i16) { self.fbb_.push_slot::<i16>(Monster::VT_MANA, mana, 150); } #[inline] pub fn add_hp(&mut self, hp: i16) { self.fbb_.push_slot::<i16>(Monster::VT_HP, hp, 100); } #[inline] pub fn add_name(&mut self, name: flatbuffers::WIPOffset<&'b str>) { self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_NAME, name); } #[inline] pub fn add_inventory(&mut self, inventory: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u8>>) { self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_INVENTORY, inventory); } #[inline] pub fn add_color(&mut self, color: Color) { self.fbb_.push_slot::<Color>(Monster::VT_COLOR, color, Color::Blue); } #[inline] pub fn add_weapons(&mut self, weapons: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<Weapon<'b >>>>) { self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_WEAPONS, weapons); } #[inline] pub fn add_equipped_type(&mut self, equipped_type: Equipment) { self.fbb_.push_slot::<Equipment>(Monster::VT_EQUIPPED_TYPE, equipped_type, Equipment::NONE); } #[inline] pub fn add_equipped(&mut self, equipped: flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>) { self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_EQUIPPED, equipped); } #[inline] pub fn add_path(&mut self, path: flatbuffers::WIPOffset<flatbuffers::Vector<'b , Vec3>>) { self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_PATH, path); } #[inline] pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> MonsterBuilder<'a, 'b> { let start = _fbb.start_table(); MonsterBuilder { fbb_: _fbb, start_: start, } } #[inline] pub fn finish(self) -> flatbuffers::WIPOffset<Monster<'a>> { let o = self.fbb_.end_table(self.start_); flatbuffers::WIPOffset::new(o.value()) } } pub enum WeaponOffset {} #[derive(Copy, Clone, Debug, PartialEq)] pub struct Weapon<'a> { pub _tab: flatbuffers::Table<'a>, } impl<'a> flatbuffers::Follow<'a> for Weapon<'a> { type Inner = Weapon<'a>; #[inline] fn 
follow(buf: &'a [u8], loc: usize) -> Self::Inner { Self { _tab: flatbuffers::Table { buf, loc } } } } impl<'a> Weapon<'a> { #[inline] pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { Weapon { _tab: table, } } #[allow(unused_mut)] pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, args: &'args WeaponArgs<'args>) -> flatbuffers::WIPOffset<Weapon<'bldr>> { let mut builder = WeaponBuilder::new(_fbb); if let Some(x) = args.name { builder.add_name(x); } builder.add_damage(args.damage); builder.finish() } pub const VT_NAME: flatbuffers::VOffsetT = 4; pub const VT_DAMAGE: flatbuffers::VOffsetT = 6; #[inline] pub fn name(&self) -> Option<&'a str> { self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(Weapon::VT_NAME, None) } #[inline] pub fn damage(&self) -> i16 { self._tab.get::<i16>(Weapon::VT_DAMAGE, Some(0)).unwrap() } } pub struct WeaponArgs<'a> { pub name: Option<flatbuffers::WIPOffset<&'a str>>, pub damage: i16, } impl<'a> Default for WeaponArgs<'a> { #[inline] fn default() -> Self { WeaponArgs { name: None, damage: 0, } } } pub struct WeaponBuilder<'a: 'b, 'b> { fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>, } impl<'a: 'b, 'b> WeaponBuilder<'a, 'b> { #[inline] pub fn add_name(&mut self, name: flatbuffers::WIPOffset<&'b str>) { self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Weapon::VT_NAME, name); } #[inline] pub fn add_damage(&mut self, damage: i16) { self.fbb_.push_slot::<i16>(Weapon::VT_DAMAGE, damage, 0); } #[inline] pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> WeaponBuilder<'a, 'b> { let start = _fbb.start_table(); WeaponBuilder { fbb_: _fbb, start_: start, } } #[inline] pub fn finish(self) -> flatbuffers::WIPOffset<Weapon<'a>> { let o = self.fbb_.end_table(self.start_); flatbuffers::WIPOffset::new(o.value()) } } #[inline] pub fn get_root_as_monster<'a>(buf: &'a [u8]) -> Monster<'a> 
{ flatbuffers::get_root::<Monster<'a>>(buf) } #[inline] pub fn get_size_prefixed_root_as_monster<'a>(buf: &'a [u8]) -> Monster<'a> { flatbuffers::get_size_prefixed_root::<Monster<'a>>(buf) } #[inline] pub fn finish_monster_buffer<'a, 'b>( fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, root: flatbuffers::WIPOffset<Monster<'a>>) { fbb.finish(root, None); } #[inline] pub fn finish_size_prefixed_monster_buffer<'a, 'b>(fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, root: flatbuffers::WIPOffset<Monster<'a>>) { fbb.finish_size_prefixed(root, None); } } // pub mod Sample } // pub mod MyGame
enum_name_equipment
applyMods.js
import flattenMods from './flattenMods'; import identity from './identity'; export default function
(base, mods) { if(!mods) { return base; } if(!Array.isArray(mods) && typeof mods !== 'object') { mods = [mods]; } else if(!Array.isArray(mods) && typeof mods === 'object') { mods = flattenMods(mods); } return [base].concat( mods.filter(identity).map(mod => `${base}--${mod}`) ).join(' '); }
applyMods
api_errors.rs
use std::fmt; use thiserror::Error; use crate::errors::AsError; use crate::utils::Path; #[derive(Debug, Error)] pub struct GenericError { pub bucket: String, pub object: String, pub version_id: String, #[source] pub err: Option<anyhow::Error>, } impl GenericError { fn format_err(&self) -> String { match &self.err { Some(err) => format!(": ({})", err), None => "".to_string(), } } } impl fmt::Display for GenericError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let err = match &self.err { Some(err) => format!(": ({})", err), None => "".to_string(), }; write!( f, "{}/{}({}): {}", self.bucket, self.object, self.version_id, err, ) } } #[derive(Debug, Error)] #[non_exhaustive] pub enum ApiError { #[error("The request signature we calculated does not match the signature you provided. Check your key and signing method.")] SignatureDoesNotMatch, #[error("Storage reached its minimum free disk threshold.")] StorageFull, #[error("Please reduce your request rate")] SlowDown, #[error("Storage resources are insufficient for the read operation {}/{}", .0.bucket, .0.object)] InsufficientReadQuorum(GenericError), #[error("Storage resources are insufficient for the write operation {}/{}", .0.bucket, .0.object)] InsufficientWriteQuorum(GenericError), #[error("Invalid arguments provided for {}/{}{}", .0.bucket, .0.object, .0.format_err())] InvalidArgument(GenericError), #[error("Bucket not found: {}", .0.bucket)] BucketNotFound(GenericError), #[error("The requested bucket name is not available. The bucket namespace is shared by all users of the system. 
Please select a different name and try again.")] BucketAlreadyExists(GenericError), #[error("Bucket already owned by you: {}", .0.bucket)] BucketAlreadyOwnedByYou(GenericError), #[error("Bucket not empty: {}", .0.bucket)] BucketNotEmpty(GenericError), #[error("Invalid version id: {}/{}({})", .0.bucket, .0.object, .0.version_id)] InvalidVersionID(GenericError), #[error("Version not found: {}/{}({})", .0.bucket, .0.object, .0.version_id)] VersionNotFound(GenericError), #[error("Object not found: {}/{}", .0.bucket, .0.object)] ObjectNotFound(GenericError), #[error("Method not allowed: {}/{}", .0.bucket, .0.object)] MethodNotAllowed(GenericError), #[error("Object {}/{} already exists", .0.bucket, .0.object)] ObjectAlreadyExists(GenericError), #[error("Object exists on {} as directory {}", .0.bucket, .0.object)] ObjectExistsAsDirectory(GenericError), #[error("Prefix access is denied: {}/{}", .0.bucket, .0.object)] PrefixAccessDenied(GenericError), #[error("Parent is object: {}/{}", .0.bucket, Path::new(&.0.object).parent().map(|v| v.to_str()).flatten().unwrap_or(""))] ParentIsObject(GenericError), #[error("Bucket exists: {}", .0.bucket)] BucketExists(GenericError), #[error("Invalid combination of uploadID marker '{}' and marker '{}'", .upload_id_marker, .key_marker)] InvalidUploadIDKeyCombination { upload_id_marker: String, key_marker: String, }, #[error("Invalid combination of marker '{}' and prefix '{}'", .marker, .prefix)] InvalidMarkerPrefixCombination { marker: String, prefix: String }, #[error("No bucket policy configuration found for bucket: {}", .0.bucket)] BucketPolicyNotFound(GenericError), #[error("No bucket lifecycle configuration found for bucket : {}", .0.bucket)] BucketLifecycleNotFound(GenericError), #[error("No bucket encryption configuration found for bucket: {}", .0.bucket)] BucketSSEConfigNotFound(GenericError), #[error("No bucket tags found for bucket: {}", .0.bucket)] BucketTaggingNotFound(GenericError), #[error("No bucket object lock configuration 
found for bucket: {}", .0.bucket)] BucketObjectLockConfigNotFound(GenericError), #[error("No quota config found for bucket : {}", .0.bucket)] BucketQuotaConfigNotFound(GenericError), #[error("Bucket quota exceeded for bucket: {}", .0.bucket)] BucketQuotaExceeded(GenericError), #[error("The replication configuration was not found: {}", .0.bucket)] BucketReplicationConfigNotFound(GenericError), #[error("Destination bucket does not exist: {}", .0.bucket)] BucketRemoteDestinationNotFound(GenericError), #[error("Destination bucket does not have object lock enabled: {}", .0.bucket)] BucketReplicationDestinationMissingLock(GenericError), #[error("Remote target not found: {}", .0.bucket)] BucketRemoteTargetNotFound(GenericError), #[error("Remote service endpoint or target bucket {} not available: {}", .0.bucket, .0.format_err())] BucketRemoteConnectionErr(GenericError), #[error("Remote already exists for this bucket: {}", .0.bucket)] BucketRemoteAlreadyExists(GenericError), #[error("Remote with this label already exists for this bucket: {}", .0.bucket)] BucketRemoteLabelInUse(GenericError), #[error("Remote ARN type not valid: {}", .0.bucket)] BucketRemoteArnTypeInvalid(GenericError), #[error("Remote ARN has invalid format: {}", .0.bucket)] BucketRemoteArnInvalid(GenericError), #[error("Replication configuration exists with this ARN: {}", .0.bucket)] BucketRemoteRemoveDisallowed(GenericError), #[error("Remote target does not have versioning enabled: {}", .0.bucket)] BucketRemoteTargetNotVersioned(GenericError), #[error("Replication source does not have versioning enabled: {}", .0.bucket)] BucketReplicationSourceNotVersioned(GenericError), #[error("Transition storage class not found")] TransitionStorageClassNotFound(GenericError), #[error("The operation is not valid for the current state of the object {0}")] InvalidObjectState(GenericError), #[error("Bucket name invalid: {}", .0.bucket)] BucketNameInvalid(GenericError), #[error("Object name invalid: {}/{}", .0.bucket, 
.0.object)] ObjectNameInvalid(GenericError), #[error("Object name too long: {}/{}", .0.bucket, .0.object)] ObjectNameTooLong(GenericError), #[error("Object name contains forward slash as pefix: {}/{}", .0.bucket, .0.object)] ObjectNamePrefixAsSlash(GenericError), #[error("All access to this object has been disabled")] AllAccessDisabled(GenericError), #[error("{}/{} has incomplete body", .0.bucket, .0.object)] IncompleteBody(GenericError), #[error("The requested range \"bytes {offset_begin} -> {offset_end} of {resource_size}\" is not satisfiable.")] InvalidRange { offset_begin: usize, offset_end: usize, resource_size: usize, }, #[error("Size of the object greater than what is allowed(5G)")] ObjectTooLarge(GenericError), #[error("Size of the object less than what is expected")] ObjectTooSmall(GenericError), #[error("Operation timed out")] OperationTimedOut, #[error("Malformed upload id {upload_id}")] MalformedUploadID { upload_id: String }, #[error("Invalid upload id {upload_id}")] InvalidUploadID { bucket: String, object: String, upload_id: String, }, #[error("Specified part could not be found. 
PART_NUMBER {part_number}, Expected {exp_etag}, got {got_etag}")] InvalidPart { part_number: String, exp_etag: String, got_etag: String, }, #[error("Part size for {part_number} should be at least 5MB")] PartTooSmall { part_size: usize, part_number: usize, part_etag: String, }, #[error("Part size bigger than the allowed limit")] PartTooBig, #[error("ETag of the object has changed")] InvalidEtag, #[error("{message}")] NotImplemented { message: String }, #[error("Unsupported headers in Metadata")] UnsupportedMetadata, #[error("Backend down")] BackendDown, #[error("At least one of the pre-conditions you specified did not hold")] PreConditionFailed, } pub fn is_bucket_not_found(err: &anyhow::Error) -> bool { match err.as_error::<ApiError>() { Some(e) => { if let ApiError::BucketNotFound(_) = e { return true; } } _ => {} } false } pub fn is_object_not_found(err: &anyhow::Error) -> bool { match err.as_error::<ApiError>() { Some(e) => { if let ApiError::ObjectNotFound(_) = e { return true; } } _ => {} } false } pub fn
(err: &anyhow::Error) -> bool { match err.as_error::<ApiError>() { Some(e) => { if let ApiError::VersionNotFound(_) = e { return true; } } _ => {} } false } pub fn is_signature_not_match(err: &anyhow::Error) -> bool { match err.as_error::<ApiError>() { Some(e) => { if let ApiError::SignatureDoesNotMatch = e { return true; } } _ => {} } false }
is_version_not_found
ip.go
package ip import ( "fmt" "io/ioutil" "net" "net/http"
//go:generate mockgen -destination=mocks/http_client_mock.go -package=mocks github.com/omegion/go-ddclient/internal/ip HTTPClient // HTTPClient is an interface for http client. type HTTPClient interface { Get(url string) (resp *http.Response, err error) } // IP is struct for IP address. type IP struct { Client HTTPClient Provider Provider Address net.IP } // Check checks IP address with given provider. func (i *IP) Check() error { resp, err := i.Client.Get(i.Provider.GetURL().String()) if err != nil { return err } data, err := ioutil.ReadAll(resp.Body) if err != nil { return err } log.Debugln(fmt.Sprintf("IP provider %s called successfully.", i.Provider.GetName())) defer resp.Body.Close() i.Address, err = i.Provider.ExtractIP(data) if err != nil { return err } log.Debugln(fmt.Sprintf("IP provider %s returned IP %s.", i.Provider.GetName(), i.Address.String())) return nil }
log "github.com/sirupsen/logrus" ) //nolint:lll // go generate is ugly.
pprust.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. pub use self::AnnNode::*; use abi; use ast::{self, TokenTree}; use ast::{RegionTyParamBound, TraitTyParamBound, TraitBoundModifier}; use ast::Attribute; use attr::ThinAttributesExt; use ast_util; use util::parser::AssocOp; use attr; use owned_slice::OwnedSlice; use attr::{AttrMetaMethods, AttributeMethods}; use codemap::{self, CodeMap, BytePos}; use diagnostic; use parse::token::{self, BinOpToken, Token, InternedString}; use parse::lexer::comments; use parse; use print::pp::{self, break_offset, word, space, zerobreak, hardbreak}; use print::pp::{Breaks, eof}; use print::pp::Breaks::{Consistent, Inconsistent}; use ptr::P; use std_inject; use std::ascii; use std::io::{self, Write, Read}; use std::iter; pub enum AnnNode<'a> { NodeIdent(&'a ast::Ident), NodeName(&'a ast::Name), NodeBlock(&'a ast::Block), NodeItem(&'a ast::Item), NodeSubItem(ast::NodeId), NodeExpr(&'a ast::Expr), NodePat(&'a ast::Pat), } pub trait PpAnn { fn pre(&self, _state: &mut State, _node: AnnNode) -> io::Result<()> { Ok(()) } fn post(&self, _state: &mut State, _node: AnnNode) -> io::Result<()> { Ok(()) } } #[derive(Copy, Clone)] pub struct NoAnn; impl PpAnn for NoAnn {} #[derive(Copy, Clone)] pub struct CurrentCommentAndLiteral { pub cur_cmnt: usize, pub cur_lit: usize, } pub struct State<'a> { pub s: pp::Printer<'a>, cm: Option<&'a CodeMap>, comments: Option<Vec<comments::Comment> >, literals: Option<Vec<comments::Literal> >, cur_cmnt_and_lit: CurrentCommentAndLiteral, boxes: Vec<pp::Breaks>, ann: &'a (PpAnn+'a), } pub fn 
rust_printer<'a>(writer: Box<Write+'a>) -> State<'a> { static NO_ANN: NoAnn = NoAnn; rust_printer_annotated(writer, &NO_ANN) } pub fn rust_printer_annotated<'a>(writer: Box<Write+'a>, ann: &'a PpAnn) -> State<'a> { State { s: pp::mk_printer(writer, DEFAULT_COLUMNS), cm: None, comments: None, literals: None, cur_cmnt_and_lit: CurrentCommentAndLiteral { cur_cmnt: 0, cur_lit: 0 }, boxes: Vec::new(), ann: ann, } } pub const INDENT_UNIT: usize = 4; pub const DEFAULT_COLUMNS: usize = 78; /// Requires you to pass an input filename and reader so that /// it can scan the input text for comments and literals to /// copy forward. pub fn print_crate<'a>(cm: &'a CodeMap, span_diagnostic: &diagnostic::SpanHandler, krate: &ast::Crate, filename: String, input: &mut Read, out: Box<Write+'a>, ann: &'a PpAnn, is_expanded: bool) -> io::Result<()> { let mut s = State::new_from_input(cm, span_diagnostic, filename, input, out, ann, is_expanded); if is_expanded && !std_inject::no_std(krate) { // We need to print `#![no_std]` (and its feature gate) so that // compiling pretty-printed source won't inject libstd again. // However we don't want these attributes in the AST because // of the feature gate, so we fake them up here. 
// #![feature(prelude_import)] let prelude_import_meta = attr::mk_word_item(InternedString::new("prelude_import")); let list = attr::mk_list_item(InternedString::new("feature"), vec![prelude_import_meta]); let fake_attr = attr::mk_attr_inner(attr::mk_attr_id(), list); try!(s.print_attribute(&fake_attr)); // #![no_std] let no_std_meta = attr::mk_word_item(InternedString::new("no_std")); let fake_attr = attr::mk_attr_inner(attr::mk_attr_id(), no_std_meta); try!(s.print_attribute(&fake_attr)); } try!(s.print_mod(&krate.module, &krate.attrs)); try!(s.print_remaining_comments()); eof(&mut s.s) } impl<'a> State<'a> { pub fn new_from_input(cm: &'a CodeMap, span_diagnostic: &diagnostic::SpanHandler, filename: String, input: &mut Read, out: Box<Write+'a>, ann: &'a PpAnn, is_expanded: bool) -> State<'a> { let (cmnts, lits) = comments::gather_comments_and_literals( span_diagnostic, filename, input); State::new( cm, out, ann, Some(cmnts), // If the code is post expansion, don't use the table of // literals, since it doesn't correspond with the literals // in the AST anymore. 
if is_expanded { None } else { Some(lits) }) } pub fn new(cm: &'a CodeMap, out: Box<Write+'a>, ann: &'a PpAnn, comments: Option<Vec<comments::Comment>>, literals: Option<Vec<comments::Literal>>) -> State<'a> { State { s: pp::mk_printer(out, DEFAULT_COLUMNS), cm: Some(cm), comments: comments, literals: literals, cur_cmnt_and_lit: CurrentCommentAndLiteral { cur_cmnt: 0, cur_lit: 0 }, boxes: Vec::new(), ann: ann, } } } pub fn to_string<F>(f: F) -> String where F: FnOnce(&mut State) -> io::Result<()>, { let mut wr = Vec::new(); { let mut printer = rust_printer(Box::new(&mut wr)); f(&mut printer).unwrap(); eof(&mut printer.s).unwrap(); } String::from_utf8(wr).unwrap() } pub fn binop_to_string(op: BinOpToken) -> &'static str { match op { token::Plus => "+", token::Minus => "-", token::Star => "*", token::Slash => "/", token::Percent => "%", token::Caret => "^", token::And => "&", token::Or => "|", token::Shl => "<<", token::Shr => ">>", } } pub fn token_to_string(tok: &Token) -> String { match *tok { token::Eq => "=".to_string(), token::Lt => "<".to_string(), token::Le => "<=".to_string(), token::EqEq => "==".to_string(), token::Ne => "!=".to_string(), token::Ge => ">=".to_string(), token::Gt => ">".to_string(), token::Not => "!".to_string(), token::Tilde => "~".to_string(), token::OrOr => "||".to_string(), token::AndAnd => "&&".to_string(), token::BinOp(op) => binop_to_string(op).to_string(), token::BinOpEq(op) => format!("{}=", binop_to_string(op)), /* Structural symbols */ token::At => "@".to_string(), token::Dot => ".".to_string(), token::DotDot => "..".to_string(), token::DotDotDot => "...".to_string(), token::Comma => ",".to_string(), token::Semi => ";".to_string(), token::Colon => ":".to_string(), token::ModSep => "::".to_string(), token::RArrow => "->".to_string(), token::LArrow => "<-".to_string(), token::FatArrow => "=>".to_string(), token::OpenDelim(token::Paren) => "(".to_string(), token::CloseDelim(token::Paren) => ")".to_string(), 
token::OpenDelim(token::Bracket) => "[".to_string(), token::CloseDelim(token::Bracket) => "]".to_string(), token::OpenDelim(token::Brace) => "{".to_string(), token::CloseDelim(token::Brace) => "}".to_string(), token::Pound => "#".to_string(), token::Dollar => "$".to_string(), token::Question => "?".to_string(), /* Literals */ token::Literal(lit, suf) => { let mut out = match lit { token::Byte(b) => format!("b'{}'", b), token::Char(c) => format!("'{}'", c), token::Float(c) => c.to_string(), token::Integer(c) => c.to_string(), token::Str_(s) => format!("\"{}\"", s), token::StrRaw(s, n) => format!("r{delim}\"{string}\"{delim}", delim=repeat("#", n), string=s), token::ByteStr(v) => format!("b\"{}\"", v), token::ByteStrRaw(s, n) => format!("br{delim}\"{string}\"{delim}", delim=repeat("#", n), string=s), }; if let Some(s) = suf { out.push_str(&s.as_str()) } out } /* Name components */ token::Ident(s, _) => s.to_string(), token::Lifetime(s) => s.to_string(), token::Underscore => "_".to_string(), /* Other */ token::DocComment(s) => s.to_string(), token::SubstNt(s, _) => format!("${}", s), token::MatchNt(s, t, _, _) => format!("${}:{}", s, t), token::Eof => "<eof>".to_string(), token::Whitespace => " ".to_string(), token::Comment => "/* */".to_string(), token::Shebang(s) => format!("/* shebang: {}*/", s), token::SpecialVarNt(var) => format!("${}", var.as_str()), token::Interpolated(ref nt) => match *nt { token::NtExpr(ref e) => expr_to_string(&**e), token::NtMeta(ref e) => meta_item_to_string(&**e), token::NtTy(ref e) => ty_to_string(&**e), token::NtPath(ref e) => path_to_string(&**e), token::NtItem(ref e) => item_to_string(&**e), token::NtBlock(ref e) => block_to_string(&**e), token::NtStmt(ref e) => stmt_to_string(&**e), token::NtPat(ref e) => pat_to_string(&**e), token::NtIdent(ref e, _) => ident_to_string(**e), token::NtTT(ref e) => tt_to_string(&**e), token::NtArm(ref e) => arm_to_string(&*e), token::NtImplItem(ref e) => impl_item_to_string(&**e), 
token::NtTraitItem(ref e) => trait_item_to_string(&**e),
        token::NtGenerics(ref e) => generics_to_string(&*e),
        token::NtWhereClause(ref e) => where_clause_to_string(&*e),
        token::NtArg(ref e) => arg_to_string(&*e),
        }
    }
}

// Convenience wrappers: render a single AST node to a `String` by running the
// corresponding `State` printer method through `to_string`.

pub fn ty_to_string(ty: &ast::Ty) -> String {
    to_string(|s| s.print_type(ty))
}

pub fn bounds_to_string(bounds: &[ast::TyParamBound]) -> String {
    to_string(|s| s.print_bounds("", bounds))
}

pub fn pat_to_string(pat: &ast::Pat) -> String {
    to_string(|s| s.print_pat(pat))
}

pub fn arm_to_string(arm: &ast::Arm) -> String {
    to_string(|s| s.print_arm(arm))
}

pub fn expr_to_string(e: &ast::Expr) -> String {
    to_string(|s| s.print_expr(e))
}

pub fn lifetime_to_string(e: &ast::Lifetime) -> String {
    to_string(|s| s.print_lifetime(e))
}

pub fn tt_to_string(tt: &ast::TokenTree) -> String {
    to_string(|s| s.print_tt(tt))
}

pub fn tts_to_string(tts: &[ast::TokenTree]) -> String {
    to_string(|s| s.print_tts(tts))
}

pub fn stmt_to_string(stmt: &ast::Stmt) -> String {
    to_string(|s| s.print_stmt(stmt))
}

// NOTE(review): `attr_to_string` and `attribute_to_string` (further down) are
// duplicate wrappers around the same printer method.
pub fn attr_to_string(attr: &ast::Attribute) -> String {
    to_string(|s| s.print_attribute(attr))
}

pub fn item_to_string(i: &ast::Item) -> String {
    to_string(|s| s.print_item(i))
}

pub fn impl_item_to_string(i: &ast::ImplItem) -> String {
    to_string(|s| s.print_impl_item(i))
}

pub fn trait_item_to_string(i: &ast::TraitItem) -> String {
    to_string(|s| s.print_trait_item(i))
}

pub fn generics_to_string(generics: &ast::Generics) -> String {
    to_string(|s| s.print_generics(generics))
}

pub fn where_clause_to_string(i: &ast::WhereClause) -> String {
    to_string(|s| s.print_where_clause(i))
}

pub fn fn_block_to_string(p: &ast::FnDecl) -> String {
    to_string(|s| s.print_fn_block_args(p))
}

pub fn path_to_string(p: &ast::Path) -> String {
    to_string(|s| s.print_path(p, false, 0))
}

pub fn ident_to_string(id: ast::Ident) -> String {
    to_string(|s| s.print_ident(id))
}

/// Render a function header (visibility omitted, `ast::Inherited`) and
/// signature — no body — to a `String`.
pub fn fun_to_string(decl: &ast::FnDecl,
                     unsafety: ast::Unsafety,
                     constness: ast::Constness,
                     name: ast::Ident,
                     opt_explicit_self: Option<&ast::ExplicitSelf_>,
                     generics: &ast::Generics) -> String {
    to_string(|s| {
        try!(s.head(""));
        try!(s.print_fn(decl, unsafety, constness, abi::Rust, Some(name),
                        generics, opt_explicit_self, ast::Inherited));
        try!(s.end()); // Close the head box
        s.end() // Close the outer box
    })
}

pub fn block_to_string(blk: &ast::Block) -> String {
    to_string(|s| {
        // containing cbox, will be closed by print-block at }
        try!(s.cbox(INDENT_UNIT));
        // head-ibox, will be closed by print-block after {
        try!(s.ibox(0));
        s.print_block(blk)
    })
}

pub fn meta_item_to_string(mi: &ast::MetaItem) -> String {
    to_string(|s| s.print_meta_item(mi))
}

pub fn attribute_to_string(attr: &ast::Attribute) -> String {
    to_string(|s| s.print_attribute(attr))
}

pub fn lit_to_string(l: &ast::Lit) -> String {
    to_string(|s| s.print_literal(l))
}

pub fn explicit_self_to_string(explicit_self: &ast::ExplicitSelf_) -> String {
    to_string(|s| s.print_explicit_self(explicit_self, ast::MutImmutable).map(|_| {}))
}

pub fn variant_to_string(var: &ast::Variant) -> String {
    to_string(|s| s.print_variant(var))
}

pub fn arg_to_string(arg: &ast::Arg) -> String {
    to_string(|s| s.print_arg(arg))
}

pub fn mac_to_string(arg: &ast::Mac) -> String {
    to_string(|s| s.print_mac(arg, ::parse::token::Paren))
}

/// Prefix `s` with "pub " when `vis` is `ast::Public`; otherwise return `s`
/// unchanged.
pub fn visibility_qualified(vis: ast::Visibility, s: &str) -> String {
    match vis {
        ast::Public => format!("pub {}", s),
        ast::Inherited => s.to_string()
    }
}

/// Expression forms that must be wrapped in parentheses when printed in a
/// context that would otherwise re-parse them differently.
fn needs_parentheses(expr: &ast::Expr) -> bool {
    match expr.node {
        ast::ExprAssign(..) | ast::ExprBinary(..) |
        ast::ExprClosure(..) |
        ast::ExprAssignOp(..) | ast::ExprCast(..) |
        ast::ExprInPlace(..) => true,
        _ => false,
    }
}

/// Printing machinery shared between printer states: access to the underlying
/// `pp::Printer`, the box-nesting stack, and the comment/literal streams that
/// let the printer re-emit source comments and original literal spellings.
pub trait PrintState<'a> {
    fn writer(&mut self) -> &mut pp::Printer<'a>;
    fn boxes(&mut self) -> &mut Vec<pp::Breaks>;
    fn comments(&mut self) -> &mut Option<Vec<comments::Comment>>;
    fn cur_cmnt_and_lit(&mut self) -> &mut CurrentCommentAndLiteral;
    fn literals(&self) -> &Option<Vec<comments::Literal>>;

    /// Emit `w` followed by a breakable space.
    fn word_space(&mut self, w: &str) -> io::Result<()> {
        try!(word(self.writer(), w));
        space(self.writer())
    }

    fn popen(&mut self) -> io::Result<()> {
        word(self.writer(), "(")
    }

    fn pclose(&mut self) -> io::Result<()> {
        word(self.writer(), ")")
    }

    fn is_begin(&mut self) -> bool {
        match self.writer().last_token() {
            pp::Token::Begin(_) => true,
            _ => false,
        }
    }

    fn is_end(&mut self) -> bool {
        match self.writer().last_token() {
            pp::Token::End => true,
            _ => false,
        }
    }

    // is this the beginning of a line?
    fn is_bol(&mut self) -> bool {
        self.writer().last_token().is_eof() || self.writer().last_token().is_hardbreak_tok()
    }

    fn hardbreak_if_not_bol(&mut self) -> io::Result<()> {
        if !self.is_bol() {
            try!(hardbreak(self.writer()))
        }
        Ok(())
    }

    // "raw box"
    fn rbox(&mut self, u: usize, b: pp::Breaks) -> io::Result<()> {
        self.boxes().push(b);
        pp::rbox(self.writer(), u, b)
    }

    /// Open an inconsistent-breaking box with indent `u`.
    fn ibox(&mut self, u: usize) -> io::Result<()> {
        self.boxes().push(pp::Breaks::Inconsistent);
        pp::ibox(self.writer(), u)
    }

    /// Close the most recently opened box.
    fn end(&mut self) -> io::Result<()> {
        self.boxes().pop().unwrap();
        pp::end(self.writer())
    }

    /// Print `elts` separated by ", " inside a raw box with break policy `b`.
    fn commasep<T, F>(&mut self, b: Breaks, elts: &[T], mut op: F) -> io::Result<()>
        where F: FnMut(&mut Self, &T) -> io::Result<()>,
    {
        try!(self.rbox(0, b));
        let mut first = true;
        for elt in elts {
            if first {
                first = false;
            } else {
                try!(self.word_space(","));
            }
            try!(op(self, elt));
        }
        self.end()
    }

    /// Advance the literal cursor past `pos`, returning the literal recorded
    /// at exactly `pos` (its original source spelling), if any.
    fn next_lit(&mut self, pos: BytePos) -> Option<comments::Literal> {
        let mut cur_lit = self.cur_cmnt_and_lit().cur_lit;
        let mut result = None;
        if let &Some(ref lits) = self.literals() {
            while cur_lit < lits.len() {
                let ltrl = (*lits)[cur_lit].clone();
                if ltrl.pos > pos {
                    break;
                }
                cur_lit += 1;
                if ltrl.pos == pos {
                    result = Some(ltrl);
                    break;
                }
            }
        }
        self.cur_cmnt_and_lit().cur_lit = cur_lit;
        result
    }

    /// Print all buffered source comments located before `pos`.
    fn maybe_print_comment(&mut self, pos: BytePos) -> io::Result<()> {
        loop {
            match self.next_comment() {
                Some(ref cmnt) => {
                    if (*cmnt).pos < pos {
                        try!(self.print_comment(cmnt));
                        self.cur_cmnt_and_lit().cur_cmnt += 1;
                    } else {
                        break;
                    }
                }
                _ => break
            }
        }
        Ok(())
    }

    /// Emit one source comment according to its recorded placement style.
    fn print_comment(&mut self, cmnt: &comments::Comment) -> io::Result<()> {
        match cmnt.style {
            comments::Mixed => {
                assert_eq!(cmnt.lines.len(), 1);
                try!(zerobreak(self.writer()));
                try!(word(self.writer(), &cmnt.lines[0]));
                zerobreak(self.writer())
            }
            comments::Isolated => {
                try!(self.hardbreak_if_not_bol());
                for line in &cmnt.lines {
                    // Don't print empty lines because they will end up as trailing
                    // whitespace
                    if !line.is_empty() {
                        try!(word(self.writer(), &line[..]));
                    }
                    try!(hardbreak(self.writer()));
                }
                Ok(())
            }
            comments::Trailing => {
                try!(word(self.writer(), " "));
                if cmnt.lines.len() == 1 {
                    try!(word(self.writer(), &cmnt.lines[0]));
                    hardbreak(self.writer())
                } else {
                    try!(self.ibox(0));
                    for line in &cmnt.lines {
                        if !line.is_empty() {
                            try!(word(self.writer(), &line[..]));
                        }
                        try!(hardbreak(self.writer()));
                    }
                    self.end()
                }
            }
            comments::BlankLine => {
                // We need to do at least one, possibly two hardbreaks.
let is_semi = match self.writer().last_token() {
                    pp::Token::String(s, _) => ";" == s,
                    _ => false
                };
                if is_semi || self.is_begin() || self.is_end() {
                    try!(hardbreak(self.writer()));
                }
                hardbreak(self.writer())
            }
        }
    }

    /// Peek at the next buffered source comment without consuming it.
    fn next_comment(&mut self) -> Option<comments::Comment> {
        let cur_cmnt = self.cur_cmnt_and_lit().cur_cmnt;
        match *self.comments() {
            Some(ref cmnts) => {
                if cur_cmnt < cmnts.len() {
                    Some(cmnts[cur_cmnt].clone())
                } else {
                    None
                }
            }
            _ => None
        }
    }

    /// Print a literal. If the original source spelling was recorded at this
    /// span position, re-emit it verbatim; otherwise reconstruct a spelling
    /// from the AST value.
    fn print_literal(&mut self, lit: &ast::Lit) -> io::Result<()> {
        try!(self.maybe_print_comment(lit.span.lo));
        match self.next_lit(lit.span.lo) {
            Some(ref ltrl) => {
                return word(self.writer(), &(*ltrl).lit);
            }
            _ => ()
        }
        match lit.node {
            ast::LitStr(ref st, style) => self.print_string(&st, style),
            ast::LitByte(byte) => {
                let mut res = String::from("b'");
                res.extend(ascii::escape_default(byte).map(|c| c as char));
                res.push('\'');
                word(self.writer(), &res[..])
            }
            ast::LitChar(ch) => {
                let mut res = String::from("'");
                res.extend(ch.escape_default());
                res.push('\'');
                word(self.writer(), &res[..])
            }
            ast::LitInt(i, t) => {
                match t {
                    ast::SignedIntLit(st, ast::Plus) => {
                        word(self.writer(),
                             &ast_util::int_val_to_string(st, i as i64))
                    }
                    ast::SignedIntLit(st, ast::Minus) => {
                        let istr = ast_util::int_val_to_string(st, -(i as i64));
                        word(self.writer(),
                             &format!("-{}", istr))
                    }
                    ast::UnsignedIntLit(ut) => {
                        word(self.writer(), &ast_util::uint_val_to_string(ut, i))
                    }
                    ast::UnsuffixedIntLit(ast::Plus) => {
                        word(self.writer(), &format!("{}", i))
                    }
                    ast::UnsuffixedIntLit(ast::Minus) => {
                        word(self.writer(), &format!("-{}", i))
                    }
                }
            }
            ast::LitFloat(ref f, t) => {
                word(self.writer(),
                     &format!(
                         "{}{}",
                         &f,
                         &ast_util::float_ty_to_string(t)))
            }
            ast::LitFloatUnsuffixed(ref f) => word(self.writer(), &f[..]),
            ast::LitBool(val) => {
                if val { word(self.writer(), "true") } else { word(self.writer(), "false") }
            }
            ast::LitByteStr(ref v) => {
                let mut escaped: String = String::new();
                for &ch in v.iter() {
                    escaped.extend(ascii::escape_default(ch)
                                         .map(|c| c as char));
                }
                word(self.writer(), &format!("b\"{}\"", escaped))
            }
        }
    }

    /// Print a string literal in cooked (`"…"`) or raw (`r#"…"#`) form.
    fn print_string(&mut self, st: &str, style: ast::StrStyle) -> io::Result<()> {
        let st = match style {
            ast::CookedStr => {
                (format!("\"{}\"", st.escape_default()))
            }
            ast::RawStr(n) => {
                (format!("r{delim}\"{string}\"{delim}",
                         delim=repeat("#", n),
                         string=st))
            }
        };
        word(self.writer(), &st[..])
    }

    fn print_inner_attributes(&mut self,
                              attrs: &[ast::Attribute]) -> io::Result<()> {
        self.print_either_attributes(attrs, ast::AttrStyle::Inner, false, true)
    }

    fn print_inner_attributes_no_trailing_hardbreak(&mut self,
                                                    attrs: &[ast::Attribute])
                                                    -> io::Result<()> {
        self.print_either_attributes(attrs, ast::AttrStyle::Inner, false, false)
    }

    fn print_outer_attributes(&mut self,
                              attrs: &[ast::Attribute]) -> io::Result<()> {
        self.print_either_attributes(attrs, ast::AttrStyle::Outer, false, true)
    }

    fn print_inner_attributes_inline(&mut self,
                                     attrs: &[ast::Attribute]) -> io::Result<()> {
        self.print_either_attributes(attrs, ast::AttrStyle::Inner, true, true)
    }

    fn print_outer_attributes_inline(&mut self,
                                     attrs: &[ast::Attribute]) -> io::Result<()> {
        self.print_either_attributes(attrs, ast::AttrStyle::Outer, true, true)
    }

    /// Print every attribute in `attrs` whose style matches `kind`.
    /// `is_inline` keeps attributes on the current line (separated by
    /// non-breaking spaces); otherwise a trailing hardbreak may be emitted.
    fn print_either_attributes(&mut self,
                               attrs: &[ast::Attribute],
                               kind: ast::AttrStyle,
                               is_inline: bool,
                               trailing_hardbreak: bool) -> io::Result<()> {
        let mut count = 0;
        for attr in attrs {
            if attr.node.style == kind {
                try!(self.print_attribute_inline(attr, is_inline));
                if is_inline {
                    try!(self.nbsp());
                }
                count += 1;
            }
        }
        if count > 0 && trailing_hardbreak && !is_inline {
            try!(self.hardbreak_if_not_bol());
        }
        Ok(())
    }

    fn print_attribute(&mut self, attr: &ast::Attribute) -> io::Result<()> {
        self.print_attribute_inline(attr, false)
    }

    /// Print one attribute: sugared doc comments are emitted as their raw
    /// text, everything else as `#[…]` / `#![…]`.
    fn print_attribute_inline(&mut self, attr: &ast::Attribute,
                              is_inline: bool) -> io::Result<()> {
        if !is_inline {
            try!(self.hardbreak_if_not_bol());
        }
        try!(self.maybe_print_comment(attr.span.lo));
        if attr.node.is_sugared_doc {
            word(self.writer(), &attr.value_str().unwrap())
        } else {
            match attr.node.style {
                ast::AttrStyle::Inner => try!(word(self.writer(), "#![")),
                ast::AttrStyle::Outer => try!(word(self.writer(), "#[")),
            }
            try!(self.print_meta_item(&*attr.meta()));
            word(self.writer(), "]")
        }
    }

    /// Print a meta item: bare word, `name = value`, or `name(…)` list.
    fn print_meta_item(&mut self, item: &ast::MetaItem) -> io::Result<()> {
        try!(self.ibox(INDENT_UNIT));
        match item.node {
            ast::MetaWord(ref name) => {
                try!(word(self.writer(), &name));
            }
            ast::MetaNameValue(ref name, ref value) => {
                try!(self.word_space(&name[..]));
                try!(self.word_space("="));
                try!(self.print_literal(value));
            }
            ast::MetaList(ref name, ref items) => {
                try!(word(self.writer(), &name));
                try!(self.popen());
                try!(self.commasep(Consistent,
                                   &items[..],
                                   |s, i| s.print_meta_item(&**i)));
                try!(self.pclose());
            }
        }
        self.end()
    }

    fn space_if_not_bol(&mut self) -> io::Result<()> {
        if !self.is_bol() {
            try!(space(self.writer()));
        }
        Ok(())
    }

    /// Non-breaking space.
    fn nbsp(&mut self) -> io::Result<()> {
        word(self.writer(), " ")
    }
}

impl<'a> PrintState<'a> for State<'a> {
    fn writer(&mut self) -> &mut pp::Printer<'a> {
        &mut self.s
    }

    fn boxes(&mut self) -> &mut Vec<pp::Breaks> {
        &mut self.boxes
    }

    fn comments(&mut self) -> &mut Option<Vec<comments::Comment>> {
        &mut self.comments
    }

    fn cur_cmnt_and_lit(&mut self) -> &mut CurrentCommentAndLiteral {
        &mut self.cur_cmnt_and_lit
    }

    fn literals(&self) -> &Option<Vec<comments::Literal>> {
        &self.literals
    }
}

impl<'a> State<'a> {
    /// Open a consistent-breaking box with indent `u`.
    pub fn cbox(&mut self, u: usize) -> io::Result<()> {
        self.boxes.push(pp::Breaks::Consistent);
        pp::cbox(&mut self.s, u)
    }

    /// Emit `w` followed by a non-breaking space.
    pub fn word_nbsp(&mut self, w: &str) -> io::Result<()> {
        try!(word(&mut self.s, w));
        self.nbsp()
    }

    /// Open the two-box "head" used for item headers (`fn …`, `struct …`).
    pub fn head(&mut self, w: &str) -> io::Result<()> {
        // outer-box is consistent
        try!(self.cbox(INDENT_UNIT));
        // head-box is inconsistent
        try!(self.ibox(w.len() + 1));
        // keyword that starts the head
        if !w.is_empty() {
            try!(self.word_nbsp(w));
        }
        Ok(())
    }

    pub fn bopen(&mut self) -> io::Result<()> {
        try!(word(&mut self.s, "{"));
        self.end() // close the head-box
    }

    pub fn bclose_(&mut self, span:
codemap::Span, indented: usize) -> io::Result<()> {
        self.bclose_maybe_open(span, indented, true)
    }

    /// Print the closing `}` of a block, de-indenting by `indented` and
    /// optionally closing the surrounding outer box.
    pub fn bclose_maybe_open(&mut self, span: codemap::Span,
                             indented: usize, close_box: bool) -> io::Result<()> {
        try!(self.maybe_print_comment(span.hi));
        try!(self.break_offset_if_not_bol(1, -(indented as isize)));
        try!(word(&mut self.s, "}"));
        if close_box {
            try!(self.end()); // close the outer-box
        }
        Ok(())
    }

    pub fn bclose(&mut self, span: codemap::Span) -> io::Result<()> {
        self.bclose_(span, INDENT_UNIT)
    }

    /// Whether the innermost open box is consistent-breaking.
    pub fn in_cbox(&self) -> bool {
        match self.boxes.last() {
            Some(&last_box) => last_box == pp::Breaks::Consistent,
            None => false
        }
    }

    pub fn break_offset_if_not_bol(&mut self, n: usize,
                                   off: isize) -> io::Result<()> {
        if !self.is_bol() {
            break_offset(&mut self.s, n, off)
        } else {
            if off != 0 && self.s.last_token().is_hardbreak_tok() {
                // We do something pretty sketchy here: tuck the nonzero
                // offset-adjustment we were going to deposit along with the
                // break into the previous hardbreak.
                self.s.replace_last_token(pp::hardbreak_tok_offset(off));
            }
            Ok(())
        }
    }

    // Synthesizes a comment that was not textually present in the original
    // source file.
    pub fn synth_comment(&mut self, text: String) -> io::Result<()> {
        try!(word(&mut self.s, "/*"));
        try!(space(&mut self.s));
        try!(word(&mut self.s, &text[..]));
        try!(space(&mut self.s));
        word(&mut self.s, "*/")
    }

    /// Like `commasep`, but also re-emits source comments positioned inside
    /// the list, using `get_span` to locate each element.
    pub fn commasep_cmnt<T, F, G>(&mut self,
                                  b: Breaks,
                                  elts: &[T],
                                  mut op: F,
                                  mut get_span: G) -> io::Result<()>
        where F: FnMut(&mut State, &T) -> io::Result<()>,
              G: FnMut(&T) -> codemap::Span,
    {
        try!(self.rbox(0, b));
        let len = elts.len();
        let mut i = 0;
        for elt in elts {
            try!(self.maybe_print_comment(get_span(elt).hi));
            try!(op(self, elt));
            i += 1;
            if i < len {
                try!(word(&mut self.s, ","));
                try!(self.maybe_print_trailing_comment(get_span(elt),
                                                       Some(get_span(&elts[i]).hi)));
                try!(self.space_if_not_bol());
            }
        }
        self.end()
    }

    pub fn commasep_exprs(&mut self, b: Breaks,
                          exprs: &[P<ast::Expr>]) -> io::Result<()> {
        self.commasep_cmnt(b, exprs, |s, e| s.print_expr(&**e), |e| e.span)
    }

    /// Print a module body: inner attributes, then each item.
    pub fn print_mod(&mut self, _mod: &ast::Mod,
                     attrs: &[ast::Attribute]) -> io::Result<()> {
        try!(self.print_inner_attributes(attrs));
        for item in &_mod.items {
            try!(self.print_item(&**item));
        }
        Ok(())
    }

    pub fn print_foreign_mod(&mut self, nmod: &ast::ForeignMod,
                             attrs: &[ast::Attribute]) -> io::Result<()> {
        try!(self.print_inner_attributes(attrs));
        for item in &nmod.items {
            try!(self.print_foreign_item(&**item));
        }
        Ok(())
    }

    /// Print a lifetime followed by a non-breaking space, if present.
    pub fn print_opt_lifetime(&mut self,
                              lifetime: &Option<ast::Lifetime>) -> io::Result<()> {
        if let Some(l) = *lifetime {
            try!(self.print_lifetime(&l));
            try!(self.nbsp());
        }
        Ok(())
    }

    /// Print a type, dispatching on its AST node kind.
    pub fn print_type(&mut self, ty: &ast::Ty) -> io::Result<()> {
        try!(self.maybe_print_comment(ty.span.lo));
        try!(self.ibox(0));
        match ty.node {
            ast::TyVec(ref ty) => {
                try!(word(&mut self.s, "["));
                try!(self.print_type(&**ty));
                try!(word(&mut self.s, "]"));
            }
            ast::TyPtr(ref mt) => {
                try!(word(&mut self.s, "*"));
                match mt.mutbl {
                    ast::MutMutable => try!(self.word_nbsp("mut")),
                    ast::MutImmutable => try!(self.word_nbsp("const")),
                }
                try!(self.print_type(&*mt.ty));
            }
            ast::TyRptr(ref lifetime, ref
mt) => {
                try!(word(&mut self.s, "&"));
                try!(self.print_opt_lifetime(lifetime));
                try!(self.print_mt(mt));
            }
            ast::TyTup(ref elts) => {
                try!(self.popen());
                try!(self.commasep(Inconsistent,
                                   &elts[..],
                                   |s, ty| s.print_type(&**ty)));
                // single-element tuple needs the trailing comma
                if elts.len() == 1 {
                    try!(word(&mut self.s, ","));
                }
                try!(self.pclose());
            }
            ast::TyParen(ref typ) => {
                try!(self.popen());
                try!(self.print_type(&**typ));
                try!(self.pclose());
            }
            ast::TyBareFn(ref f) => {
                let generics = ast::Generics {
                    lifetimes: f.lifetimes.clone(),
                    ty_params: OwnedSlice::empty(),
                    where_clause: ast::WhereClause {
                        id: ast::DUMMY_NODE_ID,
                        predicates: Vec::new(),
                    },
                };
                try!(self.print_ty_fn(f.abi,
                                      f.unsafety,
                                      &*f.decl,
                                      None,
                                      &generics,
                                      None));
            }
            ast::TyPath(None, ref path) => {
                try!(self.print_path(path, false, 0));
            }
            ast::TyPath(Some(ref qself), ref path) => {
                try!(self.print_qpath(path, qself, false))
            }
            ast::TyObjectSum(ref ty, ref bounds) => {
                try!(self.print_type(&**ty));
                try!(self.print_bounds("+", &bounds[..]));
            }
            ast::TyPolyTraitRef(ref bounds) => {
                try!(self.print_bounds("", &bounds[..]));
            }
            ast::TyFixedLengthVec(ref ty, ref v) => {
                try!(word(&mut self.s, "["));
                try!(self.print_type(&**ty));
                try!(word(&mut self.s, "; "));
                try!(self.print_expr(&**v));
                try!(word(&mut self.s, "]"));
            }
            ast::TyTypeof(ref e) => {
                try!(word(&mut self.s, "typeof("));
                try!(self.print_expr(&**e));
                try!(word(&mut self.s, ")"));
            }
            ast::TyInfer => {
                try!(word(&mut self.s, "_"));
            }
            ast::TyMac(ref m) => {
                try!(self.print_mac(m, token::Paren));
            }
        }
        self.end()
    }

    /// Print an item inside an `extern { … }` block (fn or static).
    pub fn print_foreign_item(&mut self,
                              item: &ast::ForeignItem) -> io::Result<()> {
        try!(self.hardbreak_if_not_bol());
        try!(self.maybe_print_comment(item.span.lo));
        try!(self.print_outer_attributes(&item.attrs));
        match item.node {
            ast::ForeignItemFn(ref decl, ref generics) => {
                try!(self.head(""));
                try!(self.print_fn(decl, ast::Unsafety::Normal,
                                   ast::Constness::NotConst,
                                   abi::Rust, Some(item.ident),
                                   generics, None, item.vis));
                try!(self.end()); // end head-ibox
                try!(word(&mut self.s, ";"));
                self.end() // end the outer fn box
            }
            ast::ForeignItemStatic(ref t, m) => {
                try!(self.head(&visibility_qualified(item.vis, "static")));
                if m {
                    try!(self.word_space("mut"));
                }
                try!(self.print_ident(item.ident));
                try!(self.word_space(":"));
                try!(self.print_type(&**t));
                try!(word(&mut self.s, ";"));
                try!(self.end()); // end the head-ibox
                self.end() // end the outer cbox
            }
        }
    }

    /// Print an associated `const` declaration, with optional default value.
    fn print_associated_const(&mut self,
                              ident: ast::Ident,
                              ty: &ast::Ty,
                              default: Option<&ast::Expr>,
                              vis: ast::Visibility)
                              -> io::Result<()> {
        try!(word(&mut self.s, &visibility_qualified(vis, "")));
        try!(self.word_space("const"));
        try!(self.print_ident(ident));
        try!(self.word_space(":"));
        try!(self.print_type(ty));
        if let Some(expr) = default {
            try!(space(&mut self.s));
            try!(self.word_space("="));
            try!(self.print_expr(expr));
        }
        word(&mut self.s, ";")
    }

    /// Print an associated `type` declaration, with optional bounds/default.
    fn print_associated_type(&mut self,
                             ident: ast::Ident,
                             bounds: Option<&ast::TyParamBounds>,
                             ty: Option<&ast::Ty>)
                             -> io::Result<()> {
        try!(self.word_space("type"));
        try!(self.print_ident(ident));
        if let Some(bounds) = bounds {
            try!(self.print_bounds(":", bounds));
        }
        if let Some(ty) = ty {
            try!(space(&mut self.s));
            try!(self.word_space("="));
            try!(self.print_type(ty));
        }
        word(&mut self.s, ";")
    }

    /// Pretty-print an item
    pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
        try!(self.hardbreak_if_not_bol());
        try!(self.maybe_print_comment(item.span.lo));
        try!(self.print_outer_attributes(&item.attrs));
        try!(self.ann.pre(self, NodeItem(item)));
        match item.node {
            ast::ItemExternCrate(ref optional_path) => {
                try!(self.head(&visibility_qualified(item.vis,
                                                     "extern crate")));
                if let Some(p) = *optional_path {
                    let val = p.as_str();
                    // crate names containing '-' must be quoted
                    if val.contains("-") {
                        try!(self.print_string(&val, ast::CookedStr));
                    } else {
                        try!(self.print_name(p));
                    }
                    try!(space(&mut self.s));
                    try!(word(&mut self.s, "as"));
                    try!(space(&mut self.s));
                }
                try!(self.print_ident(item.ident));
                try!(word(&mut self.s, ";"));
                try!(self.end()); // end inner head-block
                try!(self.end()); //
// end outer head-block
            }
            ast::ItemUse(ref vp) => {
                try!(self.head(&visibility_qualified(item.vis, "use")));
                try!(self.print_view_path(&**vp));
                try!(word(&mut self.s, ";"));
                try!(self.end()); // end inner head-block
                try!(self.end()); // end outer head-block
            }
            ast::ItemStatic(ref ty, m, ref expr) => {
                try!(self.head(&visibility_qualified(item.vis, "static")));
                if m == ast::MutMutable {
                    try!(self.word_space("mut"));
                }
                try!(self.print_ident(item.ident));
                try!(self.word_space(":"));
                try!(self.print_type(&**ty));
                try!(space(&mut self.s));
                try!(self.end()); // end the head-ibox
                try!(self.word_space("="));
                try!(self.print_expr(&**expr));
                try!(word(&mut self.s, ";"));
                try!(self.end()); // end the outer cbox
            }
            ast::ItemConst(ref ty, ref expr) => {
                try!(self.head(&visibility_qualified(item.vis, "const")));
                try!(self.print_ident(item.ident));
                try!(self.word_space(":"));
                try!(self.print_type(&**ty));
                try!(space(&mut self.s));
                try!(self.end()); // end the head-ibox
                try!(self.word_space("="));
                try!(self.print_expr(&**expr));
                try!(word(&mut self.s, ";"));
                try!(self.end()); // end the outer cbox
            }
            ast::ItemFn(ref decl, unsafety, constness, abi, ref typarams, ref body) => {
                try!(self.head(""));
                try!(self.print_fn(
                    decl,
                    unsafety,
                    constness,
                    abi,
                    Some(item.ident),
                    typarams,
                    None,
                    item.vis
                ));
                try!(word(&mut self.s, " "));
                try!(self.print_block_with_attrs(&**body, &item.attrs));
            }
            ast::ItemMod(ref _mod) => {
                try!(self.head(&visibility_qualified(item.vis, "mod")));
                try!(self.print_ident(item.ident));
                try!(self.nbsp());
                try!(self.bopen());
                try!(self.print_mod(_mod, &item.attrs));
                try!(self.bclose(item.span));
            }
            ast::ItemForeignMod(ref nmod) => {
                try!(self.head("extern"));
                try!(self.word_nbsp(&nmod.abi.to_string()));
                try!(self.bopen());
                try!(self.print_foreign_mod(nmod, &item.attrs));
                try!(self.bclose(item.span));
            }
            ast::ItemTy(ref ty, ref params) => {
                try!(self.ibox(INDENT_UNIT));
                try!(self.ibox(0));
                try!(self.word_nbsp(&visibility_qualified(item.vis, "type")));
                try!(self.print_ident(item.ident));
                try!(self.print_generics(params));
                try!(self.end()); // end the inner ibox
                try!(self.print_where_clause(&params.where_clause));
                try!(space(&mut self.s));
                try!(self.word_space("="));
                try!(self.print_type(&**ty));
                try!(word(&mut self.s, ";"));
                try!(self.end()); // end the outer ibox
            }
            ast::ItemEnum(ref enum_definition, ref params) => {
                try!(self.print_enum_def(
                    enum_definition,
                    params,
                    item.ident,
                    item.span,
                    item.vis
                ));
            }
            ast::ItemStruct(ref struct_def, ref generics) => {
                try!(self.head(&visibility_qualified(item.vis, "struct")));
                try!(self.print_struct(&struct_def, generics, item.ident,
                                       item.span, true));
            }
            ast::ItemDefaultImpl(unsafety, ref trait_ref) => {
                // `impl Trait for .. {}` — the default-impl marker form
                try!(self.head(""));
                try!(self.print_visibility(item.vis));
                try!(self.print_unsafety(unsafety));
                try!(self.word_nbsp("impl"));
                try!(self.print_trait_ref(trait_ref));
                try!(space(&mut self.s));
                try!(self.word_space("for"));
                try!(self.word_space(".."));
                try!(self.bopen());
                try!(self.bclose(item.span));
            }
            ast::ItemImpl(unsafety,
                          polarity,
                          ref generics,
                          ref opt_trait,
                          ref ty,
                          ref impl_items) => {
                try!(self.head(""));
                try!(self.print_visibility(item.vis));
                try!(self.print_unsafety(unsafety));
                try!(self.word_nbsp("impl"));
                if generics.is_parameterized() {
                    try!(self.print_generics(generics));
                    try!(space(&mut self.s));
                }
                match polarity {
                    ast::ImplPolarity::Negative => {
                        try!(word(&mut self.s, "!"));
                    },
                    _ => {}
                }
                match *opt_trait {
                    Some(ref t) => {
                        try!(self.print_trait_ref(t));
                        try!(space(&mut self.s));
                        try!(self.word_space("for"));
                    }
                    None => {}
                }
                try!(self.print_type(&**ty));
                try!(self.print_where_clause(&generics.where_clause));
                try!(space(&mut self.s));
                try!(self.bopen());
                try!(self.print_inner_attributes(&item.attrs));
                for impl_item in impl_items {
                    try!(self.print_impl_item(impl_item));
                }
                try!(self.bclose(item.span));
            }
            ast::ItemTrait(unsafety, ref generics, ref bounds, ref trait_items) => {
                try!(self.head(""));
                try!(self.print_visibility(item.vis));
                try!(self.print_unsafety(unsafety));
                try!(self.word_nbsp("trait"));
                try!(self.print_ident(item.ident));
                try!(self.print_generics(generics));
                // `?Sized`-style maybe-bounds print as `for ?Trait`; the rest
                // are collected and printed as regular `:` bounds.
                let mut real_bounds = Vec::with_capacity(bounds.len());
                for b in bounds.iter() {
                    if let TraitTyParamBound(ref ptr, ast::TraitBoundModifier::Maybe) = *b {
                        try!(space(&mut self.s));
                        try!(self.word_space("for ?"));
                        try!(self.print_trait_ref(&ptr.trait_ref));
                    } else {
                        real_bounds.push(b.clone());
                    }
                }
                try!(self.print_bounds(":", &real_bounds[..]));
                try!(self.print_where_clause(&generics.where_clause));
                try!(word(&mut self.s, " "));
                try!(self.bopen());
                for trait_item in trait_items {
                    try!(self.print_trait_item(trait_item));
                }
                try!(self.bclose(item.span));
            }
            ast::ItemMac(codemap::Spanned { ref node, .. }) => {
                try!(self.print_visibility(item.vis));
                try!(self.print_path(&node.path, false, 0));
                try!(word(&mut self.s, "! "));
                try!(self.print_ident(item.ident));
                try!(self.cbox(INDENT_UNIT));
                try!(self.popen());
                try!(self.print_tts(&node.tts[..]));
                try!(self.pclose());
                try!(word(&mut self.s, ";"));
                try!(self.end());
            }
        }
        self.ann.post(self, NodeItem(item))
    }

    fn print_trait_ref(&mut self, t: &ast::TraitRef) -> io::Result<()> {
        self.print_path(&t.path, false, 0)
    }

    /// Print a `for<'a, 'b, …>` higher-ranked lifetime binder (nothing when
    /// the list is empty).
    fn print_formal_lifetime_list(&mut self,
                                  lifetimes: &[ast::LifetimeDef]) -> io::Result<()> {
        if !lifetimes.is_empty() {
            try!(word(&mut self.s, "for<"));
            let mut comma = false;
            for lifetime_def in lifetimes {
                if comma {
                    try!(self.word_space(","))
                }
                try!(self.print_lifetime_def(lifetime_def));
                comma = true;
            }
            try!(word(&mut self.s, ">"));
        }
        Ok(())
    }

    fn print_poly_trait_ref(&mut self, t: &ast::PolyTraitRef) -> io::Result<()> {
        try!(self.print_formal_lifetime_list(&t.bound_lifetimes));
        self.print_trait_ref(&t.trait_ref)
    }

    /// Print an `enum` definition: header, generics, where-clause, variants.
    pub fn print_enum_def(&mut self, enum_definition: &ast::EnumDef,
                          generics: &ast::Generics, ident: ast::Ident,
                          span: codemap::Span,
                          visibility: ast::Visibility) -> io::Result<()> {
        try!(self.head(&visibility_qualified(visibility, "enum")));
try!(self.print_ident(ident));
        try!(self.print_generics(generics));
        try!(self.print_where_clause(&generics.where_clause));
        try!(space(&mut self.s));
        self.print_variants(&enum_definition.variants, span)
    }

    /// Print the `{ … }` variant list of an enum, one variant per line with
    /// its attributes, trailing comma, and any trailing source comment.
    pub fn print_variants(&mut self,
                          variants: &[P<ast::Variant>],
                          span: codemap::Span) -> io::Result<()> {
        try!(self.bopen());
        for v in variants {
            try!(self.space_if_not_bol());
            try!(self.maybe_print_comment(v.span.lo));
            try!(self.print_outer_attributes(&v.node.attrs));
            try!(self.ibox(INDENT_UNIT));
            try!(self.print_variant(&**v));
            try!(word(&mut self.s, ","));
            try!(self.end());
            try!(self.maybe_print_trailing_comment(v.span, None));
        }
        self.bclose(span)
    }

    pub fn print_visibility(&mut self, vis: ast::Visibility) -> io::Result<()> {
        match vis {
            ast::Public => self.word_nbsp("pub"),
            ast::Inherited => Ok(())
        }
    }

    /// Print a struct (or enum-variant) body. Tuple/unit forms get `(…)` and,
    /// when `print_finalizer` is set, a terminating `;`; brace forms get a
    /// `{ … }` field list.
    pub fn print_struct(&mut self,
                        struct_def: &ast::VariantData,
                        generics: &ast::Generics,
                        ident: ast::Ident,
                        span: codemap::Span,
                        print_finalizer: bool) -> io::Result<()> {
        try!(self.print_ident(ident));
        try!(self.print_generics(generics));
        if !struct_def.is_struct() {
            if struct_def.is_tuple() {
                try!(self.popen());
                try!(self.commasep(
                    Inconsistent, struct_def.fields(),
                    |s, field| {
                        match field.node.kind {
                            ast::NamedField(..) => panic!("unexpected named field"),
                            ast::UnnamedField(vis) => {
                                try!(s.print_visibility(vis));
                                try!(s.maybe_print_comment(field.span.lo));
                                s.print_type(&*field.node.ty)
                            }
                        }
                    }
                ));
                try!(self.pclose());
            }
            try!(self.print_where_clause(&generics.where_clause));
            if print_finalizer {
                try!(word(&mut self.s, ";"));
            }
            try!(self.end());
            self.end() // close the outer-box
        } else {
            try!(self.print_where_clause(&generics.where_clause));
            try!(self.nbsp());
            try!(self.bopen());
            try!(self.hardbreak_if_not_bol());
            for field in struct_def.fields() {
                match field.node.kind {
                    ast::UnnamedField(..) => panic!("unexpected unnamed field"),
                    ast::NamedField(ident, visibility) => {
                        try!(self.hardbreak_if_not_bol());
                        try!(self.maybe_print_comment(field.span.lo));
                        try!(self.print_outer_attributes(&field.node.attrs));
                        try!(self.print_visibility(visibility));
                        try!(self.print_ident(ident));
                        try!(self.word_nbsp(":"));
                        try!(self.print_type(&*field.node.ty));
                        try!(word(&mut self.s, ","));
                    }
                }
            }
            self.bclose(span)
        }
    }

    /// This doesn't deserve to be called "pretty" printing, but it should be
    /// meaning-preserving. A quick hack that might help would be to look at the
    /// spans embedded in the TTs to decide where to put spaces and newlines.
    /// But it'd be better to parse these according to the grammar of the
    /// appropriate macro, transcribe back into the grammar we just parsed from,
    /// and then pretty-print the resulting AST nodes (so, e.g., we print
    /// expression arguments as expressions). It can be done! I think.
    pub fn print_tt(&mut self, tt: &ast::TokenTree) -> io::Result<()> {
        match *tt {
            TokenTree::Token(_, ref tk) => {
                try!(word(&mut self.s, &token_to_string(tk)));
                match *tk {
                    parse::token::DocComment(..) => {
                        hardbreak(&mut self.s)
                    }
                    _ => Ok(())
                }
            }
            TokenTree::Delimited(_, ref delimed) => {
                try!(word(&mut self.s, &token_to_string(&delimed.open_token())));
                try!(space(&mut self.s));
                try!(self.print_tts(&delimed.tts));
                try!(space(&mut self.s));
                word(&mut self.s, &token_to_string(&delimed.close_token()))
            },
            TokenTree::Sequence(_, ref seq) => {
                try!(word(&mut self.s, "$("));
                for tt_elt in &seq.tts {
                    try!(self.print_tt(tt_elt));
                }
                try!(word(&mut self.s, ")"));
                match seq.separator {
                    Some(ref tk) => {
                        try!(word(&mut self.s, &token_to_string(tk)));
                    }
                    None => {},
                }
                match seq.op {
                    ast::ZeroOrMore => word(&mut self.s, "*"),
                    ast::OneOrMore => word(&mut self.s, "+"),
                }
            }
        }
    }

    pub fn print_tts(&mut self, tts: &[ast::TokenTree]) -> io::Result<()> {
        try!(self.ibox(0));
        let mut suppress_space = false;
        for (i, tt) in tts.iter().enumerate() {
            if i != 0 && !suppress_space {
                try!(space(&mut self.s));
            }
            try!(self.print_tt(tt));
            // There should be no space between the module name and the following `::` in paths,
            // otherwise imported macros get re-parsed from crate metadata incorrectly (#20701)
            suppress_space = match *tt {
                TokenTree::Token(_, token::Ident(_, token::ModName)) |
                TokenTree::Token(_, token::MatchNt(_, _, _, token::ModName)) |
                TokenTree::Token(_, token::SubstNt(_, token::ModName)) => true,
                _ => false
            }
        }
        self.end()
    }

    /// Print an enum variant, including its `= discriminant` if present.
    pub fn print_variant(&mut self, v: &ast::Variant) -> io::Result<()> {
        try!(self.head(""));
        let generics = ast_util::empty_generics();
        try!(self.print_struct(&v.node.data, &generics, v.node.name, v.span, false));
        match v.node.disr_expr {
            Some(ref d) => {
                try!(space(&mut self.s));
                try!(self.word_space("="));
                self.print_expr(&**d)
            }
            _ => Ok(())
        }
    }

    /// Print a method signature (no body).
    pub fn print_method_sig(&mut self,
                            ident: ast::Ident,
                            m: &ast::MethodSig,
                            vis: ast::Visibility)
                            -> io::Result<()> {
        self.print_fn(&m.decl,
                      m.unsafety,
                      m.constness,
                      m.abi,
                      Some(ident),
                      &m.generics,
                      Some(&m.explicit_self.node),
                      vis)
    }

    pub fn print_trait_item(&mut self, ti: &ast::TraitItem) -> io::Result<()> {
try!(self.ann.pre(self, NodeSubItem(ti.id))); try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(ti.span.lo)); try!(self.print_outer_attributes(&ti.attrs)); match ti.node { ast::ConstTraitItem(ref ty, ref default) => { try!(self.print_associated_const(ti.ident, &ty, default.as_ref().map(|expr| &**expr), ast::Inherited)); } ast::MethodTraitItem(ref sig, ref body) => { if body.is_some() { try!(self.head("")); } try!(self.print_method_sig(ti.ident, sig, ast::Inherited)); if let Some(ref body) = *body { try!(self.nbsp()); try!(self.print_block_with_attrs(body, &ti.attrs)); } else { try!(word(&mut self.s, ";")); } } ast::TypeTraitItem(ref bounds, ref default) => { try!(self.print_associated_type(ti.ident, Some(bounds), default.as_ref().map(|ty| &**ty))); } } self.ann.post(self, NodeSubItem(ti.id)) } pub fn print_impl_item(&mut self, ii: &ast::ImplItem) -> io::Result<()> { try!(self.ann.pre(self, NodeSubItem(ii.id))); try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(ii.span.lo)); try!(self.print_outer_attributes(&ii.attrs)); match ii.node { ast::ImplItemKind::Const(ref ty, ref expr) => { try!(self.print_associated_const(ii.ident, &ty, Some(&expr), ii.vis)); } ast::ImplItemKind::Method(ref sig, ref body) => { try!(self.head("")); try!(self.print_method_sig(ii.ident, sig, ii.vis)); try!(self.nbsp()); try!(self.print_block_with_attrs(body, &ii.attrs)); } ast::ImplItemKind::Type(ref ty) => { try!(self.print_associated_type(ii.ident, None, Some(ty))); } ast::ImplItemKind::Macro(codemap::Spanned { ref node, .. }) => { // code copied from ItemMac: try!(self.print_path(&node.path, false, 0)); try!(word(&mut self.s, "! 
")); try!(self.cbox(INDENT_UNIT)); try!(self.popen()); try!(self.print_tts(&node.tts[..])); try!(self.pclose()); try!(word(&mut self.s, ";")); try!(self.end()) } } self.ann.post(self, NodeSubItem(ii.id)) } pub fn print_stmt(&mut self, st: &ast::Stmt) -> io::Result<()> { try!(self.maybe_print_comment(st.span.lo)); match st.node { ast::StmtDecl(ref decl, _) => { try!(self.print_decl(&**decl)); } ast::StmtExpr(ref expr, _) => { try!(self.space_if_not_bol()); try!(self.print_expr_outer_attr_style(&**expr, false)); } ast::StmtSemi(ref expr, _) => { try!(self.space_if_not_bol()); try!(self.print_expr_outer_attr_style(&**expr, false)); try!(word(&mut self.s, ";")); } ast::StmtMac(ref mac, style, ref attrs) => { try!(self.space_if_not_bol()); try!(self.print_outer_attributes(attrs.as_attr_slice())); let delim = match style { ast::MacStmtWithBraces => token::Brace, _ => token::Paren }; try!(self.print_mac(&**mac, delim)); match style { ast::MacStmtWithBraces => {} _ => try!(word(&mut self.s, ";")), } } } if parse::classify::stmt_ends_with_semi(&st.node) { try!(word(&mut self.s, ";")); } self.maybe_print_trailing_comment(st.span, None) } pub fn print_block(&mut self, blk: &ast::Block) -> io::Result<()> { self.print_block_with_attrs(blk, &[]) } pub fn print_block_unclosed(&mut self, blk: &ast::Block) -> io::Result<()> { self.print_block_unclosed_indent(blk, INDENT_UNIT) } pub fn print_block_unclosed_with_attrs(&mut self, blk: &ast::Block, attrs: &[ast::Attribute]) -> io::Result<()> { self.print_block_maybe_unclosed(blk, INDENT_UNIT, attrs, false) } pub fn print_block_unclosed_indent(&mut self, blk: &ast::Block, indented: usize) -> io::Result<()> { self.print_block_maybe_unclosed(blk, indented, &[], false) } pub fn print_block_with_attrs(&mut self, blk: &ast::Block, attrs: &[ast::Attribute]) -> io::Result<()> { self.print_block_maybe_unclosed(blk, INDENT_UNIT, attrs, true) } pub fn print_block_maybe_unclosed(&mut self, blk: &ast::Block, indented: usize, attrs: 
&[ast::Attribute],
                                      close_box: bool) -> io::Result<()> {
        match blk.rules {
            ast::UnsafeBlock(..) => try!(self.word_space("unsafe")),
            ast::DefaultBlock => ()
        }
        try!(self.maybe_print_comment(blk.span.lo));
        try!(self.ann.pre(self, NodeBlock(blk)));
        try!(self.bopen());

        try!(self.print_inner_attributes(attrs));

        for st in &blk.stmts {
            try!(self.print_stmt(&**st));
        }
        // trailing tail expression, if any, printed without a semicolon
        match blk.expr {
            Some(ref expr) => {
                try!(self.space_if_not_bol());
                try!(self.print_expr_outer_attr_style(&**expr, false));
                try!(self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi)));
            }
            _ => ()
        }
        try!(self.bclose_maybe_open(blk.span, indented, close_box));
        self.ann.post(self, NodeBlock(blk))
    }

    /// Prints the `else` branch of an `if`, recursing for chained
    /// `else if` / `else if let`.
    fn print_else(&mut self, els: Option<&ast::Expr>) -> io::Result<()> {
        match els {
            Some(_else) => {
                match _else.node {
                    // "another else-if"
                    ast::ExprIf(ref i, ref then, ref e) => {
                        try!(self.cbox(INDENT_UNIT - 1));
                        try!(self.ibox(0));
                        try!(word(&mut self.s, " else if "));
                        try!(self.print_expr(&**i));
                        try!(space(&mut self.s));
                        try!(self.print_block(&**then));
                        self.print_else(e.as_ref().map(|e| &**e))
                    }
                    // "another else-if-let"
                    ast::ExprIfLet(ref pat, ref expr, ref then, ref e) => {
                        try!(self.cbox(INDENT_UNIT - 1));
                        try!(self.ibox(0));
                        try!(word(&mut self.s, " else if let "));
                        try!(self.print_pat(&**pat));
                        try!(space(&mut self.s));
                        try!(self.word_space("="));
                        try!(self.print_expr(&**expr));
                        try!(space(&mut self.s));
                        try!(self.print_block(&**then));
                        self.print_else(e.as_ref().map(|e| &**e))
                    }
                    // "final else"
                    ast::ExprBlock(ref b) => {
                        try!(self.cbox(INDENT_UNIT - 1));
                        try!(self.ibox(0));
                        try!(word(&mut self.s, " else "));
                        self.print_block(&**b)
                    }
                    // BLEAH, constraints would be great here
                    _ => {
                        panic!("print_if saw if with weird alternative");
                    }
                }
            }
            _ => Ok(())
        }
    }

    /// Prints `if <test> <blk>` followed by any else chain.
    pub fn print_if(&mut self, test: &ast::Expr, blk: &ast::Block,
                    elseopt: Option<&ast::Expr>) -> io::Result<()> {
        try!(self.head("if"));
        try!(self.print_expr(test));
        try!(space(&mut self.s));
        try!(self.print_block(blk));
        self.print_else(elseopt)
    }

    /// Prints `if let <pat> = <expr> <blk>` followed by any else chain.
    pub fn print_if_let(&mut self, pat: &ast::Pat, expr: &ast::Expr, blk: &ast::Block,
                        elseopt: Option<&ast::Expr>) -> io::Result<()> {
        try!(self.head("if let"));
        try!(self.print_pat(pat));
        try!(space(&mut self.s));
        try!(self.word_space("="));
        try!(self.print_expr(expr));
        try!(space(&mut self.s));
        try!(self.print_block(blk));
        self.print_else(elseopt)
    }

    /// Prints a macro invocation `path!(...)` / `path![...]` / `path! {...}`
    /// with the requested delimiter.
    pub fn print_mac(&mut self, m: &ast::Mac, delim: token::DelimToken)
                     -> io::Result<()> {
        try!(self.print_path(&m.node.path, false, 0));
        try!(word(&mut self.s, "!"));
        match delim {
            token::Paren => try!(self.popen()),
            token::Bracket => try!(word(&mut self.s, "[")),
            token::Brace => {
                // head-ibox, will be closed by bopen()
                try!(self.ibox(0));
                // Don't ask me why the regular bopen() does
                // more than just opening a brace...
                try!(self.bopen())
            }
        }
        try!(self.print_tts(&m.node.tts));
        match delim {
            token::Paren => self.pclose(),
            token::Bracket => word(&mut self.s, "]"),
            token::Brace => self.bclose(m.span),
        }
    }

    /// Prints the parenthesized, comma-separated argument list of a call.
    fn print_call_post(&mut self, args: &[P<ast::Expr>]) -> io::Result<()> {
        try!(self.popen());
        try!(self.commasep_exprs(Inconsistent, args));
        self.pclose()
    }

    /// True when `sub_expr` must be parenthesized as an operand of `binop`
    /// (lower-precedence binary sub-expression, or any non-binary node).
    pub fn check_expr_bin_needs_paren(&mut self, sub_expr: &ast::Expr,
                                      binop: ast::BinOp) -> bool {
        match sub_expr.node {
            ast::ExprBinary(ref sub_op, _, _) => {
                if AssocOp::from_ast_binop(sub_op.node).precedence() <
                    AssocOp::from_ast_binop(binop.node).precedence() {
                    true
                } else {
                    false
                }
            }
            _ => true
        }
    }

    /// Prints `expr`, wrapping it in parentheses when `needs_parentheses`
    /// says the context requires it.
    pub fn print_expr_maybe_paren(&mut self, expr: &ast::Expr) -> io::Result<()> {
        let needs_par = needs_parentheses(expr);
        if needs_par {
            try!(self.popen());
        }
        try!(self.print_expr(expr));
        if needs_par {
            try!(self.pclose());
        }
        Ok(())
    }

    /// Prints the placement-in syntax `place <- expr`.
    fn print_expr_in_place(&mut self,
                           place: &ast::Expr,
                           expr: &ast::Expr) -> io::Result<()> {
        try!(self.print_expr_maybe_paren(place));
        try!(space(&mut self.s));
        try!(self.word_space("<-"));
        self.print_expr_maybe_paren(expr)
    }

    /// Prints an array literal `[a, b, c]`.
    fn print_expr_vec(&mut self, exprs: &[P<ast::Expr>],
                      attrs: &[Attribute]) -> io::Result<()> {
        try!(self.ibox(INDENT_UNIT));
// Body of print_expr_vec continues from the previous line.
        try!(word(&mut self.s, "["));
        try!(self.print_inner_attributes_inline(attrs));
        try!(self.commasep_exprs(Inconsistent, &exprs[..]));
        try!(word(&mut self.s, "]"));
        self.end()
    }

    /// Prints a repeat-array literal `[element; count]`.
    fn print_expr_repeat(&mut self,
                         element: &ast::Expr,
                         count: &ast::Expr,
                         attrs: &[Attribute]) -> io::Result<()> {
        try!(self.ibox(INDENT_UNIT));
        try!(word(&mut self.s, "["));
        try!(self.print_inner_attributes_inline(attrs));
        try!(self.print_expr(element));
        try!(self.word_space(";"));
        try!(self.print_expr(count));
        try!(word(&mut self.s, "]"));
        self.end()
    }

    /// Prints a struct literal `Path { field: expr, ..base }`.
    fn print_expr_struct(&mut self,
                         path: &ast::Path,
                         fields: &[ast::Field],
                         wth: &Option<P<ast::Expr>>,
                         attrs: &[Attribute]) -> io::Result<()> {
        try!(self.print_path(path, true, 0));
        try!(word(&mut self.s, "{"));
        try!(self.print_inner_attributes_inline(attrs));
        try!(self.commasep_cmnt(
            Consistent,
            &fields[..],
            |s, field| {
                try!(s.ibox(INDENT_UNIT));
                try!(s.print_ident(field.ident.node));
                try!(s.word_space(":"));
                try!(s.print_expr(&*field.expr));
                s.end()
            },
            |f| f.span));
        // functional-record-update base: `..expr`
        match *wth {
            Some(ref expr) => {
                try!(self.ibox(INDENT_UNIT));
                if !fields.is_empty() {
                    try!(word(&mut self.s, ","));
                    try!(space(&mut self.s));
                }
                try!(word(&mut self.s, ".."));
                try!(self.print_expr(&**expr));
                try!(self.end());
            }
            _ => if !fields.is_empty() {
                try!(word(&mut self.s, ","))
            }
        }
        try!(word(&mut self.s, "}"));
        Ok(())
    }

    /// Prints a tuple literal; a one-element tuple gets a trailing comma.
    fn print_expr_tup(&mut self, exprs: &[P<ast::Expr>],
                      attrs: &[Attribute]) -> io::Result<()> {
        try!(self.popen());
        try!(self.print_inner_attributes_inline(attrs));
        try!(self.commasep_exprs(Inconsistent, &exprs[..]));
        if exprs.len() == 1 {
            try!(word(&mut self.s, ","));
        }
        self.pclose()
    }

    /// Prints a call expression `func(args...)`.
    fn print_expr_call(&mut self,
                       func: &ast::Expr,
                       args: &[P<ast::Expr>]) -> io::Result<()> {
        try!(self.print_expr_maybe_paren(func));
        self.print_call_post(args)
    }

    /// Prints a method call `receiver.name::<T>(rest...)`;
    /// `args[0]` is the receiver.
    fn print_expr_method_call(&mut self,
                              ident: ast::SpannedIdent,
                              tys: &[P<ast::Ty>],
                              args: &[P<ast::Expr>]) -> io::Result<()> {
        let base_args = &args[1..];
        try!(self.print_expr(&*args[0]));
        try!(word(&mut self.s, "."));
        try!(self.print_ident(ident.node));
        if !tys.is_empty() {
            try!(word(&mut self.s, "::<"));
            try!(self.commasep(Inconsistent, tys,
                               |s, ty| s.print_type(&**ty)));
            try!(word(&mut self.s, ">"));
        }
        self.print_call_post(base_args)
    }

    /// Prints `lhs op rhs`, parenthesizing operands of lower precedence.
    fn print_expr_binary(&mut self,
                         op: ast::BinOp,
                         lhs: &ast::Expr,
                         rhs: &ast::Expr) -> io::Result<()> {
        if self.check_expr_bin_needs_paren(lhs, op) {
            try!(self.print_expr_maybe_paren(lhs));
        } else {
            try!(self.print_expr(lhs));
        }
        try!(space(&mut self.s));
        try!(self.word_space(ast_util::binop_to_string(op.node)));
        if self.check_expr_bin_needs_paren(rhs, op) {
            self.print_expr_maybe_paren(rhs)
        } else {
            self.print_expr(rhs)
        }
    }

    /// Prints a unary expression `op expr`.
    fn print_expr_unary(&mut self,
                        op: ast::UnOp,
                        expr: &ast::Expr) -> io::Result<()> {
        try!(word(&mut self.s, ast_util::unop_to_string(op)));
        self.print_expr_maybe_paren(expr)
    }

    /// Prints a borrow `&expr` / `&mut expr`.
    fn print_expr_addr_of(&mut self,
                          mutability: ast::Mutability,
                          expr: &ast::Expr) -> io::Result<()> {
        try!(word(&mut self.s, "&"));
        try!(self.print_mutability(mutability));
        self.print_expr_maybe_paren(expr)
    }

    pub fn print_expr(&mut self, expr: &ast::Expr) -> io::Result<()> {
        self.print_expr_outer_attr_style(expr, true)
    }

    /// Prints any expression; `is_inline` picks inline vs. block-style
    /// placement of outer attributes. One match arm per ExprKind.
    fn print_expr_outer_attr_style(&mut self,
                                   expr: &ast::Expr,
                                   is_inline: bool) -> io::Result<()> {
        try!(self.maybe_print_comment(expr.span.lo));

        let attrs = expr.attrs.as_attr_slice();
        if is_inline {
            try!(self.print_outer_attributes_inline(attrs));
        } else {
            try!(self.print_outer_attributes(attrs));
        }

        try!(self.ibox(INDENT_UNIT));
        try!(self.ann.pre(self, NodeExpr(expr)));
        match expr.node {
            ast::ExprBox(ref expr) => {
                try!(self.word_space("box"));
                try!(self.print_expr(expr));
            }
            ast::ExprInPlace(ref place, ref expr) => {
                try!(self.print_expr_in_place(place, expr));
            }
            ast::ExprVec(ref exprs) => {
                try!(self.print_expr_vec(&exprs[..], attrs));
            }
            ast::ExprRepeat(ref element, ref count) => {
                try!(self.print_expr_repeat(&**element, &**count, attrs));
            }
            ast::ExprStruct(ref path, ref fields, ref wth) => {
                try!(self.print_expr_struct(path, &fields[..],
wth, attrs));
            }
            ast::ExprTup(ref exprs) => {
                try!(self.print_expr_tup(&exprs[..], attrs));
            }
            ast::ExprCall(ref func, ref args) => {
                try!(self.print_expr_call(&**func, &args[..]));
            }
            ast::ExprMethodCall(ident, ref tys, ref args) => {
                try!(self.print_expr_method_call(ident, &tys[..], &args[..]));
            }
            ast::ExprBinary(op, ref lhs, ref rhs) => {
                try!(self.print_expr_binary(op, &**lhs, &**rhs));
            }
            ast::ExprUnary(op, ref expr) => {
                try!(self.print_expr_unary(op, &**expr));
            }
            ast::ExprAddrOf(m, ref expr) => {
                try!(self.print_expr_addr_of(m, &**expr));
            }
            ast::ExprLit(ref lit) => {
                try!(self.print_literal(&**lit));
            }
            ast::ExprCast(ref expr, ref ty) => {
                // chained casts `a as T as U` need no parens on the inner cast
                if let ast::ExprCast(..) = expr.node {
                    try!(self.print_expr(&**expr));
                } else {
                    try!(self.print_expr_maybe_paren(&**expr));
                }
                try!(space(&mut self.s));
                try!(self.word_space("as"));
                try!(self.print_type(&**ty));
            }
            ast::ExprIf(ref test, ref blk, ref elseopt) => {
                try!(self.print_if(&**test, &**blk,
                                   elseopt.as_ref().map(|e| &**e)));
            }
            ast::ExprIfLet(ref pat, ref expr, ref blk, ref elseopt) => {
                try!(self.print_if_let(&**pat, &**expr, &** blk,
                                       elseopt.as_ref().map(|e| &**e)));
            }
            ast::ExprWhile(ref test, ref blk, opt_ident) => {
                // optional loop label `'name:`
                if let Some(ident) = opt_ident {
                    try!(self.print_ident(ident));
                    try!(self.word_space(":"));
                }
                try!(self.head("while"));
                try!(self.print_expr(&**test));
                try!(space(&mut self.s));
                try!(self.print_block_with_attrs(&**blk, attrs));
            }
            ast::ExprWhileLet(ref pat, ref expr, ref blk, opt_ident) => {
                if let Some(ident) = opt_ident {
                    try!(self.print_ident(ident));
                    try!(self.word_space(":"));
                }
                try!(self.head("while let"));
                try!(self.print_pat(&**pat));
                try!(space(&mut self.s));
                try!(self.word_space("="));
                try!(self.print_expr(&**expr));
                try!(space(&mut self.s));
                try!(self.print_block_with_attrs(&**blk, attrs));
            }
            ast::ExprForLoop(ref pat, ref iter, ref blk, opt_ident) => {
                if let Some(ident) = opt_ident {
                    try!(self.print_ident(ident));
                    try!(self.word_space(":"));
                }
                try!(self.head("for"));
                try!(self.print_pat(&**pat));
                try!(space(&mut self.s));
                try!(self.word_space("in"));
                try!(self.print_expr(&**iter));
                try!(space(&mut self.s));
                try!(self.print_block_with_attrs(&**blk, attrs));
            }
            ast::ExprLoop(ref blk, opt_ident) => {
                if let Some(ident) = opt_ident {
                    try!(self.print_ident(ident));
                    try!(self.word_space(":"));
                }
                try!(self.head("loop"));
                try!(space(&mut self.s));
                try!(self.print_block_with_attrs(&**blk, attrs));
            }
            ast::ExprMatch(ref expr, ref arms) => {
                try!(self.cbox(INDENT_UNIT));
                try!(self.ibox(4));
                try!(self.word_nbsp("match"));
                try!(self.print_expr(&**expr));
                try!(space(&mut self.s));
                try!(self.bopen());
                try!(self.print_inner_attributes_no_trailing_hardbreak(attrs));
                for arm in arms {
                    try!(self.print_arm(arm));
                }
                try!(self.bclose_(expr.span, INDENT_UNIT));
            }
            ast::ExprClosure(capture_clause, ref decl, ref body) => {
                try!(self.print_capture_clause(capture_clause));

                try!(self.print_fn_block_args(&**decl));
                try!(space(&mut self.s));

                let default_return = match decl.output {
                    ast::DefaultReturn(..) => true,
                    _ => false
                };

                if !default_return || !body.stmts.is_empty() || body.expr.is_none() {
                    try!(self.print_block_unclosed(&**body));
                } else {
                    // we extract the block, so as not to create another set of boxes
                    let i_expr = body.expr.as_ref().unwrap();
                    match i_expr.node {
                        ast::ExprBlock(ref blk) => {
                            try!(self.print_block_unclosed_with_attrs(
                                &**blk,
                                i_expr.attrs.as_attr_slice()));
                        }
                        _ => {
                            // this is a bare expression
                            try!(self.print_expr(&**i_expr));
                            try!(self.end()); // need to close a box
                        }
                    }
                }
                // a box will be closed by print_expr, but we didn't want an overall
                // wrapper so we closed the corresponding opening. so create an
                // empty box to satisfy the close.
                try!(self.ibox(0));
            }
            ast::ExprBlock(ref blk) => {
                // containing cbox, will be closed by print-block at }
                try!(self.cbox(INDENT_UNIT));
                // head-box, will be closed by print-block after {
                try!(self.ibox(0));
                try!(self.print_block_with_attrs(&**blk, attrs));
            }
            ast::ExprAssign(ref lhs, ref rhs) => {
                try!(self.print_expr(&**lhs));
                try!(space(&mut self.s));
                try!(self.word_space("="));
                try!(self.print_expr(&**rhs));
            }
            ast::ExprAssignOp(op, ref lhs, ref rhs) => {
                try!(self.print_expr(&**lhs));
                try!(space(&mut self.s));
                try!(word(&mut self.s, ast_util::binop_to_string(op.node)));
                try!(self.word_space("="));
                try!(self.print_expr(&**rhs));
            }
            ast::ExprField(ref expr, id) => {
                try!(self.print_expr(&**expr));
                try!(word(&mut self.s, "."));
                try!(self.print_ident(id.node));
            }
            ast::ExprTupField(ref expr, id) => {
                try!(self.print_expr(&**expr));
                try!(word(&mut self.s, "."));
                try!(self.print_usize(id.node));
            }
            ast::ExprIndex(ref expr, ref index) => {
                try!(self.print_expr(&**expr));
                try!(word(&mut self.s, "["));
                try!(self.print_expr(&**index));
                try!(word(&mut self.s, "]"));
            }
            ast::ExprRange(ref start, ref end) => {
                // either endpoint may be absent: `a..b`, `a..`, `..b`, `..`
                if let &Some(ref e) = start {
                    try!(self.print_expr(&**e));
                }
                try!(word(&mut self.s, ".."));
                if let &Some(ref e) = end {
                    try!(self.print_expr(&**e));
                }
            }
            ast::ExprPath(None, ref path) => {
                try!(self.print_path(path, true, 0))
            }
            ast::ExprPath(Some(ref qself), ref path) => {
                try!(self.print_qpath(path, qself, true))
            }
            ast::ExprBreak(opt_ident) => {
                try!(word(&mut self.s, "break"));
                try!(space(&mut self.s));
                if let Some(ident) = opt_ident {
                    try!(self.print_ident(ident.node));
                    try!(space(&mut self.s));
                }
            }
            ast::ExprAgain(opt_ident) => {
                try!(word(&mut self.s, "continue"));
                try!(space(&mut self.s));
                if let Some(ident) = opt_ident {
                    try!(self.print_ident(ident.node));
                    try!(space(&mut self.s))
                }
            }
            ast::ExprRet(ref result) => {
                try!(word(&mut self.s, "return"));
                match *result {
                    Some(ref expr) => {
                        try!(word(&mut self.s, " "));
                        try!(self.print_expr(&**expr));
                    }
                    _ => ()
                }
            }
            ast::ExprInlineAsm(ref a) => {
                // asm!("template" : outputs : inputs : clobbers : options)
                try!(word(&mut self.s, "asm!"));
                try!(self.popen());
                try!(self.print_string(&a.asm, a.asm_str_style));
                try!(self.word_space(":"));

                try!(self.commasep(Inconsistent, &a.outputs,
                                   |s, &(ref co, ref o, is_rw)| {
                    // read-write outputs are spelled `+constraint`
                    match co.slice_shift_char() {
                        Some(('=', operand)) if is_rw => {
                            try!(s.print_string(&format!("+{}", operand),
                                                ast::CookedStr))
                        }
                        _ => try!(s.print_string(&co, ast::CookedStr))
                    }
                    try!(s.popen());
                    try!(s.print_expr(&**o));
                    try!(s.pclose());
                    Ok(())
                }));
                try!(space(&mut self.s));
                try!(self.word_space(":"));

                try!(self.commasep(Inconsistent, &a.inputs,
                                   |s, &(ref co, ref o)| {
                    try!(s.print_string(&co, ast::CookedStr));
                    try!(s.popen());
                    try!(s.print_expr(&**o));
                    try!(s.pclose());
                    Ok(())
                }));
                try!(space(&mut self.s));
                try!(self.word_space(":"));

                try!(self.commasep(Inconsistent, &a.clobbers,
                                   |s, co| {
                    try!(s.print_string(&co, ast::CookedStr));
                    Ok(())
                }));

                let mut options = vec!();
                if a.volatile {
                    options.push("volatile");
                }
                if a.alignstack {
                    options.push("alignstack");
                }
                if a.dialect == ast::AsmDialect::Intel {
                    options.push("intel");
                }

                if !options.is_empty() {
                    try!(space(&mut self.s));
                    try!(self.word_space(":"));
                    try!(self.commasep(Inconsistent, &*options,
                                       |s, &co| {
                        try!(s.print_string(co, ast::CookedStr));
                        Ok(())
                    }));
                }

                try!(self.pclose());
            }
            ast::ExprMac(ref m) => try!(self.print_mac(m, token::Paren)),
            ast::ExprParen(ref e) => {
                try!(self.popen());
                try!(self.print_inner_attributes_inline(attrs));
                try!(self.print_expr(&**e));
                try!(self.pclose());
            }
        }
        try!(self.ann.post(self, NodeExpr(expr)));
        self.end()
    }

    /// Prints `pat[: ty]` for a `let` binding (no initializer).
    pub fn print_local_decl(&mut self, loc: &ast::Local) -> io::Result<()> {
        try!(self.print_pat(&*loc.pat));
        if let Some(ref ty) = loc.ty {
            try!(self.word_space(":"));
            try!(self.print_type(&**ty));
        }
        Ok(())
    }

    /// Prints a declaration statement: a `let` binding or a nested item.
    pub fn print_decl(&mut self, decl: &ast::Decl) -> io::Result<()> {
        try!(self.maybe_print_comment(decl.span.lo));
        match decl.node {
            ast::DeclLocal(ref loc) => {
                try!(self.print_outer_attributes(loc.attrs.as_attr_slice()));
                try!(self.space_if_not_bol());
                try!(self.ibox(INDENT_UNIT));
                try!(self.word_nbsp("let"));

                try!(self.ibox(INDENT_UNIT));
                try!(self.print_local_decl(&**loc));
                try!(self.end());
                if let Some(ref init) = loc.init {
                    try!(self.nbsp());
                    try!(self.word_space("="));
                    try!(self.print_expr(&**init));
                }
                self.end()
            }
            ast::DeclItem(ref item) => self.print_item(&**item)
        }
    }

    // NOTE(review): the body of print_ident is not visible in this chunk;
    // only its signature appears here.
    pub fn print_ident(&mut self, ident: ast::Ident) -> io::Result<()>
/// Prints a tuple-field index or similar numeric token.
    pub fn print_usize(&mut self, i: usize) -> io::Result<()> {
        word(&mut self.s, &i.to_string())
    }

    /// Prints an interned name and fires the NodeName annotation hook.
    pub fn print_name(&mut self, name: ast::Name) -> io::Result<()> {
        try!(word(&mut self.s, &name.as_str()));
        self.ann.post(self, NodeName(&name))
    }

    /// Prints `pat[: ty] in coll` for a for-loop header.
    pub fn print_for_decl(&mut self, loc: &ast::Local,
                          coll: &ast::Expr) -> io::Result<()> {
        try!(self.print_local_decl(loc));
        try!(space(&mut self.s));
        try!(self.word_space("in"));
        self.print_expr(coll)
    }

    /// Prints a path, omitting the last `depth` segments (used by
    /// print_qpath to reprint the trait portion of `<T as Trait>::item`).
    fn print_path(&mut self,
                  path: &ast::Path,
                  colons_before_params: bool,
                  depth: usize)
                  -> io::Result<()>
    {
        try!(self.maybe_print_comment(path.span.lo));

        // a global path starts with `::`
        let mut first = !path.global;
        for segment in &path.segments[..path.segments.len()-depth] {
            if first {
                first = false
            } else {
                try!(word(&mut self.s, "::"))
            }

            try!(self.print_ident(segment.identifier));

            try!(self.print_path_parameters(&segment.parameters, colons_before_params));
        }

        Ok(())
    }

    /// Prints a qualified path `<Ty as Trait>::item`.
    fn print_qpath(&mut self,
                   path: &ast::Path,
                   qself: &ast::QSelf,
                   colons_before_params: bool)
                   -> io::Result<()>
    {
        try!(word(&mut self.s, "<"));
        try!(self.print_type(&qself.ty));
        if qself.position > 0 {
            try!(space(&mut self.s));
            try!(self.word_space("as"));
            let depth = path.segments.len() - qself.position;
            try!(self.print_path(&path, false, depth));
        }
        try!(word(&mut self.s, ">"));
        try!(word(&mut self.s, "::"));
        let item_segment = path.segments.last().unwrap();
        try!(self.print_ident(item_segment.identifier));
        self.print_path_parameters(&item_segment.parameters, colons_before_params)
    }

    /// Prints generic arguments: `<'a, T, Assoc = U>` or `(A, B) -> C`.
    /// `colons_before_params` selects the turbofish form `::<...>`.
    fn print_path_parameters(&mut self,
                             parameters: &ast::PathParameters,
                             colons_before_params: bool)
                             -> io::Result<()>
    {
        if parameters.is_empty() {
            return Ok(());
        }

        if colons_before_params {
            try!(word(&mut self.s, "::"))
        }

        match *parameters {
            ast::AngleBracketedParameters(ref data) => {
                try!(word(&mut self.s, "<"));

                // lifetimes, then types, then associated-type bindings,
                // comma-separated across the three groups
                let mut comma = false;
                for lifetime in &data.lifetimes {
                    if comma {
                        try!(self.word_space(","))
                    }
                    try!(self.print_lifetime(lifetime));
                    comma = true;
                }

                if !data.types.is_empty() {
                    if comma {
                        try!(self.word_space(","))
                    }
                    try!(self.commasep(
                        Inconsistent,
                        &data.types,
                        |s, ty| s.print_type(&**ty)));
                    comma = true;
                }

                for binding in data.bindings.iter() {
                    if comma {
                        try!(self.word_space(","))
                    }
                    try!(self.print_ident(binding.ident));
                    try!(space(&mut self.s));
                    try!(self.word_space("="));
                    try!(self.print_type(&*binding.ty));
                    comma = true;
                }

                try!(word(&mut self.s, ">"))
            }

            ast::ParenthesizedParameters(ref data) => {
                try!(word(&mut self.s, "("));
                try!(self.commasep(
                    Inconsistent,
                    &data.inputs,
                    |s, ty| s.print_type(&**ty)));
                try!(word(&mut self.s, ")"));

                match data.output {
                    None => { }
                    Some(ref ty) => {
                        try!(self.space_if_not_bol());
                        try!(self.word_space("->"));
                        try!(self.print_type(&**ty));
                    }
                }
            }
        }

        Ok(())
    }

    /// Prints a pattern; one match arm per PatKind.
    pub fn print_pat(&mut self, pat: &ast::Pat) -> io::Result<()> {
        try!(self.maybe_print_comment(pat.span.lo));
        try!(self.ann.pre(self, NodePat(pat)));
        /* Pat isn't normalized, but the beauty of it
         is that it doesn't matter */
        match pat.node {
            ast::PatWild => try!(word(&mut self.s, "_")),
            ast::PatIdent(binding_mode, ref path1, ref sub) => {
                match binding_mode {
                    ast::BindByRef(mutbl) => {
                        try!(self.word_nbsp("ref"));
                        try!(self.print_mutability(mutbl));
                    }
                    ast::BindByValue(ast::MutImmutable) => {}
                    ast::BindByValue(ast::MutMutable) => {
                        try!(self.word_nbsp("mut"));
                    }
                }
                try!(self.print_ident(path1.node));
                // sub-binding `name @ pat`
                match *sub {
                    Some(ref p) => {
                        try!(word(&mut self.s, "@"));
                        try!(self.print_pat(&**p));
                    }
                    None => ()
                }
            }
            ast::PatEnum(ref path, ref args_) => {
                try!(self.print_path(path, true, 0));
                match *args_ {
                    None => try!(word(&mut self.s, "(..)")),
                    Some(ref args) => {
                        if !args.is_empty() {
                            try!(self.popen());
                            try!(self.commasep(Inconsistent, &args[..],
                                              |s, p| s.print_pat(&**p)));
                            try!(self.pclose());
                        }
                    }
                }
            }
            ast::PatQPath(ref qself, ref path) => {
                try!(self.print_qpath(path, qself, false));
            }
            ast::PatStruct(ref path, ref fields, etc) => {
                try!(self.print_path(path, true, 0));
                try!(self.nbsp());
                try!(self.word_space("{"));
                try!(self.commasep_cmnt(
                    Consistent, &fields[..],
                    |s, f|
{
                        try!(s.cbox(INDENT_UNIT));
                        if !f.node.is_shorthand {
                            try!(s.print_ident(f.node.ident));
                            try!(s.word_nbsp(":"));
                        }
                        try!(s.print_pat(&*f.node.pat));
                        s.end()
                    },
                    |f| f.node.pat.span));
                if etc {
                    if !fields.is_empty() { try!(self.word_space(",")); }
                    try!(word(&mut self.s, ".."));
                }
                try!(space(&mut self.s));
                try!(word(&mut self.s, "}"));
            }
            ast::PatTup(ref elts) => {
                try!(self.popen());
                try!(self.commasep(Inconsistent,
                                   &elts[..],
                                   |s, p| s.print_pat(&**p)));
                // single-element tuple pattern needs a trailing comma
                if elts.len() == 1 {
                    try!(word(&mut self.s, ","));
                }
                try!(self.pclose());
            }
            ast::PatBox(ref inner) => {
                try!(word(&mut self.s, "box "));
                try!(self.print_pat(&**inner));
            }
            ast::PatRegion(ref inner, mutbl) => {
                try!(word(&mut self.s, "&"));
                if mutbl == ast::MutMutable {
                    try!(word(&mut self.s, "mut "));
                }
                try!(self.print_pat(&**inner));
            }
            ast::PatLit(ref e) => try!(self.print_expr(&**e)),
            ast::PatRange(ref begin, ref end) => {
                try!(self.print_expr(&**begin));
                try!(space(&mut self.s));
                try!(word(&mut self.s, "..."));
                try!(self.print_expr(&**end));
            }
            ast::PatVec(ref before, ref slice, ref after) => {
                // slice pattern `[a, b, mid.., c]`
                try!(word(&mut self.s, "["));
                try!(self.commasep(Inconsistent,
                                   &before[..],
                                   |s, p| s.print_pat(&**p)));
                if let Some(ref p) = *slice {
                    if !before.is_empty() { try!(self.word_space(",")); }
                    if p.node != ast::PatWild {
                        try!(self.print_pat(&**p));
                    }
                    try!(word(&mut self.s, ".."));
                    if !after.is_empty() { try!(self.word_space(",")); }
                }
                try!(self.commasep(Inconsistent,
                                   &after[..],
                                   |s, p| s.print_pat(&**p)));
                try!(word(&mut self.s, "]"));
            }
            ast::PatMac(ref m) => try!(self.print_mac(m, token::Paren)),
        }
        self.ann.post(self, NodePat(pat))
    }

    /// Prints one match arm: `pat | pat if guard => body,`.
    fn print_arm(&mut self, arm: &ast::Arm) -> io::Result<()> {
        // I have no idea why this check is necessary, but here it
        // is :(
        if arm.attrs.is_empty() {
            try!(space(&mut self.s));
        }
        try!(self.cbox(INDENT_UNIT));
        try!(self.ibox(0));
        try!(self.print_outer_attributes(&arm.attrs));
        let mut first = true;
        for p in &arm.pats {
            if first {
                first = false;
            } else {
                try!(space(&mut self.s));
                try!(self.word_space("|"));
            }
            try!(self.print_pat(&**p));
        }
        try!(space(&mut self.s));
        if let Some(ref e) = arm.guard {
            try!(self.word_space("if"));
            try!(self.print_expr(&**e));
            try!(space(&mut self.s));
        }
        try!(self.word_space("=>"));

        match arm.body.node {
            ast::ExprBlock(ref blk) => {
                // the block will close the pattern's ibox
                try!(self.print_block_unclosed_indent(&**blk, INDENT_UNIT));

                // If it is a user-provided unsafe block, print a comma after it
                if let ast::UnsafeBlock(ast::UserProvided) = blk.rules {
                    try!(word(&mut self.s, ","));
                }
            }
            _ => {
                try!(self.end()); // close the ibox for the pattern
                try!(self.print_expr(&*arm.body));
                try!(word(&mut self.s, ","));
            }
        }
        self.end() // close enclosing cbox
    }

    // Returns whether it printed anything
    fn print_explicit_self(&mut self,
                           explicit_self: &ast::ExplicitSelf_,
                           mutbl: ast::Mutability) -> io::Result<bool> {
        try!(self.print_mutability(mutbl));
        match *explicit_self {
            ast::SelfStatic => { return Ok(false); }
            ast::SelfValue(_) => {
                try!(word(&mut self.s, "self"));
            }
            ast::SelfRegion(ref lt, m, _) => {
                try!(word(&mut self.s, "&"));
                try!(self.print_opt_lifetime(lt));
                try!(self.print_mutability(m));
                try!(word(&mut self.s, "self"));
            }
            ast::SelfExplicit(ref typ, _) => {
                try!(word(&mut self.s, "self"));
                try!(self.word_space(":"));
                try!(self.print_type(&**typ));
            }
        }
        return Ok(true);
    }

    /// Prints a full fn header + signature + where-clause
    /// (no body; callers print the block separately).
    pub fn print_fn(&mut self,
                    decl: &ast::FnDecl,
                    unsafety: ast::Unsafety,
                    constness: ast::Constness,
                    abi: abi::Abi,
                    name: Option<ast::Ident>,
                    generics: &ast::Generics,
                    opt_explicit_self: Option<&ast::ExplicitSelf_>,
                    vis: ast::Visibility) -> io::Result<()> {
        try!(self.print_fn_header_info(unsafety, constness, abi, vis));

        if let Some(name) = name {
            try!(self.nbsp());
            try!(self.print_ident(name));
        }
        try!(self.print_generics(generics));
        try!(self.print_fn_args_and_ret(decl, opt_explicit_self));
        self.print_where_clause(&generics.where_clause)
    }

    /// Prints the argument list, including an explicit `self` if present.
    pub fn print_fn_args(&mut self, decl: &ast::FnDecl,
                         opt_explicit_self: Option<&ast::ExplicitSelf_>) ->
io::Result<()> {
        // It is unfortunate to duplicate the commasep logic, but we want the
        // self type and the args all in the same box.
        try!(self.rbox(0, Inconsistent));
        let mut first = true;
        if let Some(explicit_self) = opt_explicit_self {
            // recover the `mut` of a by-value self from the first input's pattern
            let m = match *explicit_self {
                ast::SelfStatic => ast::MutImmutable,
                _ => match decl.inputs[0].pat.node {
                    ast::PatIdent(ast::BindByValue(m), _, _) => m,
                    _ => ast::MutImmutable
                }
            };
            first = !try!(self.print_explicit_self(explicit_self, m));
        }

        // HACK(eddyb) ignore the separately printed self argument.
        let args = if first {
            &decl.inputs[..]
        } else {
            &decl.inputs[1..]
        };

        for arg in args {
            if first { first = false; } else { try!(self.word_space(",")); }
            try!(self.print_arg(arg));
        }

        self.end()
    }

    /// Prints `(args...)` plus variadic `...` and the return type.
    pub fn print_fn_args_and_ret(&mut self,
                                 decl: &ast::FnDecl,
                                 opt_explicit_self: Option<&ast::ExplicitSelf_>)
        -> io::Result<()> {
        try!(self.popen());
        try!(self.print_fn_args(decl, opt_explicit_self));
        if decl.variadic {
            try!(word(&mut self.s, ", ..."));
        }
        try!(self.pclose());

        self.print_fn_output(decl)
    }

    /// Prints closure arguments `|args| -> ret` (return type only when
    /// explicitly written).
    pub fn print_fn_block_args(
            &mut self,
            decl: &ast::FnDecl)
            -> io::Result<()> {
        try!(word(&mut self.s, "|"));
        try!(self.print_fn_args(decl, None));
        try!(word(&mut self.s, "|"));

        if let ast::DefaultReturn(..) = decl.output {
            return Ok(());
        }

        try!(self.space_if_not_bol());
        try!(self.word_space("->"));
        match decl.output {
            ast::Return(ref ty) => {
                try!(self.print_type(&**ty));
                self.maybe_print_comment(ty.span.lo)
            }
            ast::DefaultReturn(..) => unreachable!(),
            ast::NoReturn(span) => {
                try!(self.word_nbsp("!"));
                self.maybe_print_comment(span.lo)
            }
        }
    }

    /// Prints `move` for by-value captures; nothing for by-reference.
    pub fn print_capture_clause(&mut self, capture_clause: ast::CaptureClause)
                                -> io::Result<()> {
        match capture_clause {
            ast::CaptureByValue => self.word_space("move"),
            ast::CaptureByRef => Ok(()),
        }
    }

    /// Prints `prefix Bound + ?Bound + 'lt` when any bounds exist.
    pub fn print_bounds(&mut self,
                        prefix: &str,
                        bounds: &[ast::TyParamBound])
                        -> io::Result<()> {
        if !bounds.is_empty() {
            try!(word(&mut self.s, prefix));
            let mut first = true;
            for bound in bounds {
                try!(self.nbsp());
                if first {
                    first = false;
                } else {
                    try!(self.word_space("+"));
                }

                try!(match *bound {
                    TraitTyParamBound(ref tref, TraitBoundModifier::None) => {
                        self.print_poly_trait_ref(tref)
                    }
                    TraitTyParamBound(ref tref, TraitBoundModifier::Maybe) => {
                        try!(word(&mut self.s, "?"));
                        self.print_poly_trait_ref(tref)
                    }
                    RegionTyParamBound(ref lt) => {
                        self.print_lifetime(lt)
                    }
                })
            }
            Ok(())
        } else {
            Ok(())
        }
    }

    pub fn print_lifetime(&mut self,
                          lifetime: &ast::Lifetime)
                          -> io::Result<()>
    {
        self.print_name(lifetime.name)
    }

    /// Prints a lifetime parameter with its bounds: `'a: 'b + 'c`.
    pub fn print_lifetime_def(&mut self,
                              lifetime: &ast::LifetimeDef)
                              -> io::Result<()>
    {
        try!(self.print_lifetime(&lifetime.lifetime));
        // first separator is `:`, subsequent ones `+`
        let mut sep = ":";
        for v in &lifetime.bounds {
            try!(word(&mut self.s, sep));
            try!(self.print_lifetime(v));
            sep = "+";
        }
        Ok(())
    }

    /// Prints `<lifetimes..., ty_params...>` when the generics are non-empty.
    pub fn print_generics(&mut self,
                          generics: &ast::Generics)
                          -> io::Result<()>
    {
        let total = generics.lifetimes.len() + generics.ty_params.len();
        if total == 0 {
            return Ok(());
        }

        try!(word(&mut self.s, "<"));

        // index space covers lifetimes first, then type params
        let mut ints = Vec::new();
        for i in 0..total {
            ints.push(i);
        }

        try!(self.commasep(Inconsistent, &ints[..], |s, &idx| {
            if idx < generics.lifetimes.len() {
                let lifetime = &generics.lifetimes[idx];
                s.print_lifetime_def(lifetime)
            } else {
                let idx = idx - generics.lifetimes.len();
                let param = &generics.ty_params[idx];
                s.print_ty_param(param)
            }
        }));

        try!(word(&mut self.s, ">"));
        Ok(())
    }

    /// Prints a type parameter with bounds and optional default.
    pub fn print_ty_param(&mut self, param: &ast::TyParam) -> io::Result<()> {
        try!(self.print_ident(param.ident));
try!(self.print_bounds(":", &param.bounds));
        match param.default {
            Some(ref default) => {
                try!(space(&mut self.s));
                try!(self.word_space("="));
                self.print_type(&**default)
            }
            _ => Ok(())
        }
    }

    /// Prints a `where` clause with its comma-separated predicates.
    pub fn print_where_clause(&mut self, where_clause: &ast::WhereClause)
                              -> io::Result<()> {
        if where_clause.predicates.is_empty() {
            return Ok(())
        }

        try!(space(&mut self.s));
        try!(self.word_space("where"));

        for (i, predicate) in where_clause.predicates.iter().enumerate() {
            if i != 0 {
                try!(self.word_space(","));
            }

            match *predicate {
                ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate{ref bound_lifetimes,
                                                                             ref bounded_ty,
                                                                             ref bounds,
                                                                             ..}) => {
                    try!(self.print_formal_lifetime_list(bound_lifetimes));
                    try!(self.print_type(&**bounded_ty));
                    try!(self.print_bounds(":", bounds));
                }
                ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate{ref lifetime,
                                                                               ref bounds,
                                                                               ..}) => {
                    try!(self.print_lifetime(lifetime));
                    try!(word(&mut self.s, ":"));

                    // NOTE(review): the separator here is printed *after* every
                    // bound except the first (yielding e.g. `'a: 'b 'c:`), and
                    // uses `:` where `+` would be expected between bounds —
                    // looks like a separator-placement bug; verify against
                    // the parser's accepted where-clause grammar.
                    for (i, bound) in bounds.iter().enumerate() {
                        try!(self.print_lifetime(bound));

                        if i != 0 {
                            try!(word(&mut self.s, ":"));
                        }
                    }
                }
                ast::WherePredicate::EqPredicate(ast::WhereEqPredicate{ref path, ref ty, ..}) => {
                    try!(self.print_path(path, false, 0));
                    try!(space(&mut self.s));
                    try!(self.word_space("="));
                    try!(self.print_type(&**ty));
                }
            }
        }

        Ok(())
    }

    /// Prints a `use` path: simple (with optional rename), glob, or list.
    pub fn print_view_path(&mut self, vp: &ast::ViewPath) -> io::Result<()> {
        match vp.node {
            ast::ViewPathSimple(ident, ref path) => {
                try!(self.print_path(path, false, 0));

                // only emit `as name` when it actually renames
                if path.segments.last().unwrap().identifier.name != ident.name {
                    try!(space(&mut self.s));
                    try!(self.word_space("as"));
                    try!(self.print_ident(ident));
                }

                Ok(())
            }

            ast::ViewPathGlob(ref path) => {
                try!(self.print_path(path, false, 0));
                word(&mut self.s, "::*")
            }

            ast::ViewPathList(ref path, ref idents) => {
                if path.segments.is_empty() {
                    try!(word(&mut self.s, "{"));
                } else {
                    try!(self.print_path(path, false, 0));
                    try!(word(&mut self.s, "::{"));
                }
                try!(self.commasep(Inconsistent, &idents[..], |s, w| {
                    match w.node {
                        ast::PathListIdent { name, rename, .. } => {
                            try!(s.print_ident(name));
                            if let Some(ident) = rename {
                                try!(space(&mut s.s));
                                try!(s.word_space("as"));
                                try!(s.print_ident(ident));
                            }
                            Ok(())
                        },
                        ast::PathListMod { rename, .. } => {
                            try!(word(&mut s.s, "self"));
                            if let Some(ident) = rename {
                                try!(space(&mut s.s));
                                try!(s.word_space("as"));
                                try!(s.print_ident(ident));
                            }
                            Ok(())
                        }
                    }
                }));
                word(&mut self.s, "}")
            }
        }
    }

    /// Prints `mut ` for mutable, nothing for immutable.
    pub fn print_mutability(&mut self,
                            mutbl: ast::Mutability) -> io::Result<()> {
        match mutbl {
            ast::MutMutable => self.word_nbsp("mut"),
            ast::MutImmutable => Ok(()),
        }
    }

    pub fn print_mt(&mut self, mt: &ast::MutTy) -> io::Result<()> {
        try!(self.print_mutability(mt.mutbl));
        self.print_type(&*mt.ty)
    }

    /// Prints one fn argument `pat: ty`; a pattern alone for inferred types,
    /// and the type alone for the parser's "invalid" placeholder ident.
    pub fn print_arg(&mut self, input: &ast::Arg) -> io::Result<()> {
        try!(self.ibox(INDENT_UNIT));
        match input.ty.node {
            ast::TyInfer => try!(self.print_pat(&*input.pat)),
            _ => {
                match input.pat.node {
                    ast::PatIdent(_, ref path1, _) if
                        path1.node.name ==
                            parse::token::special_idents::invalid.name => {
                        // Do nothing.
                    }
                    _ => {
                        try!(self.print_pat(&*input.pat));
                        try!(word(&mut self.s, ":"));
                        try!(space(&mut self.s));
                    }
                }
                try!(self.print_type(&*input.ty));
            }
        }
        self.end()
    }

    /// Prints `-> ty` (or `-> !`); nothing for the implicit unit return.
    pub fn print_fn_output(&mut self, decl: &ast::FnDecl) -> io::Result<()> {
        if let ast::DefaultReturn(..) = decl.output {
            return Ok(());
        }

        try!(self.space_if_not_bol());
        try!(self.ibox(INDENT_UNIT));
        try!(self.word_space("->"));
        match decl.output {
            ast::NoReturn(_) =>
                try!(self.word_nbsp("!")),
            ast::DefaultReturn(..)
=> unreachable!(),
            ast::Return(ref ty) =>
                try!(self.print_type(&**ty))
        }
        try!(self.end());

        match decl.output {
            ast::Return(ref output) => self.maybe_print_comment(output.span.lo),
            _ => Ok(())
        }
    }

    /// Prints a bare fn type `for<...> extern "abi" unsafe fn(...) -> ...`;
    /// generics are printed in the `for<>` and then emptied before print_fn.
    pub fn print_ty_fn(&mut self,
                       abi: abi::Abi,
                       unsafety: ast::Unsafety,
                       decl: &ast::FnDecl,
                       name: Option<ast::Ident>,
                       generics: &ast::Generics,
                       opt_explicit_self: Option<&ast::ExplicitSelf_>)
                       -> io::Result<()> {
        try!(self.ibox(INDENT_UNIT));
        if !generics.lifetimes.is_empty() || !generics.ty_params.is_empty() {
            try!(word(&mut self.s, "for"));
            try!(self.print_generics(generics));
        }
        // shadow with empty generics so print_fn does not re-print them
        let generics = ast::Generics {
            lifetimes: Vec::new(),
            ty_params: OwnedSlice::empty(),
            where_clause: ast::WhereClause {
                id: ast::DUMMY_NODE_ID,
                predicates: Vec::new(),
            },
        };
        try!(self.print_fn(decl,
                           unsafety,
                           ast::Constness::NotConst,
                           abi,
                           name,
                           &generics,
                           opt_explicit_self,
                           ast::Inherited));
        self.end()
    }

    /// Emits a pending trailing comment when it sits on the same source
    /// line as `span` and before `next_pos`.
    pub fn maybe_print_trailing_comment(&mut self, span: codemap::Span,
                                        next_pos: Option<BytePos>)
        -> io::Result<()> {
        let cm = match self.cm {
            Some(cm) => cm,
            _ => return Ok(())
        };
        match self.next_comment() {
            Some(ref cmnt) => {
                if (*cmnt).style != comments::Trailing { return Ok(()) }
                let span_line = cm.lookup_char_pos(span.hi);
                let comment_line = cm.lookup_char_pos((*cmnt).pos);
                let mut next = (*cmnt).pos + BytePos(1);
                match next_pos { None => (), Some(p) => next = p }
                if span.hi < (*cmnt).pos && (*cmnt).pos < next &&
                    span_line.line == comment_line.line {
                    try!(self.print_comment(cmnt));
                    self.cur_cmnt_and_lit.cur_cmnt += 1;
                }
            }
            _ => ()
        }
        Ok(())
    }

    /// Flushes any comments left after the last item.
    pub fn print_remaining_comments(&mut self) -> io::Result<()> {
        // If there aren't any remaining comments, then we need to manually
        // make sure there is a line break at the end.
        if self.next_comment().is_none() {
            try!(hardbreak(&mut self.s));
        }
        loop {
            match self.next_comment() {
                Some(ref cmnt) => {
                    try!(self.print_comment(cmnt));
                    self.cur_cmnt_and_lit.cur_cmnt += 1;
                }
                _ => break
            }
        }
        Ok(())
    }

    /// Prints `extern "abi"` unless the abi is the default (Rust).
    pub fn print_opt_abi_and_extern_if_nondefault(&mut self,
                                                  opt_abi: Option<abi::Abi>)
        -> io::Result<()> {
        match opt_abi {
            Some(abi::Rust) => Ok(()),
            Some(abi) => {
                try!(self.word_nbsp("extern"));
                self.word_nbsp(&abi.to_string())
            }
            None => Ok(())
        }
    }

    /// Prints `extern "abi"` whenever an abi is given.
    pub fn print_extern_opt_abi(&mut self,
                                opt_abi: Option<abi::Abi>) -> io::Result<()> {
        match opt_abi {
            Some(abi) => {
                try!(self.word_nbsp("extern"));
                self.word_nbsp(&abi.to_string())
            }
            None => Ok(())
        }
    }

    /// Prints the fn header: visibility, `const`, `unsafe`,
    /// non-default `extern "abi"`, then `fn`.
    pub fn print_fn_header_info(&mut self,
                                unsafety: ast::Unsafety,
                                constness: ast::Constness,
                                abi: abi::Abi,
                                vis: ast::Visibility) -> io::Result<()> {
        try!(word(&mut self.s, &visibility_qualified(vis, "")));

        match constness {
            ast::Constness::NotConst => {}
            ast::Constness::Const => try!(self.word_nbsp("const"))
        }

        try!(self.print_unsafety(unsafety));

        if abi != abi::Rust {
            try!(self.word_nbsp("extern"));
            try!(self.word_nbsp(&abi.to_string()));
        }

        word(&mut self.s, "fn")
    }

    pub fn print_unsafety(&mut self, s: ast::Unsafety) -> io::Result<()> {
        match s {
            ast::Unsafety::Normal => Ok(()),
            ast::Unsafety::Unsafe => self.word_nbsp("unsafe"),
        }
    }
}

// Returns `s` repeated `n` times (used for indentation padding).
fn repeat(s: &str, n: usize) -> String {
    iter::repeat(s).take(n).collect()
}

#[cfg(test)]
mod tests {
    use super::*;

    use ast;
    use ast_util;
    use codemap;
    use parse::token;

    #[test]
    fn test_fun_to_string() {
        let abba_ident = token::str_to_ident("abba");

        let decl = ast::FnDecl {
            inputs: Vec::new(),
            output: ast::DefaultReturn(codemap::DUMMY_SP),
            variadic: false
        };
        let generics = ast_util::empty_generics();
        assert_eq!(fun_to_string(&decl,
                                 ast::Unsafety::Normal,
                                 ast::Constness::NotConst,
                                 abba_ident,
                                 None,
                                 &generics),
                   "fn abba()");
    }

    #[test]
    fn test_variant_to_string() {
        let ident = token::str_to_ident("principal_skinner");

        let var = codemap::respan(codemap::DUMMY_SP, ast::Variant_ {
            name:
ident, attrs: Vec::new(), // making this up as I go.... ? data: ast::VariantData::Unit(ast::DUMMY_NODE_ID), disr_expr: None, }); let varstr = variant_to_string(&var); assert_eq!(varstr, "principal_skinner"); } #[test] fn test_signed_int_to_string() { let pos_int = ast::LitInt(42, ast::SignedIntLit(ast::TyI32, ast::Plus)); let neg_int = ast::LitInt((!42 + 1) as u64, ast::SignedIntLit(ast::TyI32, ast::Minus)); assert_eq!(format!("-{}", lit_to_string(&codemap::dummy_spanned(pos_int))), lit_to_string(&codemap::dummy_spanned(neg_int))); } }
{ try!(word(&mut self.s, &ident.name.as_str())); self.ann.post(self, NodeIdent(&ident)) }
cablegatedata.py
countries = ["Afghanistan", "Albania", "Algeria", "Andorra", "Angola", "Antigua and Barbuda", "Argentina", "Armenia", "Australia", "Austria", "Azerbaijan", "Bahamas", "Bahrain", "Bangladesh", "Barbados", "Belarus", "Belgium", "Belize", "Benin", "Bhutan", "Bolivia", "Bosnia and Herzegovina", "Botswana", "Brazil", "Brunei", "Bulgaria", "Burkina Faso", "Burundi", "Cambodia", "Cameroon", "Canada", "Cape Verde", "Central African Republic", "Chad", "Chile", "China", "Colombia", "Comoros", "Congo", "Cook Islands", "Costa Rica", "Cote d'Ivoire", "Croatia", "Cuba", "Cyprus", "Czech Republic", "Democratic Republic of Congo", "Denmark", "Djibouti", "Dominica", "Dominican Republic", "Ecuador", "Egypt", "El Salvador", "Equatorial Guinea", "Eritrea", "Estonia", "Ethiopia", "Fiji", "Finland", "France", "Gabon", "Gambia", "Georgia", "Germany", "Ghana", "Greece", "Grenada", "Guatemala", "Guinea", "Guinea-Bissau", "Guyana", "Haiti", "Honduras", "Hungary", "Iceland", "India", "Indonesia", "Iran", "Iraq", "Ireland", "Israel", "Italy", "Jamaica", "Japan", "Jordan", "Kazakhstan", "Kenya", "Kiribati", "Kuwait", "Kyrgyzstan", "Laos", "Latvia", "Lebanon", "Lesotho", "Liberia", "Libya", "Lithuania", "Luxembourg", "Macedonia", "Madagascar", "Malawi", "Malaysia", "Maldives", "Mali", "Malta", "Marshall Islands", "Mauritania", "Mauritius", "Mexico", "Micronesia (country)", "Moldova", "Mongolia", "Montenegro", "Morocco", "Mozambique", "Myanmar", "Namibia", "Nauru", "Nepal", "Netherlands", "New Zealand", "Nicaragua", "Niger", "Nigeria", "Niue", "North Korea", "Norway", "Oman", "Pakistan", "Palau", "Panama", "Papua New Guinea", "Paraguay", "Peru", "Philippines", "Poland", "Portugal", "Qatar", "Romania", "Russia", "Rwanda", "Saint Kitts and Nevis", "Saint Lucia", "Saint Vincent and the Grenadines", "Samoa", "Sao Tome and Principe", "Saudi Arabia", "Senegal", "Serbia", "Seychelles", "Sierra Leone", "Singapore", "Slovakia", "Slovenia", "Solomon Islands", "Somalia", "South Africa", "South Korea", 
"Spain", "Sri Lanka", "Sudan (former)", "Suriname", "Swaziland", "Sweden", "Switzerland", "Syria", "Tajikistan", "Tanzania", "Thailand", "Timor", "Togo", "Tonga", "Trinidad and Tobago", "Tunisia", "Turkey", "Turkmenistan", "Tuvalu", "Uganda", "Ukraine", "United Arab Emirates", "United Kingdom", "United States", "Uruguay", "Uzbekistan", "Vanuatu", "Venezuela", "Vietnam", "Yemen", "Zambia", "Zimbabwe"] res = {'06HONGKONG4795': {'date': {'month': 'DEC', 'year': '2006'}, 'entity_involved': ['RHMFIUU/HQ', 'USDOC', 'OEA', 'LHINES/DFARROW USDOC', 'FCS', 'the Export Administration Act', 'the Office of Enforcement Analysis', 'the USDOC Bureau of Industry and Security', 'BIS', 'Export Control', 'Advanced Energy-Shenzhen ', 'Baltrans', 'ECCN', 'International Rectifier of Leominster', 'International Rectifier', 'Advanced Energy', 'ECO', 'Airfreight Operations', 'Operations Manager', 'Airfreight', 'Federal Express', "Advanced Energy's", 'BIS '], 'from': 'AMCONSUL HONG KONG', 'keywords': ['subject', 'ankel', 'providers', 'street', 'route'], 'most_common_words': [('Advanced', 14), ('Energy', 14), ('Baltrans', 10), ('Mr.', 10), ('Lam', 9), ('shipment', 8), ('Hong', 8), ('Kong', 8), ('items', 8), ('ECO', 6), ('USDOC', 5), ('export', 5), ('OEA', 4), ('provided', 4)], 'people_involved': ['RUCPDOC', 'RUEHC', 'SIPDIS ', 'WILLIAM ZARIT ', 'BMGT BEXP', 'ETRD ETTC', 'Philip Ankel', 'Tai Yip Street', 'Theodore Shum', 'Gordon Lam', 'Lam', 'Cunningham'], 'place_involved': ['KOWLOON', 'HONG KONG', 'CHINA', 'MASSACHUSETTS', 'UNITED STATES', 'SHENZHEN'], 'place_of_document': 'HONGKONG', 'subject': 'EXTRANCHECK: POST SHIPMENT VERIFICATION: ADVANCED ' 'ENERGY-SHENZHEN C/O ' 'BALTRANS LOGISTRIC ', 'tags': ['BMGT', 'BEXP', 'HK', 'ETRD', 'ETTC']}, '06HOCHIMINHCITY917': {'date': {'month': 'AUG', 'year': '2006'}, 'entity_involved': ['RUEHC/SECSTATE WASHDC PRIORITY', 'RUCNARF', 'RUEHHM/AMCONSUL HO', 'PHUM PGOV PREF KIRF', 'Consul General', 'State', 'the Montagnard Foundation', 'ConGen', 'GVN', 
'Southern Evangelical Church of Vietnam', 'Dak Nong', 'SBU', 'Vietnamese Embassy', 'PNTR', 'Congress', 'WINNICK'], 'from': 'AMCONSUL HO CHI MINH CITY', 'keywords': ['subject', 'migrants', 'congress', 'collective', 'leader'], 'most_common_words': [('police', 12), ('ethnic', 7), ('minority', 7), ('Adrong', 7), ('contact', 7), ('province', 6), ('HCMC', 5), ('United', 5), ('States', 5), ('Central', 5), ('Highlands', 5), ('SECV', 5), ('contacts', 4)], 'people_involved': ['RUEHCHI RUEHDT RUEHNH', 'HO CHI MINH CITY', '000917 ', 'SIPDIS ', 'E.O.', 'DECL', 'Seth Winnick', 'Y Ngo Adrong', 'Adrong', 'Siu Y Kim', 'Gia Lai', 'Chu Se', 'Kim', 'Dega', 'Phu Yen'], 'place_involved': ['CENTRAL HIGHLANDS', 'HCMC', 'UNITED STATES', 'DAK LAK', 'CAMBODIA', 'VIETNAM', 'WASHINGTON'], 'place_of_document': 'HOCHIMINHCITY', 'subject': 'POLICE BRUTALITY RISING; CENTRAL HIGHLANDS DEATH CONFIRMED ', 'tags': ['PHUM', 'PGOV', 'PREF', 'KIRF', 'VM']}, '06JERUSALEM906': {'date': {'month': 'MAR', 'year': '2006'}, 'entity_involved': ['RUEHC/SECSTATE WASHDC', '0698', 'RHEHNSC', 'NSC', 'RUEHBS/USEU BRUSSELS', 'FRONT OFFICE', 'NEA/IPA', 'WILLIAMS/GREENE/WAECHTER', 'ABRAMS', 'PHUM PREF EAID ECON', 'SBU', 'the World Food Program', 'WFP', 'ECON', 'the PA Ministry of National Economy', 'UNRWA', 'Market Monitoring'], 'from': 'AMCONSUL JERUSALEM', 'keywords': ['subject', 'vulnerability', 'collective', 'works', 'phum'], 'most_common_words': [('days', 11), ('food', 7), ('IMMEDIATE', 5), ('Gaza', 5), ('price', 5), ('flour', 4), ('WASHDC', 3), ('WFP', 3), ('March', 3), ('Karni', 3), ('stocks', 3), ('report', 3), ('percent', 3), ('JERUSALEM', 2)], 'people_involved': ['000906 ', 'SIPDIS ', 'NEA', 'DORAN', 'MUSTAFA ', 'Arnold Vercken', 'Karni'], 'place_involved': ['GAZA', 'WEST BANK/GAZA COUNTRY', 'U.S.'], 'place_of_document': 'JERUSALEM', 'subject': 'KARNI CLOSURE CAUSING FOOD SHORTAGE IN GAZA ', 'tags': ['PHUM', 'PREF', 'EAID', 'ECON', 'KWBG']}, '09BERLIN831': {'date': {'month': 'JUL', 'year': '2009'}, 
'entity_involved': ['RUEHC/SECSTATE WASHDC', 'RUEHAD', 'AMEMBASSY ABU DHABI', 'RUEHUJA', 'AMEMBASSY ABUJA PRIORITY', 'RUEHAK', 'AMEMBASSY ANKARA', 'RUEHTH', 'AMEMBASSY ATHENS', 'RUEHBS/', 'AMEMBASSY', 'RUEHEG', 'AMEMBASSY CAIRO', 'RUEHBY', 'AMEMBASSY CANBERRA', 'RUEHCP', 'AMEMBASSY COPENHAGEN', 'RUEHDJ', 'RUEHKL', 'AMEMBASSY KUALA LUMPUR', 'RUEHLI', 'AMEMBASSY LONDON', 'RUEHMD', 'RUEHMV', 'AMEMBASSY MONROVIA', 'RUEHMO', 'RUEHMS/AMEMBASSY MUSCAT', 'RUEHNR', 'AMEMBASSY NAIROBI', 'RUEHNE', 'AMEMBASSY NEW DELHI', 'RUEHNY', 'AMEMBASSY OSLO', 'RUEHOT', 'AMEMBASSY OTTAWA', 'RUEHZP', 'AMEMBASSY PANAMA', 'RUEHFR', 'AMEMBASSY PARIS', 'RUEHRH', 'AMEMBASSY RIYADH', 'RUEHRO', 'RUEHYN', 'RUEHGP/AMEMBASSY SINGAPORE', 'RUEHSM', 'AMEMBASSY STOCKHOLM', 'RUEHTC', 'RUEHKO/AMEMBASSY TOKYO', 'RUCNDT/USMISSION', 'EWWT', 'PHSA', 'PHUM PREL', 'GM', 'CGPCS', 'ON PARTICIPATION ISSUE', 'MFA UN Security Council Action', 'the Contact Group for Piracy', 'Turkish', 'German', 'the International Criminal Tribunal'], 'from': 'AMEMBASSY BERLIN', 'keywords': ['subject', 'expertise', 'stockhausen', '091715z', 'ruehul'], 'most_common_words': [('AMEMBASSY', 32), ('PRIORITY', 32), ('Germany', 5), ('national', 5), ('Stockhausen', 4), ('said', 4), ('cases', 4), ('region', 4), ('BERLIN', 3), ('CGPCS', 3), ('U.S.', 3), ('countries', 3), ('piracy', 3)], 'people_involved': ['RUEHBJ', '0210RUEHLO/', 'SIPDIS ', 'E.O.', 'DECL', 'STAN OTTO', 'Dirk Stockhausen', 'Koenig'], 'place_involved': ['BRUSSELS', 'MOSCOW', 'HAGUE', 'NEW YORK', 'BERLIN', 'GERMANY', 'U.S.', 'SOMALIA', 'NETHERLANDS', 'KENYA', 'CAMBODIA', 'ARUSHA', 'TANZANIA',
'RWANDA'], 'place_of_document': 'BERLIN', 'subject': 'CGPCS: GERMANY AGREES ON PARTICIPATION ISSUE, BUT IS STILL ' 'OFFSIDE REGARDING ' 'INTERNATIONAL ' 'TRIBUNAL ', 'tags': ['EWWT', 'MARR', 'PGOV', 'PHSA', 'PHUM', 'PREL', 'MOPS', 'GM']}}
source.py
import inspect from pathlib import Path from typing import Optional, Tuple, Union from discord import Embed from discord.ext import commands from bot.bot import Bot from bot.constants import URLs from bot.converters import SourceConverter from bot.exts.info.tags import TagIdentifier SourceType = Union[commands.HelpCommand, commands.Command, commands.Cog, TagIdentifier, commands.ExtensionNotLoaded] class
(commands.Cog): """Displays information about the bot's source code.""" def __init__(self, bot: Bot): self.bot = bot @commands.command(name="source", aliases=("src",)) async def source_command(self, ctx: commands.Context, *, source_item: SourceConverter = None) -> None: """Display information and a GitHub link to the source code of a command, tag, or cog.""" if not source_item: embed = Embed(title="Bot's GitHub Repository") embed.add_field(name="Repository", value=f"[Go to GitHub]({URLs.github_bot_repo})") embed.set_thumbnail(url="https://avatars1.githubusercontent.com/u/9919") await ctx.send(embed=embed) return embed = await self.build_embed(source_item) await ctx.send(embed=embed) def get_source_link(self, source_item: SourceType) -> Tuple[str, str, Optional[int]]: """ Build GitHub link of source item, return this link, file location and first line number. Raise BadArgument if `source_item` is a dynamically-created object (e.g. via internal eval). """ if isinstance(source_item, commands.Command): source_item = inspect.unwrap(source_item.callback) src = source_item.__code__ filename = src.co_filename elif isinstance(source_item, TagIdentifier): tags_cog = self.bot.get_cog("Tags") filename = tags_cog.tags[source_item].file_path else: src = type(source_item) try: filename = inspect.getsourcefile(src) except TypeError: raise commands.BadArgument("Cannot get source for a dynamically-created object.") if not isinstance(source_item, TagIdentifier): try: lines, first_line_no = inspect.getsourcelines(src) except OSError: raise commands.BadArgument("Cannot get source for a dynamically-created object.") lines_extension = f"#L{first_line_no}-L{first_line_no+len(lines)-1}" else: first_line_no = None lines_extension = "" # Handle tag file location differently than others to avoid errors in some cases if not first_line_no: file_location = Path(filename).relative_to("bot/") else: file_location = Path(filename).relative_to(Path.cwd()).as_posix() url = 
f"{URLs.github_bot_repo}/blob/main/{file_location}{lines_extension}" return url, file_location, first_line_no or None async def build_embed(self, source_object: SourceType) -> Optional[Embed]: """Build embed based on source object.""" url, location, first_line = self.get_source_link(source_object) if isinstance(source_object, commands.HelpCommand): title = "Help Command" description = source_object.__doc__.splitlines()[1] elif isinstance(source_object, commands.Command): description = source_object.short_doc title = f"Command: {source_object.qualified_name}" elif isinstance(source_object, TagIdentifier): title = f"Tag: {source_object}" description = "" else: title = f"Cog: {source_object.qualified_name}" description = source_object.description.splitlines()[0] embed = Embed(title=title, description=description) embed.add_field(name="Source Code", value=f"[Go to GitHub]({url})") line_text = f":{first_line}" if first_line else "" embed.set_footer(text=f"{location}{line_text}") return embed async def setup(bot: Bot) -> None: """Load the BotSource cog.""" await bot.add_cog(BotSource(bot))
BotSource
test_mollufy.py
import unittest from mollufy import mollufy class MollufyTestSimple(unittest.TestCase): def test_mollufy_word_2chars(self): # TEST 1: Mollufy simple 2-characters noun word self.assertEqual(mollufy.mollufy("블루"), "블?루") self.assertEqual(mollufy.mollufy("하루"), "하?루") self.assertEqual(mollufy.mollufy("감정"), "감?정") def test_mollufy_word_manychars_without_param(self): # TEST 2: Ensure 3-characters-or-above noun word not to be mollufied without parameter self.assertEqual(mollufy.mollufy("마술사"), "마술사") self.assertEqual(mollufy.mollufy("모니터"), "모니터") self.assertEqual(mollufy.mollufy("아이스크림"), "아이스크림") def test_mollufy_word_manychars(self): # TEST 3: Mollufy 3-characters-or-above noun word with parameter self.assertEqual(mollufy.mollufy("슬리퍼", True), "슬리?퍼") self.assertEqual(mollufy.mollufy("이구동성", True), "이구동?성") self.assertEqual(mollufy.mollufy("아메리카노", True), "아메리카?노") def test_mollufy_non_noun_word(self): # TEST 4: Ensure non-noun words not to be mollufied self.assertEqual(mollufy.mollufy("좋아"), "좋아") self.assertEqual(mollufy.mollufy("그만해", True), "그만해") self.ass
ters noun word self.assertEqual(mollufy.mollufy("안녕하세요"), "안?녕하세요") self.assertEqual(mollufy.mollufy("바다에 갑시다"), "바?다에 갑시다") self.assertEqual(mollufy.mollufy("재미있는 게임인데"), "재미있는 게?임인데") def test_mollufy_sentence_with_one_manychar_word(self): # TEST 6: Mollufy sentence with one 3-characters-or-above noun word self.assertEqual(mollufy.mollufy("참관인이세요?", True), "참관?인이세요?") self.assertEqual(mollufy.mollufy("보드카 너무 써", True), "보드?카 너무 써") self.assertEqual(mollufy.mollufy("필라멘트가 타버렸네", True), "필라멘?트가 타버렸네") def test_mollufy_sentence_with_many_2chars_words(self): # TEST 7: Mollufy sentence with many 2-characters noun words self.assertEqual(mollufy.mollufy("내가 재미있는 게임을 하나 알아냈는데, 나중에 검색해봐"), "내가 재미있는 게?임을 하나 알아냈는데, 나?중에 검?색해봐") self.assertEqual(mollufy.mollufy("그야말로 연애재판 너는 나에게 얼마만큼의 죄를 물을 거니?"), "그야말로 연?애재?판 너는 나에게 얼?마만큼의 죄를 물을 거니?") self.assertEqual(mollufy.mollufy("두 글자 명사가 다수 존재하는 문장을 생각하기는 곤란하다"), "두 글?자 명?사가 다?수 존?재하는 문?장을 생?각하기는 곤?란하다") def test_mollufy_sentence_with_many_words(self): # TEST 8: Mollufy sentence with many noun words (without no length limit) self.assertEqual(mollufy.mollufy("대한민국의 영토는 한반도와 그 부속도서로 한다.", True), "대한민?국의 영?토는 한반?도와 그 부?속도?서로 한다.") self.assertEqual(mollufy.mollufy("대한민국은 통일을 지향하며, 자유민주적 기본질서에 입각한 평화적 통일 정책을 수립하고 이를 추진한다.", True), "대한민?국은 통?일을 지?향하며, 자?유민?주적 기?본질?서에 입?각한 평?화적 통?일 정?책을 수?립하고 이를 추?진한다.") self.assertEqual(mollufy.mollufy("블루 아카이브 정말 건전하고 건강하고 밝은 게임인데...", True), "블?루 아카이?브 정말 건?전하고 건?강하고 밝은 게?임인데...") def test_mollufy_sentence_with_many_words_without_param(self): # TEST 9: Mollufy 2-characters noun words in sentence, not 3-characters-or-above noun words self.assertEqual(mollufy.mollufy("그래픽 디자인은 특정 메시지 (혹은 콘텐츠)와 이를 전달하려는 대상자에게 걸맞은 매체 (인쇄물, 웹사이트, 동영상 등)를 선택하여 표현 또는 제작하는 창의적인 과정이다."), "그래픽 디자인은 특?정 메시지 (혹은 콘텐츠)와 이를 전?달하려는 대상자에게 걸맞은 매?체 (인쇄물, 웹사이트, 동영상 등)를 선?택하여 표?현 또는 제?작하는 창?의적인 과?정이다.") class MollufyTestMeme(unittest.TestCase): def test_mollufy_meme_words(self): # TEST 10: Meme words self.assertEqual(mollufy.mollufy("몰루"), 
"몰?루") self.assertEqual(mollufy.mollufy("코하루"), "코하?루") self.assertEqual(mollufy.mollufy("아루"), "아?루") self.assertEqual(mollufy.mollufy("네루"), "네?루") def test_mollufy_meme_sentences(self): # TEST 11: Meme sentences self.assertEqual(mollufy.mollufy("몰루는건가..."), "몰?루는건가...") self.assertEqual(mollufy.mollufy("내가 몰루가 될께..."), "내가 몰?루가 될께...") class MollufyTestAltmark(unittest.TestCase): def test_mollufy_altmark(self): # TEST 12: Mollufy with alternative mark: [!] self.assertEqual(mollufy.mollufy("바람", alternativeMark=True), "바!람") self.assertEqual(mollufy.mollufy("아루", alternativeMark=True), "아!루") self.assertEqual(mollufy.mollufy("스튜디오", True, True), "스튜디!오") self.assertEqual(mollufy.mollufy("각설탕을 커피에 타먹으면 달게요 안 달게요~", True, True), "각설!탕을 커!피에 타먹으면 달게요 안 달게요~") if __name__ == "__main__": unittest.main()
ertEqual(mollufy.mollufy("냠냠쩝쩝", True), "냠냠쩝쩝") class MollufyTestSentence(unittest.TestCase): def test_mollufy_sentence_with_one_2chars_word(self): # TEST 5: Mollufy sentence with one 2-charac
server_test.go
package poker_test import ( "fmt" "io" "net/http" "net/http/httptest" "reflect" "strings" "testing" "time" "github.com/gorilla/websocket" poker "github.com/marcetin/nauci-go-sa-testovima/websockets/v2" ) var ( dummyGame = &GameSpy{} tenMS = 10 * time.Millisecond ) func mustMakePlayerServer(t *testing.T, store poker.PlayerStore, game poker.Game) *poker.PlayerServer { server, err := poker.NewPlayerServer(store, game) if err != nil { t.Fatal("problem creating player server", err) } return server } func TestGETPlayers(t *testing.T) { store := poker.StubPlayerStore{ Scores: map[string]int{ "Pepper": 20, "Floyd": 10, }, } server := mustMakePlayerServer(t, &store, dummyGame) t.Run("returns Pepper's score", func(t *testing.T) { request := newGetScoreRequest("Pepper") response := httptest.NewRecorder() server.ServeHTTP(response, request) assertStatus(t, response, http.StatusOK) assertResponseBody(t, response.Body.String(), "20") }) t.Run("returns Floyd's score", func(t *testing.T) { request := newGetScoreRequest("Floyd") response := httptest.NewRecorder() server.ServeHTTP(response, request) assertStatus(t, response, http.StatusOK) assertResponseBody(t, response.Body.String(), "10") }) t.Run("returns 404 on missing players", func(t *testing.T) { request := newGetScoreRequest("Apollo") response := httptest.NewRecorder() server.ServeHTTP(response, request) assertStatus(t, response, http.StatusNotFound) }) } func
(t *testing.T) { store := poker.StubPlayerStore{ Scores: map[string]int{}, } server := mustMakePlayerServer(t, &store, dummyGame) t.Run("it records wins on POST", func(t *testing.T) { player := "Pepper" request := newPostWinRequest(player) response := httptest.NewRecorder() server.ServeHTTP(response, request) assertStatus(t, response, http.StatusAccepted) poker.AssertPlayerWin(t, &store, player) }) } func TestLeague(t *testing.T) { t.Run("it returns the League table as JSON", func(t *testing.T) { wantedLeague := []poker.Player{ {Name: "Cleo", Wins: 32}, {Name: "Chris", Wins: 20}, {Name: "Tiest", Wins: 14}, } store := poker.StubPlayerStore{League: wantedLeague} server := mustMakePlayerServer(t, &store, dummyGame) request := newLeagueRequest() response := httptest.NewRecorder() server.ServeHTTP(response, request) got := getLeagueFromResponse(t, response.Body) assertStatus(t, response, http.StatusOK) assertLeague(t, got, wantedLeague) assertContentType(t, response, "application/json") }) } func TestGame(t *testing.T) { t.Run("GET /game returns 200", func(t *testing.T) { server := mustMakePlayerServer(t, &poker.StubPlayerStore{}, dummyGame) request := newGameRequest() response := httptest.NewRecorder() server.ServeHTTP(response, request) assertStatus(t, response, http.StatusOK) }) t.Run("start a game with 3 players, send some blind alerts down WS and declare Ruth the winner", func(t *testing.T) { wantedBlindAlert := "Blind is 100" winner := "Ruth" game := &GameSpy{BlindAlert: []byte(wantedBlindAlert)} server := httptest.NewServer(mustMakePlayerServer(t, dummyPlayerStore, game)) ws := mustDialWS(t, "ws"+strings.TrimPrefix(server.URL, "http")+"/ws") defer server.Close() defer ws.Close() writeWSMessage(t, ws, "3") writeWSMessage(t, ws, winner) assertGameStartedWith(t, game, 3) assertFinishCalledWith(t, game, winner) within(t, tenMS, func() { assertWebsocketGotMsg(t, ws, wantedBlindAlert) }) }) } func assertWebsocketGotMsg(t *testing.T, ws *websocket.Conn, want string) { 
_, msg, _ := ws.ReadMessage() if string(msg) != want { t.Errorf(`got "%s", want "%s"`, string(msg), want) } } func retryUntil(d time.Duration, f func() bool) bool { deadline := time.Now().Add(d) for time.Now().Before(deadline) { if f() { return true } } return false } func within(t testing.TB, d time.Duration, assert func()) { t.Helper() done := make(chan struct{}, 1) go func() { assert() done <- struct{}{} }() select { case <-time.After(d): t.Error("timed out") case <-done: } } func writeWSMessage(t testing.TB, conn *websocket.Conn, message string) { t.Helper() if err := conn.WriteMessage(websocket.TextMessage, []byte(message)); err != nil { t.Fatalf("could not send message over ws connection %v", err) } } func assertContentType(t testing.TB, response *httptest.ResponseRecorder, want string) { t.Helper() if response.Header().Get("content-type") != want { t.Errorf("response did not have content-type of %s, got %v", want, response.HeaderMap) } } func getLeagueFromResponse(t *testing.T, body io.Reader) []poker.Player { t.Helper() league, err := poker.NewLeague(body) if err != nil { t.Fatalf("Unable to parse response from server %q into slice of Player, '%v'", body, err) } return league } func assertLeague(t *testing.T, got, want []poker.Player) { t.Helper() if !reflect.DeepEqual(got, want) { t.Errorf("got %v want %v", got, want) } } func assertStatus(t *testing.T, got *httptest.ResponseRecorder, want int) { t.Helper() if got.Code != want { t.Errorf("did not get correct status, got %d, want %d", got.Code, want) } } func newGameRequest() *http.Request { req, _ := http.NewRequest(http.MethodGet, "/game", nil) return req } func newLeagueRequest() *http.Request { req, _ := http.NewRequest(http.MethodGet, "/league", nil) return req } func newGetScoreRequest(name string) *http.Request { req, _ := http.NewRequest(http.MethodGet, fmt.Sprintf("/players/%s", name), nil) return req } func newPostWinRequest(name string) *http.Request { req, _ := http.NewRequest(http.MethodPost, 
fmt.Sprintf("/players/%s", name), nil) return req } func assertResponseBody(t *testing.T, got, want string) { t.Helper() if got != want { t.Errorf("response body is wrong, got %q want %q", got, want) } } func mustDialWS(t *testing.T, url string) *websocket.Conn { ws, _, err := websocket.DefaultDialer.Dial(url, nil) if err != nil { t.Fatalf("could not open a ws connection on %s %v", url, err) } return ws }
TestStoreWins
greengrass.rs
//! Types for the `Greengrass` service. /// The [`AWS::Greengrass::ConnectorDefinition`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinition.html) resource type. #[derive(Debug, Default)] pub struct ConnectorDefinition { properties: ConnectorDefinitionProperties } /// Properties for the `ConnectorDefinition` resource. #[derive(Debug, Default)] pub struct ConnectorDefinitionProperties { /// Property [`InitialVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinition.html#cfn-greengrass-connectordefinition-initialversion). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. pub initial_version: Option<::Value<self::connector_definition::ConnectorDefinitionVersion>>, /// Property [`Name`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinition.html#cfn-greengrass-connectordefinition-name). /// /// Update type: _Mutable_. /// AWS CloudFormation doesn't replace the resource when you change this property. pub name: ::Value<String>, /// Property [`Tags`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinition.html#cfn-greengrass-connectordefinition-tags). /// /// Update type: _Mutable_. /// AWS CloudFormation doesn't replace the resource when you change this property. 
pub tags: Option<::Value<::json::Value>>, } impl ::serde::Serialize for ConnectorDefinitionProperties { fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let mut map = ::serde::Serializer::serialize_map(s, None)?; if let Some(ref initial_version) = self.initial_version { ::serde::ser::SerializeMap::serialize_entry(&mut map, "InitialVersion", initial_version)?; } ::serde::ser::SerializeMap::serialize_entry(&mut map, "Name", &self.name)?; if let Some(ref tags) = self.tags { ::serde::ser::SerializeMap::serialize_entry(&mut map, "Tags", tags)?; } ::serde::ser::SerializeMap::end(map) } } impl<'de> ::serde::Deserialize<'de> for ConnectorDefinitionProperties { fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<ConnectorDefinitionProperties, D::Error> { struct Visitor; impl<'de> ::serde::de::Visitor<'de> for Visitor { type Value = ConnectorDefinitionProperties; fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { write!(f, "a struct of type ConnectorDefinitionProperties") } fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> { let mut initial_version: Option<::Value<self::connector_definition::ConnectorDefinitionVersion>> = None; let mut name: Option<::Value<String>> = None; let mut tags: Option<::Value<::json::Value>> = None; while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? 
{ match __cfn_key.as_ref() { "InitialVersion" => { initial_version = ::serde::de::MapAccess::next_value(&mut map)?; } "Name" => { name = ::serde::de::MapAccess::next_value(&mut map)?; } "Tags" => { tags = ::serde::de::MapAccess::next_value(&mut map)?; } _ => {} } } Ok(ConnectorDefinitionProperties { initial_version: initial_version, name: name.ok_or(::serde::de::Error::missing_field("Name"))?, tags: tags, }) } } d.deserialize_map(Visitor) } } impl ::Resource for ConnectorDefinition { type Properties = ConnectorDefinitionProperties; const TYPE: &'static str = "AWS::Greengrass::ConnectorDefinition"; fn properties(&self) -> &ConnectorDefinitionProperties { &self.properties } fn properties_mut(&mut self) -> &mut ConnectorDefinitionProperties { &mut self.properties } } impl ::private::Sealed for ConnectorDefinition {} impl From<ConnectorDefinitionProperties> for ConnectorDefinition { fn from(properties: ConnectorDefinitionProperties) -> ConnectorDefinition { ConnectorDefinition { properties } } } /// The [`AWS::Greengrass::ConnectorDefinitionVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinitionversion.html) resource type. #[derive(Debug, Default)] pub struct ConnectorDefinitionVersion { properties: ConnectorDefinitionVersionProperties } /// Properties for the `ConnectorDefinitionVersion` resource. #[derive(Debug, Default)] pub struct ConnectorDefinitionVersionProperties { /// Property [`ConnectorDefinitionId`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinitionversion.html#cfn-greengrass-connectordefinitionversion-connectordefinitionid). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. 
pub connector_definition_id: ::Value<String>, /// Property [`Connectors`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinitionversion.html#cfn-greengrass-connectordefinitionversion-connectors). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. pub connectors: ::ValueList<self::connector_definition_version::Connector>, } impl ::serde::Serialize for ConnectorDefinitionVersionProperties { fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let mut map = ::serde::Serializer::serialize_map(s, None)?; ::serde::ser::SerializeMap::serialize_entry(&mut map, "ConnectorDefinitionId", &self.connector_definition_id)?; ::serde::ser::SerializeMap::serialize_entry(&mut map, "Connectors", &self.connectors)?; ::serde::ser::SerializeMap::end(map) } } impl<'de> ::serde::Deserialize<'de> for ConnectorDefinitionVersionProperties { fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<ConnectorDefinitionVersionProperties, D::Error> { struct Visitor; impl<'de> ::serde::de::Visitor<'de> for Visitor { type Value = ConnectorDefinitionVersionProperties; fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { write!(f, "a struct of type ConnectorDefinitionVersionProperties") } fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> { let mut connector_definition_id: Option<::Value<String>> = None; let mut connectors: Option<::ValueList<self::connector_definition_version::Connector>> = None; while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? 
{ match __cfn_key.as_ref() { "ConnectorDefinitionId" => { connector_definition_id = ::serde::de::MapAccess::next_value(&mut map)?; } "Connectors" => { connectors = ::serde::de::MapAccess::next_value(&mut map)?; } _ => {} } } Ok(ConnectorDefinitionVersionProperties { connector_definition_id: connector_definition_id.ok_or(::serde::de::Error::missing_field("ConnectorDefinitionId"))?, connectors: connectors.ok_or(::serde::de::Error::missing_field("Connectors"))?, }) } } d.deserialize_map(Visitor) } } impl ::Resource for ConnectorDefinitionVersion { type Properties = ConnectorDefinitionVersionProperties; const TYPE: &'static str = "AWS::Greengrass::ConnectorDefinitionVersion"; fn properties(&self) -> &ConnectorDefinitionVersionProperties { &self.properties } fn properties_mut(&mut self) -> &mut ConnectorDefinitionVersionProperties { &mut self.properties } } impl ::private::Sealed for ConnectorDefinitionVersion {} impl From<ConnectorDefinitionVersionProperties> for ConnectorDefinitionVersion { fn from(properties: ConnectorDefinitionVersionProperties) -> ConnectorDefinitionVersion { ConnectorDefinitionVersion { properties } } } /// The [`AWS::Greengrass::CoreDefinition`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinition.html) resource type. #[derive(Debug, Default)] pub struct CoreDefinition { properties: CoreDefinitionProperties } /// Properties for the `CoreDefinition` resource. #[derive(Debug, Default)] pub struct CoreDefinitionProperties { /// Property [`InitialVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinition.html#cfn-greengrass-coredefinition-initialversion). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. 
    pub initial_version: Option<::Value<self::core_definition::CoreDefinitionVersion>>,
    /// Property [`Name`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinition.html#cfn-greengrass-coredefinition-name).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub name: ::Value<String>,
    /// Property [`Tags`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinition.html#cfn-greengrass-coredefinition-tags).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub tags: Option<::Value<::json::Value>>,
}

// Generated serde impls for CoreDefinitionProperties: optional properties
// ("InitialVersion", "Tags") are emitted only when present; "Name" is
// required and yields `missing_field` on deserialization if absent.
impl ::serde::Serialize for CoreDefinitionProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        if let Some(ref initial_version) = self.initial_version {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "InitialVersion", initial_version)?;
        }
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "Name", &self.name)?;
        if let Some(ref tags) = self.tags {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Tags", tags)?;
        }
        ::serde::ser::SerializeMap::end(map)
    }
}

impl<'de> ::serde::Deserialize<'de> for CoreDefinitionProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<CoreDefinitionProperties, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = CoreDefinitionProperties;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type CoreDefinitionProperties")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut initial_version: Option<::Value<self::core_definition::CoreDefinitionVersion>> = None;
                let mut name: Option<::Value<String>> = None;
                let mut tags: Option<::Value<::json::Value>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "InitialVersion" => {
                            initial_version = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Name" => {
                            name = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Tags" => {
                            tags = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }

                Ok(CoreDefinitionProperties {
                    initial_version: initial_version,
                    name: name.ok_or(::serde::de::Error::missing_field("Name"))?,
                    tags: tags,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

impl ::Resource for CoreDefinition {
    type Properties = CoreDefinitionProperties;
    const TYPE: &'static str = "AWS::Greengrass::CoreDefinition";
    fn properties(&self) -> &CoreDefinitionProperties { &self.properties }
    fn properties_mut(&mut self) -> &mut CoreDefinitionProperties { &mut self.properties }
}

impl ::private::Sealed for CoreDefinition {}

impl From<CoreDefinitionProperties> for CoreDefinition {
    fn from(properties: CoreDefinitionProperties) -> CoreDefinition {
        CoreDefinition { properties }
    }
}

/// The [`AWS::Greengrass::CoreDefinitionVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinitionversion.html) resource type.
#[derive(Debug, Default)]
pub struct CoreDefinitionVersion {
    properties: CoreDefinitionVersionProperties
}

/// Properties for the `CoreDefinitionVersion` resource.
#[derive(Debug, Default)]
pub struct CoreDefinitionVersionProperties {
    /// Property [`CoreDefinitionId`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinitionversion.html#cfn-greengrass-coredefinitionversion-coredefinitionid).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub core_definition_id: ::Value<String>,
    /// Property [`Cores`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinitionversion.html#cfn-greengrass-coredefinitionversion-cores).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub cores: ::ValueList<self::core_definition_version::Core>,
}

// Generated serde impls: both "CoreDefinitionId" and "Cores" are required —
// always serialized, and `missing_field` is raised on deserialization if
// either key is absent. Unknown keys are skipped.
impl ::serde::Serialize for CoreDefinitionVersionProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "CoreDefinitionId", &self.core_definition_id)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "Cores", &self.cores)?;
        ::serde::ser::SerializeMap::end(map)
    }
}

impl<'de> ::serde::Deserialize<'de> for CoreDefinitionVersionProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<CoreDefinitionVersionProperties, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = CoreDefinitionVersionProperties;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type CoreDefinitionVersionProperties")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut core_definition_id: Option<::Value<String>> = None;
                let mut cores: Option<::ValueList<self::core_definition_version::Core>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "CoreDefinitionId" => {
                            core_definition_id = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Cores" => {
                            cores = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }

                Ok(CoreDefinitionVersionProperties {
                    core_definition_id: core_definition_id.ok_or(::serde::de::Error::missing_field("CoreDefinitionId"))?,
                    cores: cores.ok_or(::serde::de::Error::missing_field("Cores"))?,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

impl ::Resource for CoreDefinitionVersion {
    type Properties = CoreDefinitionVersionProperties;
    const TYPE: &'static str = "AWS::Greengrass::CoreDefinitionVersion";
    fn properties(&self) -> &CoreDefinitionVersionProperties { &self.properties }
    fn properties_mut(&mut self) -> &mut CoreDefinitionVersionProperties { &mut self.properties }
}

impl ::private::Sealed for CoreDefinitionVersion {}

impl From<CoreDefinitionVersionProperties> for CoreDefinitionVersion {
    fn from(properties: CoreDefinitionVersionProperties) -> CoreDefinitionVersion {
        CoreDefinitionVersion { properties }
    }
}

/// The [`AWS::Greengrass::DeviceDefinition`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinition.html) resource type.
#[derive(Debug, Default)]
pub struct DeviceDefinition {
    properties: DeviceDefinitionProperties
}

/// Properties for the `DeviceDefinition` resource.
#[derive(Debug, Default)]
pub struct DeviceDefinitionProperties {
    /// Property [`InitialVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinition.html#cfn-greengrass-devicedefinition-initialversion).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub initial_version: Option<::Value<self::device_definition::DeviceDefinitionVersion>>,
    /// Property [`Name`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinition.html#cfn-greengrass-devicedefinition-name).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub name: ::Value<String>,
    /// Property [`Tags`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinition.html#cfn-greengrass-devicedefinition-tags).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub tags: Option<::Value<::json::Value>>,
}

// Generated serde impls: optional "InitialVersion"/"Tags" are emitted only
// when Some; "Name" is required (`missing_field` if absent on deserialize).
impl ::serde::Serialize for DeviceDefinitionProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        if let Some(ref initial_version) = self.initial_version {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "InitialVersion", initial_version)?;
        }
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "Name", &self.name)?;
        if let Some(ref tags) = self.tags {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Tags", tags)?;
        }
        ::serde::ser::SerializeMap::end(map)
    }
}

impl<'de> ::serde::Deserialize<'de> for DeviceDefinitionProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<DeviceDefinitionProperties, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = DeviceDefinitionProperties;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type DeviceDefinitionProperties")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut initial_version: Option<::Value<self::device_definition::DeviceDefinitionVersion>> = None;
                let mut name: Option<::Value<String>> = None;
                let mut tags: Option<::Value<::json::Value>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "InitialVersion" => {
                            initial_version = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Name" => {
                            name = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Tags" => {
                            tags = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }

                Ok(DeviceDefinitionProperties {
                    initial_version: initial_version,
                    name: name.ok_or(::serde::de::Error::missing_field("Name"))?,
                    tags: tags,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

impl ::Resource for DeviceDefinition {
    type Properties = DeviceDefinitionProperties;
    const TYPE: &'static str = "AWS::Greengrass::DeviceDefinition";
    fn properties(&self) -> &DeviceDefinitionProperties { &self.properties }
    fn properties_mut(&mut self) -> &mut DeviceDefinitionProperties { &mut self.properties }
}

impl ::private::Sealed for DeviceDefinition {}

impl From<DeviceDefinitionProperties> for DeviceDefinition {
    fn from(properties: DeviceDefinitionProperties) -> DeviceDefinition {
        DeviceDefinition { properties }
    }
}

/// The [`AWS::Greengrass::DeviceDefinitionVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinitionversion.html) resource type.
#[derive(Debug, Default)]
pub struct DeviceDefinitionVersion {
    properties: DeviceDefinitionVersionProperties
}

/// Properties for the `DeviceDefinitionVersion` resource.
#[derive(Debug, Default)]
pub struct DeviceDefinitionVersionProperties {
    /// Property [`DeviceDefinitionId`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinitionversion.html#cfn-greengrass-devicedefinitionversion-devicedefinitionid).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub device_definition_id: ::Value<String>,
    /// Property [`Devices`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinitionversion.html#cfn-greengrass-devicedefinitionversion-devices).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub devices: ::ValueList<self::device_definition_version::Device>,
}

// Generated serde impls: "DeviceDefinitionId" and "Devices" are both
// required — always serialized; `missing_field` on deserialize if absent.
impl ::serde::Serialize for DeviceDefinitionVersionProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "DeviceDefinitionId", &self.device_definition_id)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "Devices", &self.devices)?;
        ::serde::ser::SerializeMap::end(map)
    }
}

impl<'de> ::serde::Deserialize<'de> for DeviceDefinitionVersionProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<DeviceDefinitionVersionProperties, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = DeviceDefinitionVersionProperties;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type DeviceDefinitionVersionProperties")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut device_definition_id: Option<::Value<String>> = None;
                let mut devices: Option<::ValueList<self::device_definition_version::Device>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "DeviceDefinitionId" => {
                            device_definition_id = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Devices" => {
                            devices = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }

                Ok(DeviceDefinitionVersionProperties {
                    device_definition_id: device_definition_id.ok_or(::serde::de::Error::missing_field("DeviceDefinitionId"))?,
                    devices: devices.ok_or(::serde::de::Error::missing_field("Devices"))?,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

impl ::Resource for DeviceDefinitionVersion {
    type Properties = DeviceDefinitionVersionProperties;
    const TYPE: &'static str = "AWS::Greengrass::DeviceDefinitionVersion";
    fn properties(&self) -> &DeviceDefinitionVersionProperties { &self.properties }
    fn properties_mut(&mut self) -> &mut DeviceDefinitionVersionProperties { &mut self.properties }
}

impl ::private::Sealed for DeviceDefinitionVersion {}

impl From<DeviceDefinitionVersionProperties> for DeviceDefinitionVersion {
    fn from(properties: DeviceDefinitionVersionProperties) -> DeviceDefinitionVersion {
        DeviceDefinitionVersion { properties }
    }
}

/// The [`AWS::Greengrass::FunctionDefinition`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinition.html) resource type.
#[derive(Debug, Default)]
pub struct FunctionDefinition {
    properties: FunctionDefinitionProperties
}

/// Properties for the `FunctionDefinition` resource.
#[derive(Debug, Default)]
pub struct FunctionDefinitionProperties {
    /// Property [`InitialVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinition.html#cfn-greengrass-functiondefinition-initialversion).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub initial_version: Option<::Value<self::function_definition::FunctionDefinitionVersion>>,
    /// Property [`Name`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinition.html#cfn-greengrass-functiondefinition-name).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub name: ::Value<String>,
    /// Property [`Tags`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinition.html#cfn-greengrass-functiondefinition-tags).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub tags: Option<::Value<::json::Value>>,
}

// Generated serde impls: optional "InitialVersion"/"Tags" are emitted only
// when Some; "Name" is required (`missing_field` if absent on deserialize).
impl ::serde::Serialize for FunctionDefinitionProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        if let Some(ref initial_version) = self.initial_version {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "InitialVersion", initial_version)?;
        }
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "Name", &self.name)?;
        if let Some(ref tags) = self.tags {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Tags", tags)?;
        }
        ::serde::ser::SerializeMap::end(map)
    }
}

impl<'de> ::serde::Deserialize<'de> for FunctionDefinitionProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<FunctionDefinitionProperties, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = FunctionDefinitionProperties;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type FunctionDefinitionProperties")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut initial_version: Option<::Value<self::function_definition::FunctionDefinitionVersion>> = None;
                let mut name: Option<::Value<String>> = None;
                let mut tags: Option<::Value<::json::Value>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "InitialVersion" => {
                            initial_version = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Name" => {
                            name = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Tags" => {
                            tags = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }

                Ok(FunctionDefinitionProperties {
                    initial_version: initial_version,
                    name: name.ok_or(::serde::de::Error::missing_field("Name"))?,
                    tags: tags,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

impl ::Resource for FunctionDefinition {
    type Properties = FunctionDefinitionProperties;
    const TYPE: &'static str = "AWS::Greengrass::FunctionDefinition";
    fn properties(&self) -> &FunctionDefinitionProperties { &self.properties }
    fn properties_mut(&mut self) -> &mut FunctionDefinitionProperties { &mut self.properties }
}

impl ::private::Sealed for FunctionDefinition {}

impl From<FunctionDefinitionProperties> for FunctionDefinition {
    fn from(properties: FunctionDefinitionProperties) -> FunctionDefinition {
        FunctionDefinition { properties }
    }
}

/// The [`AWS::Greengrass::FunctionDefinitionVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinitionversion.html) resource type.
#[derive(Debug, Default)]
pub struct FunctionDefinitionVersion {
    properties: FunctionDefinitionVersionProperties
}

/// Properties for the `FunctionDefinitionVersion` resource.
#[derive(Debug, Default)]
pub struct FunctionDefinitionVersionProperties {
    /// Property [`DefaultConfig`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinitionversion.html#cfn-greengrass-functiondefinitionversion-defaultconfig).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub default_config: Option<::Value<self::function_definition_version::DefaultConfig>>,
    /// Property [`FunctionDefinitionId`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinitionversion.html#cfn-greengrass-functiondefinitionversion-functiondefinitionid).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub function_definition_id: ::Value<String>,
    /// Property [`Functions`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinitionversion.html#cfn-greengrass-functiondefinitionversion-functions).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub functions: ::ValueList<self::function_definition_version::Function>,
}

// Generated serde impls: "DefaultConfig" is optional (emitted only when
// Some); "FunctionDefinitionId" and "Functions" are required.
impl ::serde::Serialize for FunctionDefinitionVersionProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        if let Some(ref default_config) = self.default_config {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "DefaultConfig", default_config)?;
        }
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "FunctionDefinitionId", &self.function_definition_id)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "Functions", &self.functions)?;
        ::serde::ser::SerializeMap::end(map)
    }
}

impl<'de> ::serde::Deserialize<'de> for FunctionDefinitionVersionProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<FunctionDefinitionVersionProperties, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = FunctionDefinitionVersionProperties;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type FunctionDefinitionVersionProperties")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut default_config: Option<::Value<self::function_definition_version::DefaultConfig>> = None;
                let mut function_definition_id: Option<::Value<String>> = None;
                let mut functions: Option<::ValueList<self::function_definition_version::Function>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "DefaultConfig" => {
                            default_config = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "FunctionDefinitionId" => {
                            function_definition_id = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Functions" => {
                            functions = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }

                Ok(FunctionDefinitionVersionProperties {
                    default_config: default_config,
                    function_definition_id: function_definition_id.ok_or(::serde::de::Error::missing_field("FunctionDefinitionId"))?,
                    functions: functions.ok_or(::serde::de::Error::missing_field("Functions"))?,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

impl ::Resource for FunctionDefinitionVersion {
    type Properties = FunctionDefinitionVersionProperties;
    const TYPE: &'static str = "AWS::Greengrass::FunctionDefinitionVersion";
    fn properties(&self) -> &FunctionDefinitionVersionProperties { &self.properties }
    fn properties_mut(&mut self) -> &mut FunctionDefinitionVersionProperties { &mut self.properties }
}

impl ::private::Sealed for FunctionDefinitionVersion {}

impl From<FunctionDefinitionVersionProperties> for FunctionDefinitionVersion {
    fn from(properties: FunctionDefinitionVersionProperties) -> FunctionDefinitionVersion {
        FunctionDefinitionVersion { properties }
    }
}

/// The [`AWS::Greengrass::Group`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html) resource type.
#[derive(Debug, Default)]
pub struct Group {
    properties: GroupProperties
}

/// Properties for the `Group` resource.
#[derive(Debug, Default)]
pub struct GroupProperties {
    /// Property [`InitialVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html#cfn-greengrass-group-initialversion).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub initial_version: Option<::Value<self::group::GroupVersion>>,
    /// Property [`Name`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html#cfn-greengrass-group-name).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub name: ::Value<String>,
    /// Property [`RoleArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html#cfn-greengrass-group-rolearn).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub role_arn: Option<::Value<String>>,
    /// Property [`Tags`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html#cfn-greengrass-group-tags).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub tags: Option<::Value<::json::Value>>,
}

// Generated serde impls: only "Name" is required; "InitialVersion",
// "RoleArn" and "Tags" are emitted only when present and default to None.
impl ::serde::Serialize for GroupProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        if let Some(ref initial_version) = self.initial_version {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "InitialVersion", initial_version)?;
        }
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "Name", &self.name)?;
        if let Some(ref role_arn) = self.role_arn {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "RoleArn", role_arn)?;
        }
        if let Some(ref tags) = self.tags {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Tags", tags)?;
        }
        ::serde::ser::SerializeMap::end(map)
    }
}

impl<'de> ::serde::Deserialize<'de> for GroupProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<GroupProperties, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = GroupProperties;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type GroupProperties")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut initial_version: Option<::Value<self::group::GroupVersion>> = None;
                let mut name: Option<::Value<String>> = None;
                let mut role_arn: Option<::Value<String>> = None;
                let mut tags: Option<::Value<::json::Value>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "InitialVersion" => {
                            initial_version = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Name" => {
                            name = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "RoleArn" => {
                            role_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Tags" => {
                            tags = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }

                Ok(GroupProperties {
                    initial_version: initial_version,
                    name: name.ok_or(::serde::de::Error::missing_field("Name"))?,
                    role_arn: role_arn,
                    tags: tags,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

impl ::Resource for Group {
    type Properties = GroupProperties;
    const TYPE: &'static str = "AWS::Greengrass::Group";
    fn properties(&self) -> &GroupProperties { &self.properties }
    fn properties_mut(&mut self) -> &mut GroupProperties { &mut self.properties }
}

impl ::private::Sealed for Group {}

impl From<GroupProperties> for Group {
    fn from(properties: GroupProperties) -> Group {
        Group { properties }
    }
}

/// The [`AWS::Greengrass::GroupVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html) resource type.
#[derive(Debug, Default)]
pub struct GroupVersion {
    properties: GroupVersionProperties
}

/// Properties for the `GroupVersion` resource.
#[derive(Debug, Default)]
pub struct GroupVersionProperties {
    /// Property [`ConnectorDefinitionVersionArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-connectordefinitionversionarn).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub connector_definition_version_arn: Option<::Value<String>>,
    /// Property [`CoreDefinitionVersionArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-coredefinitionversionarn).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub core_definition_version_arn: Option<::Value<String>>,
    /// Property [`DeviceDefinitionVersionArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-devicedefinitionversionarn).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub device_definition_version_arn: Option<::Value<String>>,
    /// Property [`FunctionDefinitionVersionArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-functiondefinitionversionarn).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub function_definition_version_arn: Option<::Value<String>>,
    /// Property [`GroupId`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-groupid).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub group_id: ::Value<String>,
    /// Property [`LoggerDefinitionVersionArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-loggerdefinitionversionarn).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub logger_definition_version_arn: Option<::Value<String>>,
    /// Property [`ResourceDefinitionVersionArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-resourcedefinitionversionarn).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub resource_definition_version_arn: Option<::Value<String>>,
    /// Property [`SubscriptionDefinitionVersionArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-subscriptiondefinitionversionarn).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub subscription_definition_version_arn: Option<::Value<String>>,
}

// Generated serde impls: only "GroupId" is required; every *VersionArn
// property is optional and serialized only when present.
impl ::serde::Serialize for GroupVersionProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        if let Some(ref connector_definition_version_arn) = self.connector_definition_version_arn {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "ConnectorDefinitionVersionArn", connector_definition_version_arn)?;
        }
        if let Some(ref core_definition_version_arn) = self.core_definition_version_arn {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "CoreDefinitionVersionArn", core_definition_version_arn)?;
        }
        if let Some(ref device_definition_version_arn) = self.device_definition_version_arn {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "DeviceDefinitionVersionArn", device_definition_version_arn)?;
        }
        if let Some(ref function_definition_version_arn) = self.function_definition_version_arn {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "FunctionDefinitionVersionArn", function_definition_version_arn)?;
        }
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "GroupId", &self.group_id)?;
        if let Some(ref logger_definition_version_arn) = self.logger_definition_version_arn {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "LoggerDefinitionVersionArn", logger_definition_version_arn)?;
        }
        if let Some(ref resource_definition_version_arn) = self.resource_definition_version_arn {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "ResourceDefinitionVersionArn", resource_definition_version_arn)?;
        }
        if let Some(ref subscription_definition_version_arn) = self.subscription_definition_version_arn {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "SubscriptionDefinitionVersionArn", subscription_definition_version_arn)?;
        }
        ::serde::ser::SerializeMap::end(map)
    }
}

impl<'de> ::serde::Deserialize<'de> for GroupVersionProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<GroupVersionProperties, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = GroupVersionProperties;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type GroupVersionProperties")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut connector_definition_version_arn: Option<::Value<String>> = None;
                let mut core_definition_version_arn: Option<::Value<String>> = None;
                let mut device_definition_version_arn: Option<::Value<String>> = None;
                let mut function_definition_version_arn: Option<::Value<String>> = None;
                let mut group_id: Option<::Value<String>> = None;
                let mut logger_definition_version_arn: Option<::Value<String>> = None;
                let mut resource_definition_version_arn: Option<::Value<String>> = None;
                let mut subscription_definition_version_arn: Option<::Value<String>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "ConnectorDefinitionVersionArn" => {
                            connector_definition_version_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "CoreDefinitionVersionArn" => {
                            core_definition_version_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "DeviceDefinitionVersionArn" => {
                            device_definition_version_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "FunctionDefinitionVersionArn" => {
                            function_definition_version_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "GroupId" => {
                            group_id = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "LoggerDefinitionVersionArn" => {
                            logger_definition_version_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "ResourceDefinitionVersionArn" => {
                            resource_definition_version_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "SubscriptionDefinitionVersionArn" => {
                            subscription_definition_version_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }

                Ok(GroupVersionProperties {
                    connector_definition_version_arn: connector_definition_version_arn,
                    core_definition_version_arn: core_definition_version_arn,
                    device_definition_version_arn: device_definition_version_arn,
                    function_definition_version_arn: function_definition_version_arn,
                    group_id: group_id.ok_or(::serde::de::Error::missing_field("GroupId"))?,
                    logger_definition_version_arn: logger_definition_version_arn,
                    resource_definition_version_arn: resource_definition_version_arn,
                    subscription_definition_version_arn: subscription_definition_version_arn,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

impl ::Resource for GroupVersion {
    type Properties = GroupVersionProperties;
    const TYPE: &'static str = "AWS::Greengrass::GroupVersion";
    fn properties(&self) -> &GroupVersionProperties { &self.properties }
    fn properties_mut(&mut self) -> &mut GroupVersionProperties { &mut self.properties }
}

impl ::private::Sealed for GroupVersion {}

impl From<GroupVersionProperties> for GroupVersion {
    fn from(properties: GroupVersionProperties) -> GroupVersion {
        GroupVersion { properties }
    }
}

/// The [`AWS::Greengrass::LoggerDefinition`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinition.html) resource type.
#[derive(Debug, Default)]
pub struct LoggerDefinition {
    properties: LoggerDefinitionProperties
}

/// Properties for the `LoggerDefinition` resource.
#[derive(Debug, Default)]
pub struct LoggerDefinitionProperties {
    /// Property [`InitialVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinition.html#cfn-greengrass-loggerdefinition-initialversion).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub initial_version: Option<::Value<self::logger_definition::LoggerDefinitionVersion>>,
    /// Property [`Name`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinition.html#cfn-greengrass-loggerdefinition-name).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub name: ::Value<String>,
    /// Property [`Tags`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinition.html#cfn-greengrass-loggerdefinition-tags).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub tags: Option<::Value<::json::Value>>,
}

// Optional properties are emitted only when present; `Name` is always emitted.
impl ::serde::Serialize for LoggerDefinitionProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        if let Some(ref initial_version) = self.initial_version {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "InitialVersion", initial_version)?;
        }
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "Name", &self.name)?;
        if let Some(ref tags) = self.tags {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Tags", tags)?;
        }
        ::serde::ser::SerializeMap::end(map)
    }
}

// Visitor-based deserialization; only `Name` is required, unknown keys ignored.
impl<'de> ::serde::Deserialize<'de> for LoggerDefinitionProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<LoggerDefinitionProperties, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = LoggerDefinitionProperties;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type LoggerDefinitionProperties")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut initial_version: Option<::Value<self::logger_definition::LoggerDefinitionVersion>> = None;
                let mut name: Option<::Value<String>> = None;
                let mut tags: Option<::Value<::json::Value>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "InitialVersion" => {
                            initial_version = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Name" => {
                            name = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Tags" => {
                            tags = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }

                Ok(LoggerDefinitionProperties {
                    initial_version: initial_version,
                    name: name.ok_or(::serde::de::Error::missing_field("Name"))?,
                    tags: tags,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

impl ::Resource for LoggerDefinition {
    type Properties = LoggerDefinitionProperties;
    const TYPE: &'static str = "AWS::Greengrass::LoggerDefinition";
    fn properties(&self) -> &LoggerDefinitionProperties {
        &self.properties
    }
    fn properties_mut(&mut self) -> &mut LoggerDefinitionProperties {
        &mut self.properties
    }
}

impl ::private::Sealed for LoggerDefinition {}

impl From<LoggerDefinitionProperties> for LoggerDefinition {
    fn from(properties: LoggerDefinitionProperties) -> LoggerDefinition {
        LoggerDefinition { properties }
    }
}

/// The [`AWS::Greengrass::LoggerDefinitionVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinitionversion.html) resource type.
#[derive(Debug, Default)]
pub struct LoggerDefinitionVersion {
    properties: LoggerDefinitionVersionProperties
}

/// Properties for the `LoggerDefinitionVersion` resource.
#[derive(Debug, Default)]
pub struct LoggerDefinitionVersionProperties {
    /// Property [`LoggerDefinitionId`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinitionversion.html#cfn-greengrass-loggerdefinitionversion-loggerdefinitionid).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub logger_definition_id: ::Value<String>,
    /// Property [`Loggers`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinitionversion.html#cfn-greengrass-loggerdefinitionversion-loggers).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub loggers: ::ValueList<self::logger_definition_version::Logger>,
}

// Both properties are required, so both are always emitted.
impl ::serde::Serialize for LoggerDefinitionVersionProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "LoggerDefinitionId", &self.logger_definition_id)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "Loggers", &self.loggers)?;
        ::serde::ser::SerializeMap::end(map)
    }
}

// Visitor-based deserialization; both keys are required, unknown keys ignored.
impl<'de> ::serde::Deserialize<'de> for LoggerDefinitionVersionProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<LoggerDefinitionVersionProperties, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = LoggerDefinitionVersionProperties;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type LoggerDefinitionVersionProperties")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut logger_definition_id: Option<::Value<String>> = None;
                let mut loggers: Option<::ValueList<self::logger_definition_version::Logger>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "LoggerDefinitionId" => {
                            logger_definition_id = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Loggers" => {
                            loggers = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }

                Ok(LoggerDefinitionVersionProperties {
                    logger_definition_id: logger_definition_id.ok_or(::serde::de::Error::missing_field("LoggerDefinitionId"))?,
                    loggers: loggers.ok_or(::serde::de::Error::missing_field("Loggers"))?,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

impl ::Resource for LoggerDefinitionVersion {
    type Properties = LoggerDefinitionVersionProperties;
    const TYPE: &'static str = "AWS::Greengrass::LoggerDefinitionVersion";
    fn properties(&self) -> &LoggerDefinitionVersionProperties {
        &self.properties
    }
    fn properties_mut(&mut self) -> &mut LoggerDefinitionVersionProperties {
        &mut self.properties
    }
}

impl ::private::Sealed for LoggerDefinitionVersion {}

impl From<LoggerDefinitionVersionProperties> for LoggerDefinitionVersion {
    fn from(properties: LoggerDefinitionVersionProperties) -> LoggerDefinitionVersion {
        LoggerDefinitionVersion { properties }
    }
}

/// The [`AWS::Greengrass::ResourceDefinition`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinition.html) resource type.
#[derive(Debug, Default)]
pub struct ResourceDefinition {
    properties: ResourceDefinitionProperties
}

/// Properties for the `ResourceDefinition` resource.
#[derive(Debug, Default)]
pub struct ResourceDefinitionProperties {
    /// Property [`InitialVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinition.html#cfn-greengrass-resourcedefinition-initialversion).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub initial_version: Option<::Value<self::resource_definition::ResourceDefinitionVersion>>,
    /// Property [`Name`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinition.html#cfn-greengrass-resourcedefinition-name).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub name: ::Value<String>,
    /// Property [`Tags`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinition.html#cfn-greengrass-resourcedefinition-tags).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub tags: Option<::Value<::json::Value>>,
}

// Optional properties are emitted only when present; `Name` is always emitted.
impl ::serde::Serialize for ResourceDefinitionProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        if let Some(ref initial_version) = self.initial_version {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "InitialVersion", initial_version)?;
        }
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "Name", &self.name)?;
        if let Some(ref tags) = self.tags {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Tags", tags)?;
        }
        ::serde::ser::SerializeMap::end(map)
    }
}

// Visitor-based deserialization; only `Name` is required, unknown keys ignored.
impl<'de> ::serde::Deserialize<'de> for ResourceDefinitionProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<ResourceDefinitionProperties, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = ResourceDefinitionProperties;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type ResourceDefinitionProperties")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut initial_version: Option<::Value<self::resource_definition::ResourceDefinitionVersion>> = None;
                let mut name: Option<::Value<String>> = None;
                let mut tags: Option<::Value<::json::Value>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "InitialVersion" => {
                            initial_version = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Name" => {
                            name = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Tags" => {
                            tags = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }

                Ok(ResourceDefinitionProperties {
                    initial_version: initial_version,
                    name: name.ok_or(::serde::de::Error::missing_field("Name"))?,
                    tags: tags,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

impl ::Resource for ResourceDefinition {
    type Properties = ResourceDefinitionProperties;
    const TYPE: &'static str = "AWS::Greengrass::ResourceDefinition";
    fn properties(&self) -> &ResourceDefinitionProperties {
        &self.properties
    }
    fn properties_mut(&mut self) -> &mut ResourceDefinitionProperties {
        &mut self.properties
    }
}

impl ::private::Sealed for ResourceDefinition {}

impl From<ResourceDefinitionProperties> for ResourceDefinition {
    fn from(properties: ResourceDefinitionProperties) -> ResourceDefinition {
        ResourceDefinition { properties }
    }
}

/// The [`AWS::Greengrass::ResourceDefinitionVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinitionversion.html) resource type.
#[derive(Debug, Default)]
pub struct ResourceDefinitionVersion {
    properties: ResourceDefinitionVersionProperties
}

/// Properties for the `ResourceDefinitionVersion` resource.
#[derive(Debug, Default)]
pub struct ResourceDefinitionVersionProperties {
    /// Property [`ResourceDefinitionId`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinitionversion.html#cfn-greengrass-resourcedefinitionversion-resourcedefinitionid).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub resource_definition_id: ::Value<String>,
    /// Property [`Resources`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinitionversion.html#cfn-greengrass-resourcedefinitionversion-resources).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub resources: ::ValueList<self::resource_definition_version::ResourceInstance>,
}

// Both properties are required, so both are always emitted.
impl ::serde::Serialize for ResourceDefinitionVersionProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "ResourceDefinitionId", &self.resource_definition_id)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "Resources", &self.resources)?;
        ::serde::ser::SerializeMap::end(map)
    }
}

// Visitor-based deserialization; both keys are required, unknown keys ignored.
impl<'de> ::serde::Deserialize<'de> for ResourceDefinitionVersionProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<ResourceDefinitionVersionProperties, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = ResourceDefinitionVersionProperties;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type ResourceDefinitionVersionProperties")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut resource_definition_id: Option<::Value<String>> = None;
                let mut resources: Option<::ValueList<self::resource_definition_version::ResourceInstance>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "ResourceDefinitionId" => {
                            resource_definition_id = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Resources" => {
                            resources = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }

                Ok(ResourceDefinitionVersionProperties {
                    resource_definition_id: resource_definition_id.ok_or(::serde::de::Error::missing_field("ResourceDefinitionId"))?,
                    resources: resources.ok_or(::serde::de::Error::missing_field("Resources"))?,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

impl ::Resource for ResourceDefinitionVersion {
    type Properties = ResourceDefinitionVersionProperties;
    const TYPE: &'static str = "AWS::Greengrass::ResourceDefinitionVersion";
    fn properties(&self) -> &ResourceDefinitionVersionProperties {
        &self.properties
    }
    fn properties_mut(&mut self) -> &mut ResourceDefinitionVersionProperties {
        &mut self.properties
    }
}

impl ::private::Sealed for ResourceDefinitionVersion {}

impl From<ResourceDefinitionVersionProperties> for ResourceDefinitionVersion {
    fn from(properties: ResourceDefinitionVersionProperties) -> ResourceDefinitionVersion {
        ResourceDefinitionVersion { properties }
    }
}

/// The [`AWS::Greengrass::SubscriptionDefinition`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinition.html) resource type.
#[derive(Debug, Default)]
pub struct SubscriptionDefinition {
    properties: SubscriptionDefinitionProperties
}

/// Properties for the `SubscriptionDefinition` resource.
#[derive(Debug, Default)]
pub struct SubscriptionDefinitionProperties {
    /// Property [`InitialVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinition.html#cfn-greengrass-subscriptiondefinition-initialversion).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub initial_version: Option<::Value<self::subscription_definition::SubscriptionDefinitionVersion>>,
    /// Property [`Name`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinition.html#cfn-greengrass-subscriptiondefinition-name).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub name: ::Value<String>,
    /// Property [`Tags`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinition.html#cfn-greengrass-subscriptiondefinition-tags).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub tags: Option<::Value<::json::Value>>,
}

// Optional properties are emitted only when present; `Name` is always emitted.
impl ::serde::Serialize for SubscriptionDefinitionProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        if let Some(ref initial_version) = self.initial_version {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "InitialVersion", initial_version)?;
        }
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "Name", &self.name)?;
        if let Some(ref tags) = self.tags {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Tags", tags)?;
        }
        ::serde::ser::SerializeMap::end(map)
    }
}

// Visitor-based deserialization; only `Name` is required, unknown keys ignored.
impl<'de> ::serde::Deserialize<'de> for SubscriptionDefinitionProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<SubscriptionDefinitionProperties, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = SubscriptionDefinitionProperties;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type SubscriptionDefinitionProperties")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut initial_version: Option<::Value<self::subscription_definition::SubscriptionDefinitionVersion>> = None;
                let mut name: Option<::Value<String>> = None;
                let mut tags: Option<::Value<::json::Value>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "InitialVersion" => {
                            initial_version = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Name" => {
                            name = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Tags" => {
                            tags = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }

                Ok(SubscriptionDefinitionProperties {
                    initial_version: initial_version,
                    name: name.ok_or(::serde::de::Error::missing_field("Name"))?,
                    tags: tags,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

impl ::Resource for SubscriptionDefinition {
    type Properties = SubscriptionDefinitionProperties;
    const TYPE: &'static str = "AWS::Greengrass::SubscriptionDefinition";
    fn properties(&self) -> &SubscriptionDefinitionProperties {
        &self.properties
    }
    fn properties_mut(&mut self) -> &mut SubscriptionDefinitionProperties {
        &mut self.properties
    }
}

impl ::private::Sealed for SubscriptionDefinition {}

impl From<SubscriptionDefinitionProperties> for SubscriptionDefinition {
    fn from(properties: SubscriptionDefinitionProperties) -> SubscriptionDefinition {
        SubscriptionDefinition { properties }
    }
}

/// The [`AWS::Greengrass::SubscriptionDefinitionVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinitionversion.html) resource type.
#[derive(Debug, Default)]
pub struct SubscriptionDefinitionVersion {
    properties: SubscriptionDefinitionVersionProperties
}

/// Properties for the `SubscriptionDefinitionVersion` resource.
#[derive(Debug, Default)]
pub struct SubscriptionDefinitionVersionProperties {
    /// Property [`SubscriptionDefinitionId`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinitionversion.html#cfn-greengrass-subscriptiondefinitionversion-subscriptiondefinitionid).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub subscription_definition_id: ::Value<String>,
    /// Property [`Subscriptions`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinitionversion.html#cfn-greengrass-subscriptiondefinitionversion-subscriptions).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub subscriptions: ::ValueList<self::subscription_definition_version::Subscription>,
}

// Both properties are required, so both are always emitted.
impl ::serde::Serialize for SubscriptionDefinitionVersionProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "SubscriptionDefinitionId", &self.subscription_definition_id)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "Subscriptions", &self.subscriptions)?;
        ::serde::ser::SerializeMap::end(map)
    }
}

// Visitor-based deserialization; both keys are required, unknown keys ignored.
impl<'de> ::serde::Deserialize<'de> for SubscriptionDefinitionVersionProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<SubscriptionDefinitionVersionProperties, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = SubscriptionDefinitionVersionProperties;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type SubscriptionDefinitionVersionProperties")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut subscription_definition_id: Option<::Value<String>> = None;
                let mut subscriptions: Option<::ValueList<self::subscription_definition_version::Subscription>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "SubscriptionDefinitionId" => {
                            subscription_definition_id = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Subscriptions" => {
                            subscriptions = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }

                Ok(SubscriptionDefinitionVersionProperties {
                    subscription_definition_id: subscription_definition_id.ok_or(::serde::de::Error::missing_field("SubscriptionDefinitionId"))?,
                    subscriptions: subscriptions.ok_or(::serde::de::Error::missing_field("Subscriptions"))?,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

impl ::Resource for SubscriptionDefinitionVersion {
    type Properties = SubscriptionDefinitionVersionProperties;
    const TYPE: &'static str = "AWS::Greengrass::SubscriptionDefinitionVersion";
    fn properties(&self) -> &SubscriptionDefinitionVersionProperties {
        &self.properties
    }
    fn properties_mut(&mut self) -> &mut SubscriptionDefinitionVersionProperties {
        &mut self.properties
    }
}

impl ::private::Sealed for SubscriptionDefinitionVersion {}

impl From<SubscriptionDefinitionVersionProperties> for SubscriptionDefinitionVersion {
    fn from(properties: SubscriptionDefinitionVersionProperties) -> SubscriptionDefinitionVersion {
        SubscriptionDefinitionVersion { properties }
    }
}

pub mod connector_definition {
    //! Property types for the `ConnectorDefinition` resource.

    /// The [`AWS::Greengrass::ConnectorDefinition.Connector`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinition-connector.html) property type.
    #[derive(Debug, Default)]
    pub struct Connector {
        /// Property [`ConnectorArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinition-connector.html#cfn-greengrass-connectordefinition-connector-connectorarn).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub connector_arn: ::Value<String>,
        /// Property [`Id`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinition-connector.html#cfn-greengrass-connectordefinition-connector-id).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub id: ::Value<String>,
        /// Property [`Parameters`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinition-connector.html#cfn-greengrass-connectordefinition-connector-parameters).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub parameters: Option<::Value<::json::Value>>,
    }

    // `Parameters` is optional and emitted only when present.
    impl ::codec::SerializeValue for Connector {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "ConnectorArn", &self.connector_arn)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Id", &self.id)?;
            if let Some(ref parameters) = self.parameters {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "Parameters", parameters)?;
            }
            ::serde::ser::SerializeMap::end(map)
        }
    }

    // Visitor-based deserialization; `ConnectorArn` and `Id` are required.
    impl ::codec::DeserializeValue for Connector {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Connector, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = Connector;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type Connector")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut connector_arn: Option<::Value<String>> = None;
                    let mut id: Option<::Value<String>> = None;
                    let mut parameters: Option<::Value<::json::Value>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "ConnectorArn" => {
                                connector_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Id" => {
                                id = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Parameters" => {
                                parameters = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(Connector {
                        connector_arn: connector_arn.ok_or(::serde::de::Error::missing_field("ConnectorArn"))?,
                        id: id.ok_or(::serde::de::Error::missing_field("Id"))?,
                        parameters: parameters,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }

    /// The [`AWS::Greengrass::ConnectorDefinition.ConnectorDefinitionVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinition-connectordefinitionversion.html) property type.
    #[derive(Debug, Default)]
    pub struct ConnectorDefinitionVersion {
        /// Property [`Connectors`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinition-connectordefinitionversion.html#cfn-greengrass-connectordefinition-connectordefinitionversion-connectors).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub connectors: ::ValueList<Connector>,
    }

    impl ::codec::SerializeValue for ConnectorDefinitionVersion {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Connectors", &self.connectors)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    // Visitor-based deserialization; `Connectors` is required.
    impl ::codec::DeserializeValue for ConnectorDefinitionVersion {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<ConnectorDefinitionVersion, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = ConnectorDefinitionVersion;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type ConnectorDefinitionVersion")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut connectors: Option<::ValueList<Connector>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "Connectors" => {
                                connectors = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(ConnectorDefinitionVersion {
                        connectors: connectors.ok_or(::serde::de::Error::missing_field("Connectors"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }
}

pub mod connector_definition_version {
    //! Property types for the `ConnectorDefinitionVersion` resource.

    /// The [`AWS::Greengrass::ConnectorDefinitionVersion.Connector`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinitionversion-connector.html) property type.
    #[derive(Debug, Default)]
    pub struct Connector {
        /// Property [`ConnectorArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinitionversion-connector.html#cfn-greengrass-connectordefinitionversion-connector-connectorarn).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub connector_arn: ::Value<String>,
        /// Property [`Id`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinitionversion-connector.html#cfn-greengrass-connectordefinitionversion-connector-id).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub id: ::Value<String>,
        /// Property [`Parameters`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinitionversion-connector.html#cfn-greengrass-connectordefinitionversion-connector-parameters).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub parameters: Option<::Value<::json::Value>>,
    }

    // `Parameters` is optional and emitted only when present.
    impl ::codec::SerializeValue for Connector {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "ConnectorArn", &self.connector_arn)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Id", &self.id)?;
            if let Some(ref parameters) = self.parameters {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "Parameters", parameters)?;
            }
            ::serde::ser::SerializeMap::end(map)
        }
    }

    // Visitor-based deserialization; `ConnectorArn` and `Id` are required.
    impl ::codec::DeserializeValue for Connector {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Connector, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = Connector;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type Connector")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut connector_arn: Option<::Value<String>> = None;
                    let mut id: Option<::Value<String>> = None;
                    let mut parameters: Option<::Value<::json::Value>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "ConnectorArn" => {
                                connector_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Id" => {
                                id = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Parameters" => {
                                parameters = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(Connector {
                        connector_arn: connector_arn.ok_or(::serde::de::Error::missing_field("ConnectorArn"))?,
                        id: id.ok_or(::serde::de::Error::missing_field("Id"))?,
                        parameters: parameters,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }
}

pub mod core_definition {
    //! Property types for the `CoreDefinition` resource.

    /// The [`AWS::Greengrass::CoreDefinition.Core`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinition-core.html) property type.
    #[derive(Debug, Default)]
    pub struct Core {
        /// Property [`CertificateArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinition-core.html#cfn-greengrass-coredefinition-core-certificatearn).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub certificate_arn: ::Value<String>,
        /// Property [`Id`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinition-core.html#cfn-greengrass-coredefinition-core-id).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub id: ::Value<String>,
        /// Property [`SyncShadow`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinition-core.html#cfn-greengrass-coredefinition-core-syncshadow).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub sync_shadow: Option<::Value<bool>>,
        /// Property [`ThingArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinition-core.html#cfn-greengrass-coredefinition-core-thingarn).
/// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. pub thing_arn: ::Value<String>, } impl ::codec::SerializeValue for Core { fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let mut map = ::serde::Serializer::serialize_map(s, None)?; ::serde::ser::SerializeMap::serialize_entry(&mut map, "CertificateArn", &self.certificate_arn)?; ::serde::ser::SerializeMap::serialize_entry(&mut map, "Id", &self.id)?; if let Some(ref sync_shadow) = self.sync_shadow { ::serde::ser::SerializeMap::serialize_entry(&mut map, "SyncShadow", sync_shadow)?; } ::serde::ser::SerializeMap::serialize_entry(&mut map, "ThingArn", &self.thing_arn)?; ::serde::ser::SerializeMap::end(map) } } impl ::codec::DeserializeValue for Core { fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Core, D::Error> { struct Visitor; impl<'de> ::serde::de::Visitor<'de> for Visitor { type Value = Core; fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { write!(f, "a struct of type Core") } fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> { let mut certificate_arn: Option<::Value<String>> = None; let mut id: Option<::Value<String>> = None; let mut sync_shadow: Option<::Value<bool>> = None; let mut thing_arn: Option<::Value<String>> = None; while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? 
{ match __cfn_key.as_ref() { "CertificateArn" => { certificate_arn = ::serde::de::MapAccess::next_value(&mut map)?; } "Id" => { id = ::serde::de::MapAccess::next_value(&mut map)?; } "SyncShadow" => { sync_shadow = ::serde::de::MapAccess::next_value(&mut map)?; } "ThingArn" => { thing_arn = ::serde::de::MapAccess::next_value(&mut map)?; } _ => {} } } Ok(Core { certificate_arn: certificate_arn.ok_or(::serde::de::Error::missing_field("CertificateArn"))?, id: id.ok_or(::serde::de::Error::missing_field("Id"))?, sync_shadow: sync_shadow, thing_arn: thing_arn.ok_or(::serde::de::Error::missing_field("ThingArn"))?, }) } } d.deserialize_map(Visitor) } } /// The [`AWS::Greengrass::CoreDefinition.CoreDefinitionVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinition-coredefinitionversion.html) property type. #[derive(Debug, Default)] pub struct CoreDefinitionVersion { /// Property [`Cores`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinition-coredefinitionversion.html#cfn-greengrass-coredefinition-coredefinitionversion-cores). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. 
pub cores: ::ValueList<Core>, } impl ::codec::SerializeValue for CoreDefinitionVersion { fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let mut map = ::serde::Serializer::serialize_map(s, None)?; ::serde::ser::SerializeMap::serialize_entry(&mut map, "Cores", &self.cores)?; ::serde::ser::SerializeMap::end(map) } } impl ::codec::DeserializeValue for CoreDefinitionVersion { fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<CoreDefinitionVersion, D::Error> { struct Visitor; impl<'de> ::serde::de::Visitor<'de> for Visitor { type Value = CoreDefinitionVersion; fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { write!(f, "a struct of type CoreDefinitionVersion") } fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> { let mut cores: Option<::ValueList<Core>> = None; while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? { match __cfn_key.as_ref() { "Cores" => { cores = ::serde::de::MapAccess::next_value(&mut map)?; } _ => {} } } Ok(CoreDefinitionVersion { cores: cores.ok_or(::serde::de::Error::missing_field("Cores"))?, }) } } d.deserialize_map(Visitor) } } } pub mod core_definition_version { //! Property types for the `CoreDefinitionVersion` resource. /// The [`AWS::Greengrass::CoreDefinitionVersion.Core`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinitionversion-core.html) property type. #[derive(Debug, Default)] pub struct Core { /// Property [`CertificateArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinitionversion-core.html#cfn-greengrass-coredefinitionversion-core-certificatearn). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. 
        pub certificate_arn: ::Value<String>,
        /// Property [`Id`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinitionversion-core.html#cfn-greengrass-coredefinitionversion-core-id).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub id: ::Value<String>,
        /// Property [`SyncShadow`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinitionversion-core.html#cfn-greengrass-coredefinitionversion-core-syncshadow).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub sync_shadow: Option<::Value<bool>>,
        /// Property [`ThingArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinitionversion-core.html#cfn-greengrass-coredefinitionversion-core-thingarn).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub thing_arn: ::Value<String>,
    }

    // Serializes `Core`; `SyncShadow` is the only optional property.
    impl ::codec::SerializeValue for Core {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "CertificateArn", &self.certificate_arn)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Id", &self.id)?;
            if let Some(ref sync_shadow) = self.sync_shadow {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "SyncShadow", sync_shadow)?;
            }
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "ThingArn", &self.thing_arn)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    // Deserializes `Core`; unknown keys ignored, required keys enforced.
    impl ::codec::DeserializeValue for Core {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Core, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = Core;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type Core")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut certificate_arn: Option<::Value<String>> = None;
                    let mut id: Option<::Value<String>> = None;
                    let mut sync_shadow: Option<::Value<bool>> = None;
                    let mut thing_arn: Option<::Value<String>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "CertificateArn" => {
                                certificate_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Id" => {
                                id = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "SyncShadow" => {
                                sync_shadow = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "ThingArn" => {
                                thing_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(Core {
                        certificate_arn: certificate_arn.ok_or(::serde::de::Error::missing_field("CertificateArn"))?,
                        id: id.ok_or(::serde::de::Error::missing_field("Id"))?,
                        sync_shadow: sync_shadow,
                        thing_arn: thing_arn.ok_or(::serde::de::Error::missing_field("ThingArn"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }
}
// end of mod core_definition_version

pub mod device_definition {
    //! Property types for the `DeviceDefinition` resource.

    /// The [`AWS::Greengrass::DeviceDefinition.Device`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinition-device.html) property type.
    #[derive(Debug, Default)]
    pub struct Device {
        /// Property [`CertificateArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinition-device.html#cfn-greengrass-devicedefinition-device-certificatearn).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub certificate_arn: ::Value<String>,
        /// Property [`Id`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinition-device.html#cfn-greengrass-devicedefinition-device-id).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub id: ::Value<String>,
        /// Property [`SyncShadow`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinition-device.html#cfn-greengrass-devicedefinition-device-syncshadow).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub sync_shadow: Option<::Value<bool>>,
        /// Property [`ThingArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinition-device.html#cfn-greengrass-devicedefinition-device-thingarn).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub thing_arn: ::Value<String>,
    }

    // Serializes `Device`; `SyncShadow` is the only optional property.
    impl ::codec::SerializeValue for Device {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "CertificateArn", &self.certificate_arn)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Id", &self.id)?;
            if let Some(ref sync_shadow) = self.sync_shadow {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "SyncShadow", sync_shadow)?;
            }
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "ThingArn", &self.thing_arn)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    // Deserializes `Device`; unknown keys ignored, required keys enforced.
    impl ::codec::DeserializeValue for Device {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Device, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = Device;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type Device")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut certificate_arn: Option<::Value<String>> = None;
                    let mut id: Option<::Value<String>> = None;
                    let mut sync_shadow: Option<::Value<bool>> = None;
                    let mut thing_arn: Option<::Value<String>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "CertificateArn" => {
                                certificate_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Id" => {
                                id = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "SyncShadow" => {
                                sync_shadow = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "ThingArn" => {
                                thing_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(Device {
                        certificate_arn: certificate_arn.ok_or(::serde::de::Error::missing_field("CertificateArn"))?,
                        id: id.ok_or(::serde::de::Error::missing_field("Id"))?,
                        sync_shadow: sync_shadow,
                        thing_arn: thing_arn.ok_or(::serde::de::Error::missing_field("ThingArn"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }

    /// The [`AWS::Greengrass::DeviceDefinition.DeviceDefinitionVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinition-devicedefinitionversion.html) property type.
    #[derive(Debug, Default)]
    pub struct DeviceDefinitionVersion {
        /// Property [`Devices`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinition-devicedefinitionversion.html#cfn-greengrass-devicedefinition-devicedefinitionversion-devices).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub devices: ::ValueList<Device>,
    }

    // Serializes `DeviceDefinitionVersion`; `Devices` is required.
    impl ::codec::SerializeValue for DeviceDefinitionVersion {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Devices", &self.devices)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    // Deserializes `DeviceDefinitionVersion`; missing `Devices` is an error.
    impl ::codec::DeserializeValue for DeviceDefinitionVersion {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<DeviceDefinitionVersion, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = DeviceDefinitionVersion;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type DeviceDefinitionVersion")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut devices: Option<::ValueList<Device>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "Devices" => {
                                devices = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(DeviceDefinitionVersion {
                        devices: devices.ok_or(::serde::de::Error::missing_field("Devices"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }
}
// end of mod device_definition

pub mod device_definition_version {
    //! Property types for the `DeviceDefinitionVersion` resource.

    /// The [`AWS::Greengrass::DeviceDefinitionVersion.Device`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinitionversion-device.html) property type.
    #[derive(Debug, Default)]
    pub struct Device {
        /// Property [`CertificateArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinitionversion-device.html#cfn-greengrass-devicedefinitionversion-device-certificatearn).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub certificate_arn: ::Value<String>,
        /// Property [`Id`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinitionversion-device.html#cfn-greengrass-devicedefinitionversion-device-id).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub id: ::Value<String>,
        /// Property [`SyncShadow`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinitionversion-device.html#cfn-greengrass-devicedefinitionversion-device-syncshadow).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub sync_shadow: Option<::Value<bool>>,
        /// Property [`ThingArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinitionversion-device.html#cfn-greengrass-devicedefinitionversion-device-thingarn).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub thing_arn: ::Value<String>,
    }

    // Serializes `Device`; `SyncShadow` is the only optional property.
    impl ::codec::SerializeValue for Device {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "CertificateArn", &self.certificate_arn)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Id", &self.id)?;
            if let Some(ref sync_shadow) = self.sync_shadow {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "SyncShadow", sync_shadow)?;
            }
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "ThingArn", &self.thing_arn)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    // Deserializes `Device`; unknown keys ignored, required keys enforced.
    impl ::codec::DeserializeValue for Device {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Device, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = Device;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type Device")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut certificate_arn: Option<::Value<String>> = None;
                    let mut id: Option<::Value<String>> = None;
                    let mut sync_shadow: Option<::Value<bool>> = None;
                    let mut thing_arn: Option<::Value<String>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "CertificateArn" => {
                                certificate_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Id" => {
                                id = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "SyncShadow" => {
                                sync_shadow = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "ThingArn" => {
                                thing_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(Device {
                        certificate_arn: certificate_arn.ok_or(::serde::de::Error::missing_field("CertificateArn"))?,
                        id: id.ok_or(::serde::de::Error::missing_field("Id"))?,
                        sync_shadow: sync_shadow,
                        thing_arn: thing_arn.ok_or(::serde::de::Error::missing_field("ThingArn"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }
}
// end of mod device_definition_version

pub mod function_definition {
    //! Property types for the `FunctionDefinition` resource.

    /// The [`AWS::Greengrass::FunctionDefinition.DefaultConfig`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-defaultconfig.html) property type.
    #[derive(Debug, Default)]
    pub struct DefaultConfig {
        /// Property [`Execution`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-defaultconfig.html#cfn-greengrass-functiondefinition-defaultconfig-execution).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub execution: ::Value<Execution>,
    }

    // Serializes `DefaultConfig`; `Execution` is required.
    impl ::codec::SerializeValue for DefaultConfig {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Execution", &self.execution)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    // Deserializes `DefaultConfig`; missing `Execution` is an error.
    impl ::codec::DeserializeValue for DefaultConfig {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<DefaultConfig, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = DefaultConfig;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type DefaultConfig")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut execution: Option<::Value<Execution>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "Execution" => {
                                execution = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(DefaultConfig {
                        execution: execution.ok_or(::serde::de::Error::missing_field("Execution"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }

    /// The [`AWS::Greengrass::FunctionDefinition.Environment`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-environment.html) property type.
    #[derive(Debug, Default)]
    pub struct Environment {
        /// Property [`AccessSysfs`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-environment.html#cfn-greengrass-functiondefinition-environment-accesssysfs).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub access_sysfs: Option<::Value<bool>>,
        /// Property [`Execution`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-environment.html#cfn-greengrass-functiondefinition-environment-execution).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub execution: Option<::Value<Execution>>,
        /// Property [`ResourceAccessPolicies`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-environment.html#cfn-greengrass-functiondefinition-environment-resourceaccesspolicies).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub resource_access_policies: Option<::ValueList<ResourceAccessPolicy>>,
        /// Property [`Variables`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-environment.html#cfn-greengrass-functiondefinition-environment-variables).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub variables: Option<::Value<::json::Value>>,
    }

    // Serializes `Environment`; every property is optional and omitted when `None`.
    impl ::codec::SerializeValue for Environment {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            if let Some(ref access_sysfs) = self.access_sysfs {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "AccessSysfs", access_sysfs)?;
            }
            if let Some(ref execution) = self.execution {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "Execution", execution)?;
            }
            if let Some(ref resource_access_policies) = self.resource_access_policies {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "ResourceAccessPolicies", resource_access_policies)?;
            }
            if let Some(ref variables) = self.variables {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "Variables", variables)?;
            }
            ::serde::ser::SerializeMap::end(map)
        }
    }

    // Deserializes `Environment`; all properties optional, unknown keys ignored.
    impl ::codec::DeserializeValue for Environment {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Environment, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = Environment;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type Environment")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut access_sysfs: Option<::Value<bool>> = None;
                    let mut execution: Option<::Value<Execution>> = None;
                    let mut resource_access_policies: Option<::ValueList<ResourceAccessPolicy>> = None;
                    let mut variables: Option<::Value<::json::Value>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "AccessSysfs" => {
                                access_sysfs = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Execution" => {
                                execution = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "ResourceAccessPolicies" => {
                                resource_access_policies = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Variables" => {
                                variables = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(Environment {
                        access_sysfs: access_sysfs,
                        execution: execution,
                        resource_access_policies: resource_access_policies,
                        variables: variables,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }

    /// The [`AWS::Greengrass::FunctionDefinition.Execution`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-execution.html) property type.
    #[derive(Debug, Default)]
    pub struct Execution {
        /// Property [`IsolationMode`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-execution.html#cfn-greengrass-functiondefinition-execution-isolationmode).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub isolation_mode: Option<::Value<String>>,
        /// Property [`RunAs`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-execution.html#cfn-greengrass-functiondefinition-execution-runas).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub run_as: Option<::Value<RunAs>>,
    }

    // Serializes `Execution`; both properties are optional.
    impl ::codec::SerializeValue for Execution {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            if let Some(ref isolation_mode) = self.isolation_mode {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "IsolationMode", isolation_mode)?;
            }
            if let Some(ref run_as) = self.run_as {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "RunAs", run_as)?;
            }
            ::serde::ser::SerializeMap::end(map)
        }
    }

    // Deserializes `Execution`; all properties optional, unknown keys ignored.
    impl ::codec::DeserializeValue for Execution {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Execution, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = Execution;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type Execution")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut isolation_mode: Option<::Value<String>> = None;
                    let mut run_as: Option<::Value<RunAs>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "IsolationMode" => {
                                isolation_mode = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "RunAs" => {
                                run_as = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(Execution {
                        isolation_mode: isolation_mode,
                        run_as: run_as,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }

    /// The [`AWS::Greengrass::FunctionDefinition.Function`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-function.html) property type.
    #[derive(Debug, Default)]
    pub struct Function {
        /// Property [`FunctionArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-function.html#cfn-greengrass-functiondefinition-function-functionarn).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub function_arn: ::Value<String>,
        /// Property [`FunctionConfiguration`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-function.html#cfn-greengrass-functiondefinition-function-functionconfiguration).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub function_configuration: ::Value<FunctionConfiguration>,
        /// Property [`Id`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-function.html#cfn-greengrass-functiondefinition-function-id).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub id: ::Value<String>,
    }

    // Serializes `Function`; all three properties are required.
    impl ::codec::SerializeValue for Function {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "FunctionArn", &self.function_arn)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "FunctionConfiguration", &self.function_configuration)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Id", &self.id)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    // Deserializes `Function`; unknown keys ignored, required keys enforced.
    impl ::codec::DeserializeValue for Function {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Function, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = Function;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type Function")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut function_arn: Option<::Value<String>> = None;
                    let mut function_configuration: Option<::Value<FunctionConfiguration>> = None;
                    let mut id: Option<::Value<String>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "FunctionArn" => {
                                function_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "FunctionConfiguration" => {
                                function_configuration = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Id" => {
                                id = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(Function {
                        function_arn: function_arn.ok_or(::serde::de::Error::missing_field("FunctionArn"))?,
                        function_configuration: function_configuration.ok_or(::serde::de::Error::missing_field("FunctionConfiguration"))?,
                        id: id.ok_or(::serde::de::Error::missing_field("Id"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }

    /// The [`AWS::Greengrass::FunctionDefinition.FunctionConfiguration`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html) property type.
    #[derive(Debug, Default)]
    pub struct FunctionConfiguration {
        /// Property [`EncodingType`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-encodingtype).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub encoding_type: Option<::Value<String>>,
        /// Property [`Environment`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-environment).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub environment: Option<::Value<Environment>>,
        /// Property [`ExecArgs`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-execargs).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub exec_args: Option<::Value<String>>,
        /// Property [`Executable`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-executable).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub executable: Option<::Value<String>>,
        /// Property [`MemorySize`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-memorysize).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub memory_size: Option<::Value<u32>>,
        /// Property [`Pinned`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-pinned).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub pinned: Option<::Value<bool>>,
        /// Property [`Timeout`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-timeout).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub timeout: Option<::Value<u32>>,
    }

    // Serializes `FunctionConfiguration`; every property is optional and
    // omitted when `None`.
    impl ::codec::SerializeValue for FunctionConfiguration {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            if let Some(ref encoding_type) = self.encoding_type {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "EncodingType", encoding_type)?;
            }
            if let Some(ref environment) = self.environment {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "Environment", environment)?;
            }
            if let Some(ref exec_args) = self.exec_args {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "ExecArgs", exec_args)?;
            }
            if let Some(ref executable) = self.executable {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "Executable", executable)?;
            }
            if let Some(ref memory_size) = self.memory_size {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "MemorySize", memory_size)?;
            }
            if let Some(ref pinned) = self.pinned {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "Pinned", pinned)?;
            }
            if let Some(ref timeout) = self.timeout {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "Timeout", timeout)?;
            }
            ::serde::ser::SerializeMap::end(map)
        }
    }

    impl ::codec::DeserializeValue for FunctionConfiguration {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<FunctionConfiguration, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = FunctionConfiguration;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type FunctionConfiguration")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut encoding_type: Option<::Value<String>> = None;
                    let mut environment: Option<::Value<Environment>> = None;
                    let mut exec_args: Option<::Value<String>> = None;
                    let mut executable: Option<::Value<String>> = None;
                    let mut memory_size: Option<::Value<u32>> = None;
                    let mut pinned: Option<::Value<bool>> = None;
                    let mut 
timeout: Option<::Value<u32>> = None; while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? { match __cfn_key.as_ref() { "EncodingType" => { encoding_type = ::serde::de::MapAccess::next_value(&mut map)?; } "Environment" => { environment = ::serde::de::MapAccess::next_value(&mut map)?; } "ExecArgs" => { exec_args = ::serde::de::MapAccess::next_value(&mut map)?; } "Executable" => { executable = ::serde::de::MapAccess::next_value(&mut map)?; } "MemorySize" => { memory_size = ::serde::de::MapAccess::next_value(&mut map)?; } "Pinned" => { pinned = ::serde::de::MapAccess::next_value(&mut map)?; } "Timeout" => { timeout = ::serde::de::MapAccess::next_value(&mut map)?; } _ => {} } } Ok(FunctionConfiguration { encoding_type: encoding_type, environment: environment, exec_args: exec_args, executable: executable, memory_size: memory_size, pinned: pinned, timeout: timeout, }) } } d.deserialize_map(Visitor) } } /// The [`AWS::Greengrass::FunctionDefinition.FunctionDefinitionVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functiondefinitionversion.html) property type. #[derive(Debug, Default)] pub struct FunctionDefinitionVersion { /// Property [`DefaultConfig`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functiondefinitionversion.html#cfn-greengrass-functiondefinition-functiondefinitionversion-defaultconfig). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. pub default_config: Option<::Value<DefaultConfig>>, /// Property [`Functions`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functiondefinitionversion.html#cfn-greengrass-functiondefinition-functiondefinitionversion-functions). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. 
pub functions: ::ValueList<Function>, } impl ::codec::SerializeValue for FunctionDefinitionVersion { fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let mut map = ::serde::Serializer::serialize_map(s, None)?; if let Some(ref default_config) = self.default_config { ::serde::ser::SerializeMap::serialize_entry(&mut map, "DefaultConfig", default_config)?; } ::serde::ser::SerializeMap::serialize_entry(&mut map, "Functions", &self.functions)?; ::serde::ser::SerializeMap::end(map) } } impl ::codec::DeserializeValue for FunctionDefinitionVersion { fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<FunctionDefinitionVersion, D::Error> { struct Visitor; impl<'de> ::serde::de::Visitor<'de> for Visitor { type Value = FunctionDefinitionVersion; fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { write!(f, "a struct of type FunctionDefinitionVersion") } fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> { let mut default_config: Option<::Value<DefaultConfig>> = None; let mut functions: Option<::ValueList<Function>> = None; while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? { match __cfn_key.as_ref() { "DefaultConfig" => { default_config = ::serde::de::MapAccess::next_value(&mut map)?; } "Functions" => { functions = ::serde::de::MapAccess::next_value(&mut map)?; } _ => {} } } Ok(FunctionDefinitionVersion { default_config: default_config, functions: functions.ok_or(::serde::de::Error::missing_field("Functions"))?, }) } } d.deserialize_map(Visitor) } } /// The [`AWS::Greengrass::FunctionDefinition.ResourceAccessPolicy`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-resourceaccesspolicy.html) property type. 
// NOTE(review): machine-generated CloudFormation property-type glue (presumably
// emitted by the cfn codegen that produced this whole file) — prefer regenerating
// from the CloudFormation spec over hand-editing.
#[derive(Debug, Default)]
pub struct ResourceAccessPolicy {
    /// Property [`Permission`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-resourceaccesspolicy.html#cfn-greengrass-functiondefinition-resourceaccesspolicy-permission).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub permission: Option<::Value<String>>,
    /// Property [`ResourceId`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-resourceaccesspolicy.html#cfn-greengrass-functiondefinition-resourceaccesspolicy-resourceid).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub resource_id: ::Value<String>,
}

impl ::codec::SerializeValue for ResourceAccessPolicy {
    // Serializes to a CFN-style JSON map; optional properties are emitted only when present.
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        if let Some(ref permission) = self.permission {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Permission", permission)?;
        }
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "ResourceId", &self.resource_id)?;
        ::serde::ser::SerializeMap::end(map)
    }
}

impl ::codec::DeserializeValue for ResourceAccessPolicy {
    fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<ResourceAccessPolicy, D::Error> {
        // Ad-hoc serde visitor: collects known keys into Options, then validates.
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = ResourceAccessPolicy;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type ResourceAccessPolicy")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut permission: Option<::Value<String>> = None;
                let mut resource_id: Option<::Value<String>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "Permission" => {
                            permission = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "ResourceId" => {
                            resource_id = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        // Unknown keys are skipped (tolerates newer CFN spec fields).
                        _ => {}
                    }
                }

                Ok(ResourceAccessPolicy {
                    permission: permission,
                    // Required property: absence is a deserialization error.
                    resource_id: resource_id.ok_or(::serde::de::Error::missing_field("ResourceId"))?,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

/// The [`AWS::Greengrass::FunctionDefinition.RunAs`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-runas.html) property type.
#[derive(Debug, Default)]
pub struct RunAs {
    /// Property [`Gid`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-runas.html#cfn-greengrass-functiondefinition-runas-gid).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub gid: Option<::Value<u32>>,
    /// Property [`Uid`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-runas.html#cfn-greengrass-functiondefinition-runas-uid).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub uid: Option<::Value<u32>>,
}

impl ::codec::SerializeValue for RunAs {
    // Both properties are optional; each is emitted only when present.
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        if let Some(ref gid) = self.gid {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Gid", gid)?;
        }
        if let Some(ref uid) = self.uid {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Uid", uid)?;
        }
        ::serde::ser::SerializeMap::end(map)
    }
}

impl ::codec::DeserializeValue for RunAs {
    fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<RunAs, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = RunAs;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type RunAs")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut gid: Option<::Value<u32>> = None;
                let mut uid: Option<::Value<u32>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "Gid" => {
                            gid = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Uid" => {
                            uid = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        // Unknown keys are skipped (tolerates newer CFN spec fields).
                        _ => {}
                    }
                }

                // No required properties; missing keys simply stay None.
                Ok(RunAs {
                    gid: gid,
                    uid: uid,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}
}

pub mod function_definition_version {
    //! Property types for the `FunctionDefinitionVersion` resource.

/// The [`AWS::Greengrass::FunctionDefinitionVersion.DefaultConfig`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-defaultconfig.html) property type.
#[derive(Debug, Default)]
pub struct DefaultConfig {
    /// Property [`Execution`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-defaultconfig.html#cfn-greengrass-functiondefinitionversion-defaultconfig-execution).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub execution: ::Value<Execution>,
}

impl ::codec::SerializeValue for DefaultConfig {
    // Execution is the only (required) property, so it is always emitted.
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "Execution", &self.execution)?;
        ::serde::ser::SerializeMap::end(map)
    }
}

impl ::codec::DeserializeValue for DefaultConfig {
    fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<DefaultConfig, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = DefaultConfig;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type DefaultConfig")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut execution: Option<::Value<Execution>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "Execution" => {
                            execution = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }

                Ok(DefaultConfig {
                    // Required property: absence is a deserialization error.
                    execution: execution.ok_or(::serde::de::Error::missing_field("Execution"))?,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

/// The [`AWS::Greengrass::FunctionDefinitionVersion.Environment`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-environment.html) property type.
#[derive(Debug, Default)]
pub struct Environment {
    /// Property [`AccessSysfs`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-environment.html#cfn-greengrass-functiondefinitionversion-environment-accesssysfs).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
pub access_sysfs: Option<::Value<bool>>, /// Property [`Execution`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-environment.html#cfn-greengrass-functiondefinitionversion-environment-execution). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. pub execution: Option<::Value<Execution>>, /// Property [`ResourceAccessPolicies`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-environment.html#cfn-greengrass-functiondefinitionversion-environment-resourceaccesspolicies). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. pub resource_access_policies: Option<::ValueList<ResourceAccessPolicy>>, /// Property [`Variables`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-environment.html#cfn-greengrass-functiondefinitionversion-environment-variables). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. 
pub variables: Option<::Value<::json::Value>>, } impl ::codec::SerializeValue for Environment { fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let mut map = ::serde::Serializer::serialize_map(s, None)?; if let Some(ref access_sysfs) = self.access_sysfs { ::serde::ser::SerializeMap::serialize_entry(&mut map, "AccessSysfs", access_sysfs)?; } if let Some(ref execution) = self.execution { ::serde::ser::SerializeMap::serialize_entry(&mut map, "Execution", execution)?; } if let Some(ref resource_access_policies) = self.resource_access_policies { ::serde::ser::SerializeMap::serialize_entry(&mut map, "ResourceAccessPolicies", resource_access_policies)?; } if let Some(ref variables) = self.variables { ::serde::ser::SerializeMap::serialize_entry(&mut map, "Variables", variables)?; } ::serde::ser::SerializeMap::end(map) } } impl ::codec::DeserializeValue for Environment { fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Environment, D::Error> { struct Visitor; impl<'de> ::serde::de::Visitor<'de> for Visitor { type Value = Environment; fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { write!(f, "a struct of type Environment") } fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> { let mut access_sysfs: Option<::Value<bool>> = None; let mut execution: Option<::Value<Execution>> = None; let mut resource_access_policies: Option<::ValueList<ResourceAccessPolicy>> = None; let mut variables: Option<::Value<::json::Value>> = None; while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? 
{
    match __cfn_key.as_ref() {
        "AccessSysfs" => {
            access_sysfs = ::serde::de::MapAccess::next_value(&mut map)?;
        }
        "Execution" => {
            execution = ::serde::de::MapAccess::next_value(&mut map)?;
        }
        "ResourceAccessPolicies" => {
            resource_access_policies = ::serde::de::MapAccess::next_value(&mut map)?;
        }
        "Variables" => {
            variables = ::serde::de::MapAccess::next_value(&mut map)?;
        }
        // Unknown keys are skipped (tolerates newer CFN spec fields).
        _ => {}
    }
}

// All Environment properties are optional, so no missing-field checks here.
Ok(Environment {
    access_sysfs: access_sysfs,
    execution: execution,
    resource_access_policies: resource_access_policies,
    variables: variables,
})
}
}

d.deserialize_map(Visitor)
}
}

/// The [`AWS::Greengrass::FunctionDefinitionVersion.Execution`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-execution.html) property type.
#[derive(Debug, Default)]
pub struct Execution {
    /// Property [`IsolationMode`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-execution.html#cfn-greengrass-functiondefinitionversion-execution-isolationmode).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub isolation_mode: Option<::Value<String>>,
    /// Property [`RunAs`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-execution.html#cfn-greengrass-functiondefinitionversion-execution-runas).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub run_as: Option<::Value<RunAs>>,
}

impl ::codec::SerializeValue for Execution {
    // Both properties are optional; each is emitted only when present.
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        if let Some(ref isolation_mode) = self.isolation_mode {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "IsolationMode", isolation_mode)?;
        }
        if let Some(ref run_as) = self.run_as {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "RunAs", run_as)?;
        }
        ::serde::ser::SerializeMap::end(map)
    }
}

impl ::codec::DeserializeValue for Execution {
    fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Execution, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = Execution;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type Execution")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut isolation_mode: Option<::Value<String>> = None;
                let mut run_as: Option<::Value<RunAs>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "IsolationMode" => {
                            isolation_mode = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "RunAs" => {
                            run_as = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }

                // No required properties; missing keys simply stay None.
                Ok(Execution {
                    isolation_mode: isolation_mode,
                    run_as: run_as,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

/// The [`AWS::Greengrass::FunctionDefinitionVersion.Function`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-function.html) property type.
#[derive(Debug, Default)]
pub struct Function {
    /// Property [`FunctionArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-function.html#cfn-greengrass-functiondefinitionversion-function-functionarn).
    ///
    /// Update type: _Immutable_.
/// AWS CloudFormation replaces the resource when you change this property. pub function_arn: ::Value<String>, /// Property [`FunctionConfiguration`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-function.html#cfn-greengrass-functiondefinitionversion-function-functionconfiguration). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. pub function_configuration: ::Value<FunctionConfiguration>, /// Property [`Id`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-function.html#cfn-greengrass-functiondefinitionversion-function-id). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. pub id: ::Value<String>, } impl ::codec::SerializeValue for Function { fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let mut map = ::serde::Serializer::serialize_map(s, None)?; ::serde::ser::SerializeMap::serialize_entry(&mut map, "FunctionArn", &self.function_arn)?; ::serde::ser::SerializeMap::serialize_entry(&mut map, "FunctionConfiguration", &self.function_configuration)?; ::serde::ser::SerializeMap::serialize_entry(&mut map, "Id", &self.id)?; ::serde::ser::SerializeMap::end(map) } } impl ::codec::DeserializeValue for Function { fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Function, D::Error> { struct Visitor; impl<'de> ::serde::de::Visitor<'de> for Visitor { type Value = Function; fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { write!(f, "a struct of type Function") } fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> { let mut function_arn: Option<::Value<String>> = None; let mut function_configuration: Option<::Value<FunctionConfiguration>> = None; let mut id: Option<::Value<String>> = None; while let 
Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? { match __cfn_key.as_ref() { "FunctionArn" => { function_arn = ::serde::de::MapAccess::next_value(&mut map)?; } "FunctionConfiguration" => { function_configuration = ::serde::de::MapAccess::next_value(&mut map)?; } "Id" => { id = ::serde::de::MapAccess::next_value(&mut map)?; } _ => {} } } Ok(Function { function_arn: function_arn.ok_or(::serde::de::Error::missing_field("FunctionArn"))?, function_configuration: function_configuration.ok_or(::serde::de::Error::missing_field("FunctionConfiguration"))?, id: id.ok_or(::serde::de::Error::missing_field("Id"))?, }) } } d.deserialize_map(Visitor) } } /// The [`AWS::Greengrass::FunctionDefinitionVersion.FunctionConfiguration`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html) property type. #[derive(Debug, Default)] pub struct FunctionConfiguration { /// Property [`EncodingType`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-encodingtype). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. pub encoding_type: Option<::Value<String>>, /// Property [`Environment`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-environment). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. 
pub environment: Option<::Value<Environment>>, /// Property [`ExecArgs`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-execargs). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. pub exec_args: Option<::Value<String>>, /// Property [`Executable`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-executable). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. pub executable: Option<::Value<String>>, /// Property [`MemorySize`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-memorysize). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. pub memory_size: Option<::Value<u32>>, /// Property [`Pinned`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-pinned). /// /// Update type: _Mutable_. /// AWS CloudFormation doesn't replace the resource when you change this property. pub pinned: Option<::Value<bool>>, /// Property [`Timeout`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-timeout). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. 
pub timeout: Option<::Value<u32>>, } impl ::codec::SerializeValue for FunctionConfiguration { fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let mut map = ::serde::Serializer::serialize_map(s, None)?; if let Some(ref encoding_type) = self.encoding_type { ::serde::ser::SerializeMap::serialize_entry(&mut map, "EncodingType", encoding_type)?; } if let Some(ref environment) = self.environment { ::serde::ser::SerializeMap::serialize_entry(&mut map, "Environment", environment)?; } if let Some(ref exec_args) = self.exec_args { ::serde::ser::SerializeMap::serialize_entry(&mut map, "ExecArgs", exec_args)?; } if let Some(ref executable) = self.executable { ::serde::ser::SerializeMap::serialize_entry(&mut map, "Executable", executable)?; } if let Some(ref memory_size) = self.memory_size { ::serde::ser::SerializeMap::serialize_entry(&mut map, "MemorySize", memory_size)?; } if let Some(ref pinned) = self.pinned { ::serde::ser::SerializeMap::serialize_entry(&mut map, "Pinned", pinned)?; } if let Some(ref timeout) = self.timeout { ::serde::ser::SerializeMap::serialize_entry(&mut map, "Timeout", timeout)?; } ::serde::ser::SerializeMap::end(map) } } impl ::codec::DeserializeValue for FunctionConfiguration { fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<FunctionConfiguration, D::Error> { struct Visitor; impl<'de> ::serde::de::Visitor<'de> for Visitor { type Value = FunctionConfiguration; fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { write!(f, "a struct of type FunctionConfiguration") } fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> { let mut encoding_type: Option<::Value<String>> = None; let mut environment: Option<::Value<Environment>> = None; let mut exec_args: Option<::Value<String>> = None; let mut executable: Option<::Value<String>> = None; let mut memory_size: Option<::Value<u32>> = None; let mut pinned: Option<::Value<bool>> = None; let mut 
timeout: Option<::Value<u32>> = None; while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? { match __cfn_key.as_ref() { "EncodingType" => { encoding_type = ::serde::de::MapAccess::next_value(&mut map)?; } "Environment" => { environment = ::serde::de::MapAccess::next_value(&mut map)?; } "ExecArgs" => { exec_args = ::serde::de::MapAccess::next_value(&mut map)?; } "Executable" => { executable = ::serde::de::MapAccess::next_value(&mut map)?; } "MemorySize" => { memory_size = ::serde::de::MapAccess::next_value(&mut map)?; } "Pinned" => { pinned = ::serde::de::MapAccess::next_value(&mut map)?; } "Timeout" => { timeout = ::serde::de::MapAccess::next_value(&mut map)?; } _ => {} } } Ok(FunctionConfiguration { encoding_type: encoding_type, environment: environment, exec_args: exec_args, executable: executable, memory_size: memory_size, pinned: pinned, timeout: timeout, }) } } d.deserialize_map(Visitor) } } /// The [`AWS::Greengrass::FunctionDefinitionVersion.ResourceAccessPolicy`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-resourceaccesspolicy.html) property type. #[derive(Debug, Default)] pub struct ResourceAccessPolicy { /// Property [`Permission`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-resourceaccesspolicy.html#cfn-greengrass-functiondefinitionversion-resourceaccesspolicy-permission). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. pub permission: Option<::Value<String>>, /// Property [`ResourceId`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-resourceaccesspolicy.html#cfn-greengrass-functiondefinitionversion-resourceaccesspolicy-resourceid). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. 
pub resource_id: ::Value<String>,
}

impl ::codec::SerializeValue for ResourceAccessPolicy {
    // Optional Permission is emitted only when present; ResourceId is always emitted.
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        if let Some(ref permission) = self.permission {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Permission", permission)?;
        }
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "ResourceId", &self.resource_id)?;
        ::serde::ser::SerializeMap::end(map)
    }
}

impl ::codec::DeserializeValue for ResourceAccessPolicy {
    fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<ResourceAccessPolicy, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = ResourceAccessPolicy;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type ResourceAccessPolicy")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut permission: Option<::Value<String>> = None;
                let mut resource_id: Option<::Value<String>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "Permission" => {
                            permission = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "ResourceId" => {
                            resource_id = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        // Unknown keys are skipped (tolerates newer CFN spec fields).
                        _ => {}
                    }
                }

                Ok(ResourceAccessPolicy {
                    permission: permission,
                    // Required property: absence is a deserialization error.
                    resource_id: resource_id.ok_or(::serde::de::Error::missing_field("ResourceId"))?,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

/// The [`AWS::Greengrass::FunctionDefinitionVersion.RunAs`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-runas.html) property type.
#[derive(Debug, Default)]
pub struct RunAs {
    /// Property [`Gid`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-runas.html#cfn-greengrass-functiondefinitionversion-runas-gid).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub gid: Option<::Value<u32>>,
    /// Property [`Uid`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-runas.html#cfn-greengrass-functiondefinitionversion-runas-uid).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub uid: Option<::Value<u32>>,
}

impl ::codec::SerializeValue for RunAs {
    // Both properties are optional; each is emitted only when present.
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        if let Some(ref gid) = self.gid {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Gid", gid)?;
        }
        if let Some(ref uid) = self.uid {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Uid", uid)?;
        }
        ::serde::ser::SerializeMap::end(map)
    }
}

impl ::codec::DeserializeValue for RunAs {
    fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<RunAs, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = RunAs;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type RunAs")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut gid: Option<::Value<u32>> = None;
                let mut uid: Option<::Value<u32>> = None;

                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "Gid" => {
                            gid = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Uid" => {
                            uid = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }

                // No required properties; missing keys simply stay None.
                Ok(RunAs {
                    gid: gid,
                    uid: uid,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}
}

pub mod group {
    //! Property types for the `Group` resource.

/// The [`AWS::Greengrass::Group.GroupVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html) property type.
    #[derive(Debug, Default)]
    pub struct GroupVersion {
        /// Property [`ConnectorDefinitionVersionArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-connectordefinitionversionarn).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub connector_definition_version_arn: Option<::Value<String>>,
        /// Property [`CoreDefinitionVersionArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-coredefinitionversionarn).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub core_definition_version_arn: Option<::Value<String>>,
        /// Property [`DeviceDefinitionVersionArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-devicedefinitionversionarn).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub device_definition_version_arn: Option<::Value<String>>,
        /// Property [`FunctionDefinitionVersionArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-functiondefinitionversionarn).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub function_definition_version_arn: Option<::Value<String>>,
        /// Property [`LoggerDefinitionVersionArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-loggerdefinitionversionarn).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub logger_definition_version_arn: Option<::Value<String>>,
        /// Property [`ResourceDefinitionVersionArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-resourcedefinitionversionarn).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub resource_definition_version_arn: Option<::Value<String>>,
        /// Property [`SubscriptionDefinitionVersionArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-subscriptiondefinitionversionarn).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub subscription_definition_version_arn: Option<::Value<String>>,
    }

    impl ::codec::SerializeValue for GroupVersion {
        // All seven properties are optional definition-version ARNs; each is
        // written only when set.
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            if let Some(ref connector_definition_version_arn) = self.connector_definition_version_arn {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "ConnectorDefinitionVersionArn", connector_definition_version_arn)?;
            }
            if let Some(ref core_definition_version_arn) = self.core_definition_version_arn {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "CoreDefinitionVersionArn", core_definition_version_arn)?;
            }
            if let Some(ref device_definition_version_arn) = self.device_definition_version_arn {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "DeviceDefinitionVersionArn", device_definition_version_arn)?;
            }
            if let Some(ref function_definition_version_arn) = self.function_definition_version_arn {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "FunctionDefinitionVersionArn", function_definition_version_arn)?;
            }
            if let Some(ref logger_definition_version_arn) = self.logger_definition_version_arn {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "LoggerDefinitionVersionArn", logger_definition_version_arn)?;
            }
            if let Some(ref resource_definition_version_arn) = self.resource_definition_version_arn {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "ResourceDefinitionVersionArn", resource_definition_version_arn)?;
            }
            if let Some(ref subscription_definition_version_arn) = self.subscription_definition_version_arn {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "SubscriptionDefinitionVersionArn", subscription_definition_version_arn)?;
            }
            ::serde::ser::SerializeMap::end(map)
        }
    }

    impl ::codec::DeserializeValue for GroupVersion {
        // Unknown keys are skipped; all properties are optional, so no
        // missing-field checks are needed.
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<GroupVersion, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = GroupVersion;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type GroupVersion")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut connector_definition_version_arn: Option<::Value<String>> = None;
                    let mut core_definition_version_arn: Option<::Value<String>> = None;
                    let mut device_definition_version_arn: Option<::Value<String>> = None;
                    let mut function_definition_version_arn: Option<::Value<String>> = None;
                    let mut logger_definition_version_arn: Option<::Value<String>> = None;
                    let mut resource_definition_version_arn: Option<::Value<String>> = None;
                    let mut subscription_definition_version_arn: Option<::Value<String>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "ConnectorDefinitionVersionArn" => {
                                connector_definition_version_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "CoreDefinitionVersionArn" => {
                                core_definition_version_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "DeviceDefinitionVersionArn" => {
                                device_definition_version_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "FunctionDefinitionVersionArn" => {
                                function_definition_version_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "LoggerDefinitionVersionArn" => {
                                logger_definition_version_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "ResourceDefinitionVersionArn" => {
                                resource_definition_version_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "SubscriptionDefinitionVersionArn" => {
                                subscription_definition_version_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(GroupVersion {
                        connector_definition_version_arn: connector_definition_version_arn,
                        core_definition_version_arn: core_definition_version_arn,
                        device_definition_version_arn: device_definition_version_arn,
                        function_definition_version_arn: function_definition_version_arn,
                        logger_definition_version_arn: logger_definition_version_arn,
                        resource_definition_version_arn: resource_definition_version_arn,
                        subscription_definition_version_arn: subscription_definition_version_arn,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }
}

pub mod logger_definition {
    //! Property types for the `LoggerDefinition` resource.

    /// The [`AWS::Greengrass::LoggerDefinition.Logger`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-logger.html) property type.
    #[derive(Debug, Default)]
    pub struct Logger {
        /// Property [`Component`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-logger.html#cfn-greengrass-loggerdefinition-logger-component).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub component: ::Value<String>,
        /// Property [`Id`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-logger.html#cfn-greengrass-loggerdefinition-logger-id).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub id: ::Value<String>,
        /// Property [`Level`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-logger.html#cfn-greengrass-loggerdefinition-logger-level).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub level: ::Value<String>,
        /// Property [`Space`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-logger.html#cfn-greengrass-loggerdefinition-logger-space).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub space: Option<::Value<u32>>,
        /// Property [`Type`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-logger.html#cfn-greengrass-loggerdefinition-logger-type).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub r#type: ::Value<String>,
    }

    impl ::codec::SerializeValue for Logger {
        // Required properties are always written; optional `Space` is written
        // only when set. `r#type` is the raw-identifier form of the reserved
        // word `type`, serialized under the CloudFormation key "Type".
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Component", &self.component)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Id", &self.id)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Level", &self.level)?;
            if let Some(ref space) = self.space {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "Space", space)?;
            }
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Type", &self.r#type)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    impl ::codec::DeserializeValue for Logger {
        // Unknown keys are skipped; missing required keys (Component, Id,
        // Level, Type) produce serde `missing_field` errors.
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Logger, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = Logger;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type Logger")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut component: Option<::Value<String>> = None;
                    let mut id: Option<::Value<String>> = None;
                    let mut level: Option<::Value<String>> = None;
                    let mut space: Option<::Value<u32>> = None;
                    let mut r#type: Option<::Value<String>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "Component" => {
                                component = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Id" => {
                                id = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Level" => {
                                level = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Space" => {
                                space = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Type" => {
                                r#type = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(Logger {
                        component: component.ok_or(::serde::de::Error::missing_field("Component"))?,
                        id: id.ok_or(::serde::de::Error::missing_field("Id"))?,
                        level: level.ok_or(::serde::de::Error::missing_field("Level"))?,
                        space: space,
                        r#type: r#type.ok_or(::serde::de::Error::missing_field("Type"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }

    /// The [`AWS::Greengrass::LoggerDefinition.LoggerDefinitionVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-loggerdefinitionversion.html) property type.
    #[derive(Debug, Default)]
    pub struct LoggerDefinitionVersion {
        /// Property [`Loggers`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-loggerdefinitionversion.html#cfn-greengrass-loggerdefinition-loggerdefinitionversion-loggers).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub loggers: ::ValueList<Logger>,
    }

    impl ::codec::SerializeValue for LoggerDefinitionVersion {
        // Single required list property; always written.
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Loggers", &self.loggers)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    impl ::codec::DeserializeValue for LoggerDefinitionVersion {
        // Unknown keys are skipped; a missing required `Loggers` key yields a
        // serde `missing_field` error.
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<LoggerDefinitionVersion, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = LoggerDefinitionVersion;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type LoggerDefinitionVersion")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut loggers: Option<::ValueList<Logger>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "Loggers" => {
                                loggers = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(LoggerDefinitionVersion {
                        loggers: loggers.ok_or(::serde::de::Error::missing_field("Loggers"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }
}

pub mod logger_definition_version {
    //! Property types for the `LoggerDefinitionVersion` resource.

    /// The [`AWS::Greengrass::LoggerDefinitionVersion.Logger`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinitionversion-logger.html) property type.
    #[derive(Debug, Default)]
    pub struct Logger {
        /// Property [`Component`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinitionversion-logger.html#cfn-greengrass-loggerdefinitionversion-logger-component).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub component: ::Value<String>,
        /// Property [`Id`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinitionversion-logger.html#cfn-greengrass-loggerdefinitionversion-logger-id).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub id: ::Value<String>,
        /// Property [`Level`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinitionversion-logger.html#cfn-greengrass-loggerdefinitionversion-logger-level).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub level: ::Value<String>,
        /// Property [`Space`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinitionversion-logger.html#cfn-greengrass-loggerdefinitionversion-logger-space).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub space: Option<::Value<u32>>,
        /// Property [`Type`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinitionversion-logger.html#cfn-greengrass-loggerdefinitionversion-logger-type).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub r#type: ::Value<String>,
    }

    impl ::codec::SerializeValue for Logger {
        // Required properties are always written; optional `Space` only when
        // set. `r#type` escapes the reserved word `type`; its map key is "Type".
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Component", &self.component)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Id", &self.id)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Level", &self.level)?;
            if let Some(ref space) = self.space {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "Space", space)?;
            }
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Type", &self.r#type)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    impl ::codec::DeserializeValue for Logger {
        // Unknown keys are skipped; missing required keys (Component, Id,
        // Level, Type) produce serde `missing_field` errors.
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Logger, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = Logger;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type Logger")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut component: Option<::Value<String>> = None;
                    let mut id: Option<::Value<String>> = None;
                    let mut level: Option<::Value<String>> = None;
                    let mut space: Option<::Value<u32>> = None;
                    let mut r#type: Option<::Value<String>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "Component" => {
                                component = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Id" => {
                                id = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Level" => {
                                level = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Space" => {
                                space = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Type" => {
                                r#type = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(Logger {
                        component: component.ok_or(::serde::de::Error::missing_field("Component"))?,
                        id: id.ok_or(::serde::de::Error::missing_field("Id"))?,
                        level: level.ok_or(::serde::de::Error::missing_field("Level"))?,
                        space: space,
                        r#type: r#type.ok_or(::serde::de::Error::missing_field("Type"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }
}

pub mod resource_definition {
    //! Property types for the `ResourceDefinition` resource.

    /// The [`AWS::Greengrass::ResourceDefinition.GroupOwnerSetting`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-groupownersetting.html) property type.
    #[derive(Debug, Default)]
    pub struct GroupOwnerSetting {
        /// Property [`AutoAddGroupOwner`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-groupownersetting.html#cfn-greengrass-resourcedefinition-groupownersetting-autoaddgroupowner).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub auto_add_group_owner: ::Value<bool>,
        /// Property [`GroupOwner`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-groupownersetting.html#cfn-greengrass-resourcedefinition-groupownersetting-groupowner).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub group_owner: Option<::Value<String>>,
    }

    impl ::codec::SerializeValue for GroupOwnerSetting {
        // Required `AutoAddGroupOwner` is always written; optional
        // `GroupOwner` only when set.
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "AutoAddGroupOwner", &self.auto_add_group_owner)?;
            if let Some(ref group_owner) = self.group_owner {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "GroupOwner", group_owner)?;
            }
            ::serde::ser::SerializeMap::end(map)
        }
    }

    impl ::codec::DeserializeValue for GroupOwnerSetting {
        // Unknown keys are skipped; a missing required `AutoAddGroupOwner`
        // yields a serde `missing_field` error.
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<GroupOwnerSetting, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = GroupOwnerSetting;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type GroupOwnerSetting")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut auto_add_group_owner: Option<::Value<bool>> = None;
                    let mut group_owner: Option<::Value<String>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "AutoAddGroupOwner" => {
                                auto_add_group_owner = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "GroupOwner" => {
                                group_owner = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(GroupOwnerSetting {
                        auto_add_group_owner: auto_add_group_owner.ok_or(::serde::de::Error::missing_field("AutoAddGroupOwner"))?,
                        group_owner: group_owner,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }

    /// The [`AWS::Greengrass::ResourceDefinition.LocalDeviceResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-localdeviceresourcedata.html) property type.
    #[derive(Debug, Default)]
    pub struct LocalDeviceResourceData {
        /// Property [`GroupOwnerSetting`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-localdeviceresourcedata.html#cfn-greengrass-resourcedefinition-localdeviceresourcedata-groupownersetting).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub group_owner_setting: Option<::Value<GroupOwnerSetting>>,
        /// Property [`SourcePath`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-localdeviceresourcedata.html#cfn-greengrass-resourcedefinition-localdeviceresourcedata-sourcepath).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub source_path: ::Value<String>,
    }

    impl ::codec::SerializeValue for LocalDeviceResourceData {
        // Optional `GroupOwnerSetting` is written only when set; required
        // `SourcePath` is always written.
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            if let Some(ref group_owner_setting) = self.group_owner_setting {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "GroupOwnerSetting", group_owner_setting)?;
            }
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "SourcePath", &self.source_path)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    impl ::codec::DeserializeValue for LocalDeviceResourceData {
        // Unknown keys are skipped; a missing required `SourcePath` yields a
        // serde `missing_field` error.
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<LocalDeviceResourceData, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = LocalDeviceResourceData;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type LocalDeviceResourceData")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut group_owner_setting: Option<::Value<GroupOwnerSetting>> = None;
                    let mut source_path: Option<::Value<String>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "GroupOwnerSetting" => {
                                group_owner_setting = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "SourcePath" => {
                                source_path = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(LocalDeviceResourceData {
                        group_owner_setting: group_owner_setting,
                        source_path: source_path.ok_or(::serde::de::Error::missing_field("SourcePath"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }

    /// The [`AWS::Greengrass::ResourceDefinition.LocalVolumeResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-localvolumeresourcedata.html) property type.
    #[derive(Debug, Default)]
    pub struct LocalVolumeResourceData {
        /// Property [`DestinationPath`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-localvolumeresourcedata.html#cfn-greengrass-resourcedefinition-localvolumeresourcedata-destinationpath).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub destination_path: ::Value<String>,
        /// Property [`GroupOwnerSetting`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-localvolumeresourcedata.html#cfn-greengrass-resourcedefinition-localvolumeresourcedata-groupownersetting).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub group_owner_setting: Option<::Value<GroupOwnerSetting>>,
        /// Property [`SourcePath`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-localvolumeresourcedata.html#cfn-greengrass-resourcedefinition-localvolumeresourcedata-sourcepath).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub source_path: ::Value<String>,
    }

    impl ::codec::SerializeValue for LocalVolumeResourceData {
        // Required paths are always written; optional `GroupOwnerSetting`
        // only when set.
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "DestinationPath", &self.destination_path)?;
            if let Some(ref group_owner_setting) = self.group_owner_setting {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "GroupOwnerSetting", group_owner_setting)?;
            }
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "SourcePath", &self.source_path)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    impl ::codec::DeserializeValue for LocalVolumeResourceData {
        // Unknown keys are skipped; missing `DestinationPath` or `SourcePath`
        // yields a serde `missing_field` error.
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<LocalVolumeResourceData, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = LocalVolumeResourceData;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type LocalVolumeResourceData")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut destination_path: Option<::Value<String>> = None;
                    let mut group_owner_setting: Option<::Value<GroupOwnerSetting>> = None;
                    let mut source_path: Option<::Value<String>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "DestinationPath" => {
                                destination_path = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "GroupOwnerSetting" => {
                                group_owner_setting = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "SourcePath" => {
                                source_path = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(LocalVolumeResourceData {
                        destination_path: destination_path.ok_or(::serde::de::Error::missing_field("DestinationPath"))?,
                        group_owner_setting: group_owner_setting,
                        source_path: source_path.ok_or(::serde::de::Error::missing_field("SourcePath"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }

    /// The [`AWS::Greengrass::ResourceDefinition.ResourceDataContainer`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedatacontainer.html) property type.
    #[derive(Debug, Default)]
    pub struct ResourceDataContainer {
        /// Property [`LocalDeviceResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedatacontainer.html#cfn-greengrass-resourcedefinition-resourcedatacontainer-localdeviceresourcedata).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub local_device_resource_data: Option<::Value<LocalDeviceResourceData>>,
        /// Property [`LocalVolumeResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedatacontainer.html#cfn-greengrass-resourcedefinition-resourcedatacontainer-localvolumeresourcedata).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub local_volume_resource_data: Option<::Value<LocalVolumeResourceData>>,
        /// Property [`S3MachineLearningModelResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedatacontainer.html#cfn-greengrass-resourcedefinition-resourcedatacontainer-s3machinelearningmodelresourcedata).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub s3_machine_learning_model_resource_data: Option<::Value<S3MachineLearningModelResourceData>>,
        /// Property [`SageMakerMachineLearningModelResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedatacontainer.html#cfn-greengrass-resourcedefinition-resourcedatacontainer-sagemakermachinelearningmodelresourcedata).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub sage_maker_machine_learning_model_resource_data: Option<::Value<SageMakerMachineLearningModelResourceData>>,
        /// Property [`SecretsManagerSecretResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedatacontainer.html#cfn-greengrass-resourcedefinition-resourcedatacontainer-secretsmanagersecretresourcedata).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub secrets_manager_secret_resource_data: Option<::Value<SecretsManagerSecretResourceData>>,
    }

    impl ::codec::SerializeValue for ResourceDataContainer {
        // All five variants are optional; each is written only when set.
        // NOTE(review): this type models a CloudFormation union-like container;
        // no exclusivity between variants is enforced here.
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            if let Some(ref local_device_resource_data) = self.local_device_resource_data {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "LocalDeviceResourceData", local_device_resource_data)?;
            }
            if let Some(ref local_volume_resource_data) = self.local_volume_resource_data {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "LocalVolumeResourceData", local_volume_resource_data)?;
            }
            if let Some(ref s3_machine_learning_model_resource_data) = self.s3_machine_learning_model_resource_data {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "S3MachineLearningModelResourceData", s3_machine_learning_model_resource_data)?;
            }
            if let Some(ref sage_maker_machine_learning_model_resource_data) = self.sage_maker_machine_learning_model_resource_data {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "SageMakerMachineLearningModelResourceData", sage_maker_machine_learning_model_resource_data)?;
            }
            if let Some(ref secrets_manager_secret_resource_data) = self.secrets_manager_secret_resource_data {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "SecretsManagerSecretResourceData", secrets_manager_secret_resource_data)?;
            }
            ::serde::ser::SerializeMap::end(map)
        }
    }

    impl ::codec::DeserializeValue for ResourceDataContainer {
        // Unknown keys are skipped; every variant is optional, so no
        // missing-field checks are needed.
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<ResourceDataContainer, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = ResourceDataContainer;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type ResourceDataContainer")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut local_device_resource_data: Option<::Value<LocalDeviceResourceData>> = None;
                    let mut local_volume_resource_data: Option<::Value<LocalVolumeResourceData>> = None;
                    let mut s3_machine_learning_model_resource_data: Option<::Value<S3MachineLearningModelResourceData>> = None;
                    let mut sage_maker_machine_learning_model_resource_data: Option<::Value<SageMakerMachineLearningModelResourceData>> = None;
                    let mut secrets_manager_secret_resource_data: Option<::Value<SecretsManagerSecretResourceData>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "LocalDeviceResourceData" => {
                                local_device_resource_data = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "LocalVolumeResourceData" => {
                                local_volume_resource_data = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "S3MachineLearningModelResourceData" => {
                                s3_machine_learning_model_resource_data = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "SageMakerMachineLearningModelResourceData" => {
                                sage_maker_machine_learning_model_resource_data = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "SecretsManagerSecretResourceData" => {
                                secrets_manager_secret_resource_data = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(ResourceDataContainer {
                        local_device_resource_data: local_device_resource_data,
                        local_volume_resource_data: local_volume_resource_data,
                        s3_machine_learning_model_resource_data: s3_machine_learning_model_resource_data,
                        sage_maker_machine_learning_model_resource_data: sage_maker_machine_learning_model_resource_data,
                        secrets_manager_secret_resource_data: secrets_manager_secret_resource_data,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }

    /// The [`AWS::Greengrass::ResourceDefinition.ResourceDefinitionVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedefinitionversion.html) property type.
    #[derive(Debug, Default)]
    pub struct ResourceDefinitionVersion {
        /// Property [`Resources`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedefinitionversion.html#cfn-greengrass-resourcedefinition-resourcedefinitionversion-resources).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub resources: ::ValueList<ResourceInstance>,
    }

    impl ::codec::SerializeValue for ResourceDefinitionVersion {
        // Single required list property; always written.
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Resources", &self.resources)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    impl ::codec::DeserializeValue for ResourceDefinitionVersion {
        // Unknown keys are skipped; a missing required `Resources` key yields
        // a serde `missing_field` error.
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<ResourceDefinitionVersion, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = ResourceDefinitionVersion;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type ResourceDefinitionVersion")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut resources: Option<::ValueList<ResourceInstance>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "Resources" => {
                                resources = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(ResourceDefinitionVersion {
                        resources: resources.ok_or(::serde::de::Error::missing_field("Resources"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }

    /// The [`AWS::Greengrass::ResourceDefinition.ResourceDownloadOwnerSetting`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedownloadownersetting.html) property type.
    // NOTE(review): both fields of `ResourceDownloadOwnerSetting` are required:
    // always serialized, and a missing key deserializes to a `missing_field`
    // error.
    #[derive(Debug, Default)]
    pub struct ResourceDownloadOwnerSetting {
        /// Property [`GroupOwner`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedownloadownersetting.html#cfn-greengrass-resourcedefinition-resourcedownloadownersetting-groupowner).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub group_owner: ::Value<String>,
        /// Property [`GroupPermission`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedownloadownersetting.html#cfn-greengrass-resourcedefinition-resourcedownloadownersetting-grouppermission).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub group_permission: ::Value<String>,
    }

    impl ::codec::SerializeValue for ResourceDownloadOwnerSetting {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "GroupOwner", &self.group_owner)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "GroupPermission", &self.group_permission)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    impl ::codec::DeserializeValue for ResourceDownloadOwnerSetting {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<ResourceDownloadOwnerSetting, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = ResourceDownloadOwnerSetting;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type ResourceDownloadOwnerSetting")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut group_owner: Option<::Value<String>> = None;
                    let mut group_permission: Option<::Value<String>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "GroupOwner" => {
                                group_owner = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "GroupPermission" => {
                                group_permission = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(ResourceDownloadOwnerSetting {
                        group_owner: group_owner.ok_or(::serde::de::Error::missing_field("GroupOwner"))?,
                        group_permission: group_permission.ok_or(::serde::de::Error::missing_field("GroupPermission"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }

    /// The [`AWS::Greengrass::ResourceDefinition.ResourceInstance`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourceinstance.html) property type.
    #[derive(Debug, Default)]
    pub struct ResourceInstance {
        /// Property [`Id`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourceinstance.html#cfn-greengrass-resourcedefinition-resourceinstance-id).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub id: ::Value<String>,
        /// Property [`Name`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourceinstance.html#cfn-greengrass-resourcedefinition-resourceinstance-name).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub name: ::Value<String>,
        /// Property [`ResourceDataContainer`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourceinstance.html#cfn-greengrass-resourcedefinition-resourceinstance-resourcedatacontainer).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub resource_data_container: ::Value<ResourceDataContainer>,
    }

    // All three `ResourceInstance` fields are required: always serialized, and
    // a missing key deserializes to a `missing_field` error.
    impl ::codec::SerializeValue for ResourceInstance {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Id", &self.id)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Name", &self.name)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "ResourceDataContainer", &self.resource_data_container)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    impl ::codec::DeserializeValue for ResourceInstance {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<ResourceInstance, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = ResourceInstance;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type ResourceInstance")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut id: Option<::Value<String>> = None;
                    let mut name: Option<::Value<String>> = None;
                    let mut resource_data_container: Option<::Value<ResourceDataContainer>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "Id" => {
                                id = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Name" => {
                                name = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "ResourceDataContainer" => {
                                resource_data_container = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(ResourceInstance {
                        id: id.ok_or(::serde::de::Error::missing_field("Id"))?,
                        name: name.ok_or(::serde::de::Error::missing_field("Name"))?,
                        resource_data_container: resource_data_container.ok_or(::serde::de::Error::missing_field("ResourceDataContainer"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }

    /// The [`AWS::Greengrass::ResourceDefinition.S3MachineLearningModelResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-s3machinelearningmodelresourcedata.html) property type.
    #[derive(Debug, Default)]
    pub struct S3MachineLearningModelResourceData {
        /// Property [`DestinationPath`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-s3machinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinition-s3machinelearningmodelresourcedata-destinationpath).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub destination_path: ::Value<String>,
        /// Property [`OwnerSetting`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-s3machinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinition-s3machinelearningmodelresourcedata-ownersetting).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub owner_setting: Option<::Value<ResourceDownloadOwnerSetting>>,
        /// Property [`S3Uri`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-s3machinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinition-s3machinelearningmodelresourcedata-s3uri).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub s3_uri: ::Value<String>,
    }

    // `DestinationPath` and `S3Uri` are required; `OwnerSetting` is optional
    // and omitted from the serialized map when `None`.
    impl ::codec::SerializeValue for S3MachineLearningModelResourceData {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "DestinationPath", &self.destination_path)?;
            if let Some(ref owner_setting) = self.owner_setting {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "OwnerSetting", owner_setting)?;
            }
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "S3Uri", &self.s3_uri)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    impl ::codec::DeserializeValue for S3MachineLearningModelResourceData {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<S3MachineLearningModelResourceData, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = S3MachineLearningModelResourceData;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type S3MachineLearningModelResourceData")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut destination_path: Option<::Value<String>> = None;
                    let mut owner_setting: Option<::Value<ResourceDownloadOwnerSetting>> = None;
                    let mut s3_uri: Option<::Value<String>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "DestinationPath" => {
                                destination_path = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "OwnerSetting" => {
                                owner_setting = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "S3Uri" => {
                                s3_uri = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(S3MachineLearningModelResourceData {
                        destination_path: destination_path.ok_or(::serde::de::Error::missing_field("DestinationPath"))?,
                        owner_setting: owner_setting,
                        s3_uri: s3_uri.ok_or(::serde::de::Error::missing_field("S3Uri"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }

    /// The [`AWS::Greengrass::ResourceDefinition.SageMakerMachineLearningModelResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata.html) property type.
    #[derive(Debug, Default)]
    pub struct SageMakerMachineLearningModelResourceData {
        /// Property [`DestinationPath`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata-destinationpath).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub destination_path: ::Value<String>,
        /// Property [`OwnerSetting`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata-ownersetting).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub owner_setting: Option<::Value<ResourceDownloadOwnerSetting>>,
        /// Property [`SageMakerJobArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata-sagemakerjobarn).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub sage_maker_job_arn: ::Value<String>,
    }

    // `DestinationPath` and `SageMakerJobArn` are required; `OwnerSetting` is
    // optional and omitted from the serialized map when `None`.
    impl ::codec::SerializeValue for SageMakerMachineLearningModelResourceData {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "DestinationPath", &self.destination_path)?;
            if let Some(ref owner_setting) = self.owner_setting {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "OwnerSetting", owner_setting)?;
            }
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "SageMakerJobArn", &self.sage_maker_job_arn)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    impl ::codec::DeserializeValue for SageMakerMachineLearningModelResourceData {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<SageMakerMachineLearningModelResourceData, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = SageMakerMachineLearningModelResourceData;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type SageMakerMachineLearningModelResourceData")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut destination_path: Option<::Value<String>> = None;
                    let mut owner_setting: Option<::Value<ResourceDownloadOwnerSetting>> = None;
                    let mut sage_maker_job_arn: Option<::Value<String>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "DestinationPath" => {
                                destination_path = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "OwnerSetting" => {
                                owner_setting = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "SageMakerJobArn" => {
                                sage_maker_job_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(SageMakerMachineLearningModelResourceData {
                        destination_path: destination_path.ok_or(::serde::de::Error::missing_field("DestinationPath"))?,
                        owner_setting: owner_setting,
                        sage_maker_job_arn: sage_maker_job_arn.ok_or(::serde::de::Error::missing_field("SageMakerJobArn"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }

    /// The [`AWS::Greengrass::ResourceDefinition.SecretsManagerSecretResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-secretsmanagersecretresourcedata.html) property type.
    #[derive(Debug, Default)]
    pub struct SecretsManagerSecretResourceData {
        /// Property [`ARN`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-secretsmanagersecretresourcedata.html#cfn-greengrass-resourcedefinition-secretsmanagersecretresourcedata-arn).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub arn: ::Value<String>,
        /// Property [`AdditionalStagingLabelsToDownload`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-secretsmanagersecretresourcedata.html#cfn-greengrass-resourcedefinition-secretsmanagersecretresourcedata-additionalstaginglabelstodownload).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
pub additional_staging_labels_to_download: Option<::ValueList<String>>, } impl ::codec::SerializeValue for SecretsManagerSecretResourceData { fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let mut map = ::serde::Serializer::serialize_map(s, None)?; ::serde::ser::SerializeMap::serialize_entry(&mut map, "ARN", &self.arn)?; if let Some(ref additional_staging_labels_to_download) = self.additional_staging_labels_to_download { ::serde::ser::SerializeMap::serialize_entry(&mut map, "AdditionalStagingLabelsToDownload", additional_staging_labels_to_download)?; } ::serde::ser::SerializeMap::end(map) } } impl ::codec::DeserializeValue for SecretsManagerSecretResourceData { fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<SecretsManagerSecretResourceData, D::Error> { struct Visitor; impl<'de> ::serde::de::Visitor<'de> for Visitor { type Value = SecretsManagerSecretResourceData; fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { write!(f, "a struct of type SecretsManagerSecretResourceData") } fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> { let mut arn: Option<::Value<String>> = None; let mut additional_staging_labels_to_download: Option<::ValueList<String>> = None; while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? { match __cfn_key.as_ref() { "ARN" => { arn = ::serde::de::MapAccess::next_value(&mut map)?; } "AdditionalStagingLabelsToDownload" => { additional_staging_labels_to_download = ::serde::de::MapAccess::next_value(&mut map)?; } _ => {} } } Ok(SecretsManagerSecretResourceData { arn: arn.ok_or(::serde::de::Error::missing_field("ARN"))?, additional_staging_labels_to_download: additional_staging_labels_to_download, }) } } d.deserialize_map(Visitor) } } } pub mod resource_definition_version { //! Property types for the `ResourceDefinitionVersion` resource. 
/// The [`AWS::Greengrass::ResourceDefinitionVersion.GroupOwnerSetting`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-groupownersetting.html) property type. #[derive(Debug, Default)] pub struct GroupOwnerSetting { /// Property [`AutoAddGroupOwner`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-groupownersetting.html#cfn-greengrass-resourcedefinitionversion-groupownersetting-autoaddgroupowner). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. pub auto_add_group_owner: ::Value<bool>, /// Property [`GroupOwner`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-groupownersetting.html#cfn-greengrass-resourcedefinitionversion-groupownersetting-groupowner). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. 
pub group_owner: Option<::Value<String>>, } impl ::codec::SerializeValue for GroupOwnerSetting { fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let mut map = ::serde::Serializer::serialize_map(s, None)?; ::serde::ser::SerializeMap::serialize_entry(&mut map, "AutoAddGroupOwner", &self.auto_add_group_owner)?; if let Some(ref group_owner) = self.group_owner { ::serde::ser::SerializeMap::serialize_entry(&mut map, "GroupOwner", group_owner)?; } ::serde::ser::SerializeMap::end(map) } } impl ::codec::DeserializeValue for GroupOwnerSetting { fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<GroupOwnerSetting, D::Error> { struct Visitor; impl<'de> ::serde::de::Visitor<'de> for Visitor { type Value = GroupOwnerSetting; fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { write!(f, "a struct of type GroupOwnerSetting") } fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> { let mut auto_add_group_owner: Option<::Value<bool>> = None; let mut group_owner: Option<::Value<String>> = None; while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? { match __cfn_key.as_ref() { "AutoAddGroupOwner" => { auto_add_group_owner = ::serde::de::MapAccess::next_value(&mut map)?; } "GroupOwner" => { group_owner = ::serde::de::MapAccess::next_value(&mut map)?; } _ => {} } } Ok(GroupOwnerSetting { auto_add_group_owner: auto_add_group_owner.ok_or(::serde::de::Error::missing_field("AutoAddGroupOwner"))?, group_owner: group_owner, }) } } d.deserialize_map(Visitor) } } /// The [`AWS::Greengrass::ResourceDefinitionVersion.LocalDeviceResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-localdeviceresourcedata.html) property type. 
    // NOTE(review): `SourcePath` is required; `GroupOwnerSetting` is optional
    // and omitted from the serialized map when `None`.
    #[derive(Debug, Default)]
    pub struct LocalDeviceResourceData {
        /// Property [`GroupOwnerSetting`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-localdeviceresourcedata.html#cfn-greengrass-resourcedefinitionversion-localdeviceresourcedata-groupownersetting).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub group_owner_setting: Option<::Value<GroupOwnerSetting>>,
        /// Property [`SourcePath`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-localdeviceresourcedata.html#cfn-greengrass-resourcedefinitionversion-localdeviceresourcedata-sourcepath).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub source_path: ::Value<String>,
    }

    impl ::codec::SerializeValue for LocalDeviceResourceData {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            if let Some(ref group_owner_setting) = self.group_owner_setting {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "GroupOwnerSetting", group_owner_setting)?;
            }
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "SourcePath", &self.source_path)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    impl ::codec::DeserializeValue for LocalDeviceResourceData {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<LocalDeviceResourceData, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = LocalDeviceResourceData;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type LocalDeviceResourceData")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut group_owner_setting: Option<::Value<GroupOwnerSetting>> = None;
                    let mut source_path: Option<::Value<String>> = None;

                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "GroupOwnerSetting" => {
                                group_owner_setting = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "SourcePath" => {
                                source_path = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }

                    Ok(LocalDeviceResourceData {
                        group_owner_setting: group_owner_setting,
                        source_path: source_path.ok_or(::serde::de::Error::missing_field("SourcePath"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }

    /// The [`AWS::Greengrass::ResourceDefinitionVersion.LocalVolumeResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-localvolumeresourcedata.html) property type.
    #[derive(Debug, Default)]
    pub struct LocalVolumeResourceData {
        /// Property [`DestinationPath`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-localvolumeresourcedata.html#cfn-greengrass-resourcedefinitionversion-localvolumeresourcedata-destinationpath).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub destination_path: ::Value<String>,
        /// Property [`GroupOwnerSetting`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-localvolumeresourcedata.html#cfn-greengrass-resourcedefinitionversion-localvolumeresourcedata-groupownersetting).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub group_owner_setting: Option<::Value<GroupOwnerSetting>>,
        /// Property [`SourcePath`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-localvolumeresourcedata.html#cfn-greengrass-resourcedefinitionversion-localvolumeresourcedata-sourcepath).
        ///
        /// Update type: _Immutable_.
/// AWS CloudFormation replaces the resource when you change this property. pub source_path: ::Value<String>, } impl ::codec::SerializeValue for LocalVolumeResourceData { fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let mut map = ::serde::Serializer::serialize_map(s, None)?; ::serde::ser::SerializeMap::serialize_entry(&mut map, "DestinationPath", &self.destination_path)?; if let Some(ref group_owner_setting) = self.group_owner_setting { ::serde::ser::SerializeMap::serialize_entry(&mut map, "GroupOwnerSetting", group_owner_setting)?; } ::serde::ser::SerializeMap::serialize_entry(&mut map, "SourcePath", &self.source_path)?; ::serde::ser::SerializeMap::end(map) } } impl ::codec::DeserializeValue for LocalVolumeResourceData { fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<LocalVolumeResourceData, D::Error> { struct Visitor; impl<'de> ::serde::de::Visitor<'de> for Visitor { type Value = LocalVolumeResourceData; fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { write!(f, "a struct of type LocalVolumeResourceData") } fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> { let mut destination_path: Option<::Value<String>> = None; let mut group_owner_setting: Option<::Value<GroupOwnerSetting>> = None; let mut source_path: Option<::Value<String>> = None; while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? 
{ match __cfn_key.as_ref() { "DestinationPath" => { destination_path = ::serde::de::MapAccess::next_value(&mut map)?; } "GroupOwnerSetting" => { group_owner_setting = ::serde::de::MapAccess::next_value(&mut map)?; } "SourcePath" => { source_path = ::serde::de::MapAccess::next_value(&mut map)?; } _ => {} } } Ok(LocalVolumeResourceData { destination_path: destination_path.ok_or(::serde::de::Error::missing_field("DestinationPath"))?, group_owner_setting: group_owner_setting, source_path: source_path.ok_or(::serde::de::Error::missing_field("SourcePath"))?, }) } } d.deserialize_map(Visitor) } } /// The [`AWS::Greengrass::ResourceDefinitionVersion.ResourceDataContainer`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedatacontainer.html) property type. #[derive(Debug, Default)] pub struct ResourceDataContainer { /// Property [`LocalDeviceResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedatacontainer.html#cfn-greengrass-resourcedefinitionversion-resourcedatacontainer-localdeviceresourcedata). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. pub local_device_resource_data: Option<::Value<LocalDeviceResourceData>>, /// Property [`LocalVolumeResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedatacontainer.html#cfn-greengrass-resourcedefinitionversion-resourcedatacontainer-localvolumeresourcedata). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. 
pub local_volume_resource_data: Option<::Value<LocalVolumeResourceData>>, /// Property [`S3MachineLearningModelResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedatacontainer.html#cfn-greengrass-resourcedefinitionversion-resourcedatacontainer-s3machinelearningmodelresourcedata). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. pub s3_machine_learning_model_resource_data: Option<::Value<S3MachineLearningModelResourceData>>, /// Property [`SageMakerMachineLearningModelResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedatacontainer.html#cfn-greengrass-resourcedefinitionversion-resourcedatacontainer-sagemakermachinelearningmodelresourcedata). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. pub sage_maker_machine_learning_model_resource_data: Option<::Value<SageMakerMachineLearningModelResourceData>>, /// Property [`SecretsManagerSecretResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedatacontainer.html#cfn-greengrass-resourcedefinitionversion-resourcedatacontainer-secretsmanagersecretresourcedata). /// /// Update type: _Immutable_. /// AWS CloudFormation replaces the resource when you change this property. 
pub secrets_manager_secret_resource_data: Option<::Value<SecretsManagerSecretResourceData>>, } impl ::codec::SerializeValue for ResourceDataContainer { fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let mut map = ::serde::Serializer::serialize_map(s, None)?; if let Some(ref local_device_resource_data) = self.local_device_resource_data { ::serde::ser::SerializeMap::serialize_entry(&mut map, "LocalDeviceResourceData", local_device_resource_data)?; } if let Some(ref local_volume_resource_data) = self.local_volume_resource_data { ::serde::ser::SerializeMap::serialize_entry(&mut map, "LocalVolumeResourceData", local_volume_resource_data)?; } if let Some(ref s3_machine_learning_model_resource_data) = self.s3_machine_learning_model_resource_data { ::serde::ser::SerializeMap::serialize_entry(&mut map, "S3MachineLearningModelResourceData", s3_machine_learning_model_resource_data)?; } if let Some(ref sage_maker_machine_learning_model_resource_data) = self.sage_maker_machine_learning_model_resource_data { ::serde::ser::SerializeMap::serialize_entry(&mut map, "SageMakerMachineLearningModelResourceData", sage_maker_machine_learning_model_resource_data)?; } if let Some(ref secrets_manager_secret_resource_data) = self.secrets_manager_secret_resource_data { ::serde::ser::SerializeMap::serialize_entry(&mut map, "SecretsManagerSecretResourceData", secrets_manager_secret_resource_data)?; } ::serde::ser::SerializeMap::end(map) } } impl ::codec::DeserializeValue for ResourceDataContainer { fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<ResourceDataContainer, D::Error> { struct Visitor; impl<'de> ::serde::de::Visitor<'de> for Visitor { type Value = ResourceDataContainer; fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { write!(f, "a struct of type ResourceDataContainer") } fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> { let mut local_device_resource_data: 
Option<::Value<LocalDeviceResourceData>> = None; let mut local_volume_resource_data: Option<::Value<LocalVolumeResourceData>> = None; let mut s3_machine_learning_model_resource_data: Option<::Value<S3MachineLearningModelResourceData>> = None; let mut sage_maker_machine_learning_model_resource_data: Option<::Value<SageMakerMachineLearningModelResourceData>> = None; let mut secrets_manager_secret_resource_data: Option<::Value<SecretsManagerSecretResourceData>> = None; while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? { match __cfn_key.as_ref() { "LocalDeviceResourceData" => { local_device_resource_data = ::serde::de::MapAccess::next_value(&mut map)?; } "LocalVolumeResourceData" => { local_volume_resource_data = ::serde::de::MapAccess::next_value(&mut map)?; } "S3MachineLearningModelResourceData" => { s3_machine_learning_model_resource_data = ::serde::de::MapAccess::next_value(&mut map)?; } "SageMakerMachineLearningModelResourceData" => { sage_maker_machine_learning_model_resource_data = ::serde::de::MapAccess::next_value(&mut map)?; } "SecretsManagerSecretResourceData" => { secrets_manager_secret_resource_data = ::serde::de::MapAccess::next_value(&mut map)?; } _ => {} } } Ok(ResourceDataContainer { local_device_resource_data: local_device_resource_data, local_volume_resource_data: local_volume_resource_data, s3_machine_learning_model_resource_data: s3_machine_learning_model_resource_data, sage_maker_machine_learning_model_resource_data: sage_maker_machine_learning_model_resource_data, secrets_manager_secret_resource_data: secrets_manager_secret_resource_data, }) } } d.deserialize_map(Visitor) } } /// The [`AWS::Greengrass::ResourceDefinitionVersion.ResourceDownloadOwnerSetting`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedownloadownersetting.html) property type. 
// ---------------------------------------------------------------------------
// Remaining ResourceDefinitionVersion property types: all follow the same
// pattern — optional properties serialize only when `Some`, required ones are
// enforced on deserialization via `missing_field`, unknown keys are skipped.
// ---------------------------------------------------------------------------
#[derive(Debug, Default)]
pub struct ResourceDownloadOwnerSetting {
    /// Property [`GroupOwner`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedownloadownersetting.html#cfn-greengrass-resourcedefinitionversion-resourcedownloadownersetting-groupowner).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub group_owner: ::Value<String>,
    /// Property [`GroupPermission`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedownloadownersetting.html#cfn-greengrass-resourcedefinitionversion-resourcedownloadownersetting-grouppermission).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub group_permission: ::Value<String>,
}

impl ::codec::SerializeValue for ResourceDownloadOwnerSetting {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "GroupOwner", &self.group_owner)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "GroupPermission", &self.group_permission)?;
        ::serde::ser::SerializeMap::end(map)
    }
}

impl ::codec::DeserializeValue for ResourceDownloadOwnerSetting {
    fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<ResourceDownloadOwnerSetting, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = ResourceDownloadOwnerSetting;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type ResourceDownloadOwnerSetting")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut group_owner: Option<::Value<String>> = None;
                let mut group_permission: Option<::Value<String>> = None;
                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "GroupOwner" => {
                            group_owner = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "GroupPermission" => {
                            group_permission = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        // NOTE(review): unknown-key values are never consumed
                        // via next_value — confirm in upstream codegen.
                        _ => {}
                    }
                }
                Ok(ResourceDownloadOwnerSetting {
                    group_owner: group_owner.ok_or(::serde::de::Error::missing_field("GroupOwner"))?,
                    group_permission: group_permission.ok_or(::serde::de::Error::missing_field("GroupPermission"))?,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

/// The [`AWS::Greengrass::ResourceDefinitionVersion.ResourceInstance`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourceinstance.html) property type.
#[derive(Debug, Default)]
pub struct ResourceInstance {
    /// Property [`Id`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourceinstance.html#cfn-greengrass-resourcedefinitionversion-resourceinstance-id).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub id: ::Value<String>,
    /// Property [`Name`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourceinstance.html#cfn-greengrass-resourcedefinitionversion-resourceinstance-name).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub name: ::Value<String>,
    /// Property [`ResourceDataContainer`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourceinstance.html#cfn-greengrass-resourcedefinitionversion-resourceinstance-resourcedatacontainer).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub resource_data_container: ::Value<ResourceDataContainer>,
}

impl ::codec::SerializeValue for ResourceInstance {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "Id", &self.id)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "Name", &self.name)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "ResourceDataContainer", &self.resource_data_container)?;
        ::serde::ser::SerializeMap::end(map)
    }
}

impl ::codec::DeserializeValue for ResourceInstance {
    fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<ResourceInstance, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = ResourceInstance;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type ResourceInstance")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut id: Option<::Value<String>> = None;
                let mut name: Option<::Value<String>> = None;
                let mut resource_data_container: Option<::Value<ResourceDataContainer>> = None;
                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "Id" => {
                            id = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Name" => {
                            name = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "ResourceDataContainer" => {
                            resource_data_container = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }
                // All three properties are required.
                Ok(ResourceInstance {
                    id: id.ok_or(::serde::de::Error::missing_field("Id"))?,
                    name: name.ok_or(::serde::de::Error::missing_field("Name"))?,
                    resource_data_container: resource_data_container.ok_or(::serde::de::Error::missing_field("ResourceDataContainer"))?,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

/// The [`AWS::Greengrass::ResourceDefinitionVersion.S3MachineLearningModelResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata.html) property type.
#[derive(Debug, Default)]
pub struct S3MachineLearningModelResourceData {
    /// Property [`DestinationPath`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata-destinationpath).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub destination_path: ::Value<String>,
    /// Property [`OwnerSetting`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata-ownersetting).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub owner_setting: Option<::Value<ResourceDownloadOwnerSetting>>,
    /// Property [`S3Uri`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata-s3uri).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub s3_uri: ::Value<String>,
}

impl ::codec::SerializeValue for S3MachineLearningModelResourceData {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "DestinationPath", &self.destination_path)?;
        // Optional property: emitted only when present.
        if let Some(ref owner_setting) = self.owner_setting {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "OwnerSetting", owner_setting)?;
        }
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "S3Uri", &self.s3_uri)?;
        ::serde::ser::SerializeMap::end(map)
    }
}

impl ::codec::DeserializeValue for S3MachineLearningModelResourceData {
    fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<S3MachineLearningModelResourceData, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = S3MachineLearningModelResourceData;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type S3MachineLearningModelResourceData")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut destination_path: Option<::Value<String>> = None;
                let mut owner_setting: Option<::Value<ResourceDownloadOwnerSetting>> = None;
                let mut s3_uri: Option<::Value<String>> = None;
                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "DestinationPath" => {
                            destination_path = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "OwnerSetting" => {
                            owner_setting = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "S3Uri" => {
                            s3_uri = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }
                Ok(S3MachineLearningModelResourceData {
                    destination_path: destination_path.ok_or(::serde::de::Error::missing_field("DestinationPath"))?,
                    owner_setting: owner_setting,
                    s3_uri: s3_uri.ok_or(::serde::de::Error::missing_field("S3Uri"))?,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

/// The [`AWS::Greengrass::ResourceDefinitionVersion.SageMakerMachineLearningModelResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata.html) property type.
#[derive(Debug, Default)]
pub struct SageMakerMachineLearningModelResourceData {
    /// Property [`DestinationPath`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata-destinationpath).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub destination_path: ::Value<String>,
    /// Property [`OwnerSetting`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata-ownersetting).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub owner_setting: Option<::Value<ResourceDownloadOwnerSetting>>,
    /// Property [`SageMakerJobArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata-sagemakerjobarn).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub sage_maker_job_arn: ::Value<String>,
}

impl ::codec::SerializeValue for SageMakerMachineLearningModelResourceData {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "DestinationPath", &self.destination_path)?;
        // Optional property: emitted only when present.
        if let Some(ref owner_setting) = self.owner_setting {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "OwnerSetting", owner_setting)?;
        }
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "SageMakerJobArn", &self.sage_maker_job_arn)?;
        ::serde::ser::SerializeMap::end(map)
    }
}

impl ::codec::DeserializeValue for SageMakerMachineLearningModelResourceData {
    fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<SageMakerMachineLearningModelResourceData, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = SageMakerMachineLearningModelResourceData;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type SageMakerMachineLearningModelResourceData")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut destination_path: Option<::Value<String>> = None;
                let mut owner_setting: Option<::Value<ResourceDownloadOwnerSetting>> = None;
                let mut sage_maker_job_arn: Option<::Value<String>> = None;
                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "DestinationPath" => {
                            destination_path = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "OwnerSetting" => {
                            owner_setting = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "SageMakerJobArn" => {
                            sage_maker_job_arn = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }
                Ok(SageMakerMachineLearningModelResourceData {
                    destination_path: destination_path.ok_or(::serde::de::Error::missing_field("DestinationPath"))?,
                    owner_setting: owner_setting,
                    sage_maker_job_arn: sage_maker_job_arn.ok_or(::serde::de::Error::missing_field("SageMakerJobArn"))?,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}

/// The [`AWS::Greengrass::ResourceDefinitionVersion.SecretsManagerSecretResourceData`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-secretsmanagersecretresourcedata.html) property type.
#[derive(Debug, Default)]
pub struct SecretsManagerSecretResourceData {
    /// Property [`ARN`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-secretsmanagersecretresourcedata.html#cfn-greengrass-resourcedefinitionversion-secretsmanagersecretresourcedata-arn).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub arn: ::Value<String>,
    /// Property [`AdditionalStagingLabelsToDownload`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-secretsmanagersecretresourcedata.html#cfn-greengrass-resourcedefinitionversion-secretsmanagersecretresourcedata-additionalstaginglabelstodownload).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub additional_staging_labels_to_download: Option<::ValueList<String>>,
}

impl ::codec::SerializeValue for SecretsManagerSecretResourceData {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "ARN", &self.arn)?;
        // Optional list property: emitted only when present.
        if let Some(ref additional_staging_labels_to_download) = self.additional_staging_labels_to_download {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "AdditionalStagingLabelsToDownload", additional_staging_labels_to_download)?;
        }
        ::serde::ser::SerializeMap::end(map)
    }
}

impl ::codec::DeserializeValue for SecretsManagerSecretResourceData {
    fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<SecretsManagerSecretResourceData, D::Error> {
        struct Visitor;

        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = SecretsManagerSecretResourceData;

            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type SecretsManagerSecretResourceData")
            }

            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut arn: Option<::Value<String>> = None;
                let mut additional_staging_labels_to_download: Option<::ValueList<String>> = None;
                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "ARN" => {
                            arn = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "AdditionalStagingLabelsToDownload" => {
                            additional_staging_labels_to_download = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        _ => {}
                    }
                }
                Ok(SecretsManagerSecretResourceData {
                    arn: arn.ok_or(::serde::de::Error::missing_field("ARN"))?,
                    additional_staging_labels_to_download: additional_staging_labels_to_download,
                })
            }
        }

        d.deserialize_map(Visitor)
    }
}
}

pub mod subscription_definition {
    //! Property types for the `SubscriptionDefinition` resource.
    /// The [`AWS::Greengrass::SubscriptionDefinition.Subscription`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinition-subscription.html) property type.
    //
    // All four properties are required; serialization always emits them and
    // deserialization errors with `missing_field` when any is absent.
    #[derive(Debug, Default)]
    pub struct Subscription {
        /// Property [`Id`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinition-subscription.html#cfn-greengrass-subscriptiondefinition-subscription-id).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub id: ::Value<String>,
        /// Property [`Source`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinition-subscription.html#cfn-greengrass-subscriptiondefinition-subscription-source).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub source: ::Value<String>,
        /// Property [`Subject`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinition-subscription.html#cfn-greengrass-subscriptiondefinition-subscription-subject).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub subject: ::Value<String>,
        /// Property [`Target`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinition-subscription.html#cfn-greengrass-subscriptiondefinition-subscription-target).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub target: ::Value<String>,
    }

    impl ::codec::SerializeValue for Subscription {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Id", &self.id)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Source", &self.source)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Subject", &self.subject)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Target", &self.target)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    impl ::codec::DeserializeValue for Subscription {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Subscription, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = Subscription;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type Subscription")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut id: Option<::Value<String>> = None;
                    let mut source: Option<::Value<String>> = None;
                    let mut subject: Option<::Value<String>> = None;
                    let mut target: Option<::Value<String>> = None;
                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "Id" => {
                                id = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Source" => {
                                source = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Subject" => {
                                subject = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Target" => {
                                target = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            // NOTE(review): unknown-key values are never consumed
                            // via next_value — confirm in upstream codegen.
                            _ => {}
                        }
                    }
                    Ok(Subscription {
                        id: id.ok_or(::serde::de::Error::missing_field("Id"))?,
                        source: source.ok_or(::serde::de::Error::missing_field("Source"))?,
                        subject: subject.ok_or(::serde::de::Error::missing_field("Subject"))?,
                        target: target.ok_or(::serde::de::Error::missing_field("Target"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }

    /// The [`AWS::Greengrass::SubscriptionDefinition.SubscriptionDefinitionVersion`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinition-subscriptiondefinitionversion.html) property type.
    #[derive(Debug, Default)]
    pub struct SubscriptionDefinitionVersion {
        /// Property [`Subscriptions`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinition-subscriptiondefinitionversion.html#cfn-greengrass-subscriptiondefinition-subscriptiondefinitionversion-subscriptions).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub subscriptions: ::ValueList<Subscription>,
    }

    impl ::codec::SerializeValue for SubscriptionDefinitionVersion {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Subscriptions", &self.subscriptions)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    impl ::codec::DeserializeValue for SubscriptionDefinitionVersion {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<SubscriptionDefinitionVersion, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = SubscriptionDefinitionVersion;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type SubscriptionDefinitionVersion")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut subscriptions: Option<::ValueList<Subscription>> = None;
                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "Subscriptions" => {
                                subscriptions = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }
                    Ok(SubscriptionDefinitionVersion {
                        subscriptions: subscriptions.ok_or(::serde::de::Error::missing_field("Subscriptions"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }
}

pub mod subscription_definition_version {
    //! Property types for the `SubscriptionDefinitionVersion` resource.

    /// The [`AWS::Greengrass::SubscriptionDefinitionVersion.Subscription`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinitionversion-subscription.html) property type.
    //
    // Structurally identical to `subscription_definition::Subscription`, but
    // generated separately because the CloudFormation docs anchor each
    // resource's property types independently.
    #[derive(Debug, Default)]
    pub struct Subscription {
        /// Property [`Id`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinitionversion-subscription.html#cfn-greengrass-subscriptiondefinitionversion-subscription-id).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub id: ::Value<String>,
        /// Property [`Source`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinitionversion-subscription.html#cfn-greengrass-subscriptiondefinitionversion-subscription-source).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub source: ::Value<String>,
        /// Property [`Subject`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinitionversion-subscription.html#cfn-greengrass-subscriptiondefinitionversion-subscription-subject).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub subject: ::Value<String>,
        /// Property [`Target`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinitionversion-subscription.html#cfn-greengrass-subscriptiondefinitionversion-subscription-target).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub target: ::Value<String>,
    }

    impl ::codec::SerializeValue for Subscription {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Id", &self.id)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Source", &self.source)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Subject", &self.subject)?;
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Target", &self.target)?;
            ::serde::ser::SerializeMap::end(map)
        }
    }

    impl ::codec::DeserializeValue for Subscription {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Subscription, D::Error> {
            struct Visitor;

            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = Subscription;

                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type Subscription")
                }

                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut id: Option<::Value<String>> = None;
                    let mut source: Option<::Value<String>> = None;
                    let mut subject: Option<::Value<String>> = None;
                    let mut target: Option<::Value<String>> = None;
                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "Id" => {
                                id = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Source" => {
                                source = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Subject" => {
                                subject = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            "Target" => {
                                target = ::serde::de::MapAccess::next_value(&mut map)?;
                            }
                            _ => {}
                        }
                    }
                    Ok(Subscription {
                        id: id.ok_or(::serde::de::Error::missing_field("Id"))?,
                        source: source.ok_or(::serde::de::Error::missing_field("Source"))?,
                        subject: subject.ok_or(::serde::de::Error::missing_field("Subject"))?,
                        target: target.ok_or(::serde::de::Error::missing_field("Target"))?,
                    })
                }
            }

            d.deserialize_map(Visitor)
        }
    }
}
angular_jqxwindow.d.ts
/// <reference path="../jqwidgets.d.ts" /> import { EventEmitter, ElementRef, OnChanges, SimpleChanges } from '@angular/core'; export declare class jqxWindowComponent implements OnChanges { attrAutoOpen: boolean; attrAnimationType: string; attrCollapsed: boolean; attrCollapseAnimationDuration: number; attrContent: string; attrCloseAnimationDuration: number; attrCloseButtonSize: number; attrCloseButtonAction: string; attrCancelButton: any; attrDragArea: jqwidgets.WindowDragArea; attrDraggable: boolean; attrDisabled: boolean; attrInitContent: () => void; attrIsModal: boolean; attrKeyboardCloseKey: number | string; attrKeyboardNavigation: boolean; attrMinHeight: string | number; attrMaxHeight: string | number; attrMinWidth: number | string; attrMaxWidth: number | string; attrModalOpacity: number | string; attrModalZIndex: number; attrModalBackgroundZIndex: number; attrOkButton: any; attrPosition: string | any; attrRtl: boolean; attrResizable: boolean; attrShowAnimationDuration: number; attrShowCloseButton: boolean; attrShowCollapseButton: boolean; attrTheme: string; attrTitle: string; attrZIndex: number; attrWidth: string | number; attrHeight: string | number; autoCreate: boolean; properties: string[]; host: any; elementRef: ElementRef; widgetObject: jqwidgets.jqxWindow; constructor(containerElement: ElementRef); ngOnInit(): void; ngOnChanges(changes: SimpleChanges): boolean; arraysEqual(attrValue: any, hostValue: any): boolean; manageAttributes(): any; moveClasses(parentEl: HTMLElement, childEl: HTMLElement): void; moveStyles(parentEl: HTMLElement, childEl: HTMLElement): void; createComponent(options?: any): void; createWidget(options?: any): void; __updateRect__(): void; setOptions(options: any): void; autoOpen(arg?: boolean): boolean; animationType(arg?: string): string; collapsed(arg?: boolean): boolean; collapseAnimationDuration(arg?: number): number; content(arg?: string): string; closeAnimationDuration(arg?: number): number; closeButtonSize(arg?: number): 
number; closeButtonAction(arg?: string): string; cancelButton(arg?: any): any; dragArea(arg?: jqwidgets.WindowDragArea): jqwidgets.WindowDragArea; draggable(arg?: boolean): boolean; disabled(arg?: boolean): boolean; height(arg?: string | number): string | number;
initContent(arg?: () => void): () => void; isModal(arg?: boolean): boolean; keyboardCloseKey(arg?: number | string): number | string; keyboardNavigation(arg?: boolean): boolean; minHeight(arg?: string | number): string | number; maxHeight(arg?: string | number): string | number; minWidth(arg?: number | string): number | string; maxWidth(arg?: number | string): number | string; modalOpacity(arg?: undefined): undefined; modalZIndex(arg?: number): number; modalBackgroundZIndex(arg?: number): number; okButton(arg?: any): any; position(arg?: string | any): string | any; rtl(arg?: boolean): boolean; resizable(arg?: boolean): boolean; showAnimationDuration(arg?: number): number; showCloseButton(arg?: boolean): boolean; showCollapseButton(arg?: boolean): boolean; theme(arg?: string): string; title(arg?: string): string; width(arg?: string | number): string | number; zIndex(arg?: number): number; bringToFront(): void; close(): void; collapse(): void; closeAll(): void; disable(): void; destroy(): void; enable(): void; expand(): void; focus(): void; isOpen(): boolean; move(top: number, left: number): void; open(): void; hide(): void; resize(top: number, left: number): void; setTitle(title: string): void; setContent(content: string): void; onClose: EventEmitter<any>; onCollapse: EventEmitter<any>; onExpand: EventEmitter<any>; onMoving: EventEmitter<any>; onMoved: EventEmitter<any>; onOpen: EventEmitter<any>; onResizing: EventEmitter<any>; onResized: EventEmitter<any>; __wireEvents__(): void; }
Is integer safe to use?.js
/* Description: Not all integers can be represented by JavaScript/TypeScript. It has space to to represent 53bit signed integers. In this Kata, we've to determine if it is safe to use the integer or not. Make use of the latest ES6 features to find this. SafeInteger(9007199254740990) //true SafeInteger(-90) //true SafeInteger(9007199254740992) //false */ function
(n) { return Number.isSafeInteger(n); }
SafeInteger
admin_student_routes.py
from urllib.request import Request from api.drivers import student from api.drivers.student import student_drivers from api.middlewares import authentication_middleware from api.schemas.admin.admin_request_schema import admin_request_schemas from api.schemas.student.request_schemas import student_request_schemas from api.schemas.student.response_schemas import student_response_schemas from api.utils.exceptions import exceptions from fastapi import APIRouter, Depends, HTTPException, Request, status from fastapi.responses import JSONResponse from api.repository import admin_repo from api.utils.save_student_data import save_data from starlette.responses import FileResponse import json def construct_router(): admin = APIRouter(tags=["Admin"]) @admin.post("/notify/student") async def notify_by_batch(): pass @admin.post("/add/student/subscription") async def add_student_subscription( request: admin_request_schemas.ManipulateStudentSubscriptionSchema, ): try: response = await student_drivers.Student().update_array_of_str( request.__dict__ ) return JSONResponse(status_code=200, content={"message": "info updated"}) except exceptions.DuplicateStudent: return JSONResponse( status_code=409, content={"message": "info cannot be updated"} ) except exceptions.UnexpectedError: return JSONResponse( status_code=500, content={"message": "internal server error"} ) @admin.post("/remove/student/subscription") async def remove_student_subscription( request: admin_request_schemas.ManipulateStudentSubscriptionSchema, ): try: response = await student_drivers.Student().delete_from_array_of_str( request.__dict__ ) if response: return JSONResponse( status_code=200, content={"message": "subscription deleted successfully"}, ) return JSONResponse( status_code=500, content={"message": "subscription deletion failed"} ) except exceptions.DuplicateStudent: return JSONResponse( status_code=409, content={"message": "info cannot be updated"} ) except exceptions.UnexpectedError: return JSONResponse( 
status_code=500, content={"message": "internal server error"} ) @admin.post("/verify/student") async def verify_student(request: Request): request = await request.json() response = await admin_repo.assign_otp(request["student_ids"]) if response: return JSONResponse( status_code=200, content={"message": "otp assigned successfully"} ) return JSONResponse( status_code=500, content={ "message": """otp cannot be assigned successfully for all student""" }, ) @admin.get("/ban/student/{student_id}") async def ban_student_account(student_id: str): response = await student_drivers.Student().ban_student(student_id) if response == "already_banned": return JSONResponse( status_code=404, content={"message": "student aleady banned"} ) elif response: return JSONResponse( status_code=200, content={"message": "student banned successfully"} ) return JSONResponse( status_code=500, content={"message": "internal server error"} ) @admin.delete("/delete/student/{student_id}") async def delete_student_account(student_id: str): response = await student_drivers.Student().delete_student(student_id) if response: return JSONResponse( status_code=200, content={"message": "student deleted successfully"} ) return JSONResponse( status_code=404, content={"message": "student does not exist"} ) @admin.get("/all_student") async def get_student_profile(): try: response = await ( student_drivers.Student().get_all_students() ) return JSONResponse( status_code=200, content=response ) except Exception as e: print(e, "exception") @admin.post("/student/data") async def get_student_data(): students = await ( student_drivers.Student().get_all_students_data() ) # print(students) save_data(students) if students: return JSONResponse( status_code=200, content= { "message" : "training details saved succesfully" } ) return JSONResponse( status_code=500, content= { "message" : "training details cannot be saved" } ) @admin.get("/student/data") async def get_student_data(request: Request): filename = "student_data.xls" 
#send excel file return FileResponse( filename, filename="student_data.xls", status_code=200, media_type="application/vnd.ms-excel" ) return admin
from fileinput import filename
capctrl6.rs
#[doc = "Register `CAPCTRL6` reader"] pub struct R(crate::R<CAPCTRL6_SPEC>); impl core::ops::Deref for R { type Target = crate::R<CAPCTRL6_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<CAPCTRL6_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<CAPCTRL6_SPEC>) -> Self { R(reader) } } #[doc = "Register `CAPCTRL6` writer"] pub struct W(crate::W<CAPCTRL6_SPEC>); impl core::ops::Deref for W { type Target = crate::W<CAPCTRL6_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl From<crate::W<CAPCTRL6_SPEC>> for W { #[inline(always)] fn from(writer: crate::W<CAPCTRL6_SPEC>) -> Self { W(writer) } } #[doc = "Field `CAPCONn_L` reader - If bit m is one, event m causes the CAPn_L (UNIFY = 0) or the CAPn (UNIFY = 1) register to be loaded (event 0 = bit 0, event 1 = bit 1, etc.). The number of bits = number of match/captures in this SCT."] pub struct CAPCONN_L_R(crate::FieldReader<u16, u16>); impl CAPCONN_L_R { #[inline(always)] pub(crate) fn new(bits: u16) -> Self { CAPCONN_L_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for CAPCONN_L_R { type Target = crate::FieldReader<u16, u16>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `CAPCONn_L` writer - If bit m is one, event m causes the CAPn_L (UNIFY = 0) or the CAPn (UNIFY = 1) register to be loaded (event 0 = bit 0, event 1 = bit 1, etc.). 
The number of bits = number of match/captures in this SCT."] pub struct CAPCONN_L_W<'a> { w: &'a mut W, } impl<'a> CAPCONN_L_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u16) -> &'a mut W { self.w.bits = (self.w.bits & !0xffff) | (value as u32 & 0xffff); self.w } } #[doc = "Field `CAPCONn_H` reader - If bit m is one, event m causes the CAPn_H (UNIFY = 0) register to be loaded (event 0 = bit 16, event 1 = bit 17, etc.). The number of bits = number of match/captures in this SCT."] pub struct CAPCONN_H_R(crate::FieldReader<u16, u16>); impl CAPCONN_H_R { #[inline(always)] pub(crate) fn new(bits: u16) -> Self { CAPCONN_H_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for CAPCONN_H_R { type Target = crate::FieldReader<u16, u16>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `CAPCONn_H` writer - If bit m is one, event m causes the CAPn_H (UNIFY = 0) register to be loaded (event 0 = bit 16, event 1 = bit 17, etc.). The number of bits = number of match/captures in this SCT."] pub struct CAPCONN_H_W<'a> { w: &'a mut W, } impl<'a> CAPCONN_H_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u16) -> &'a mut W { self.w.bits = (self.w.bits & !(0xffff << 16)) | ((value as u32 & 0xffff) << 16); self.w } } impl R { #[doc = "Bits 0:15 - If bit m is one, event m causes the CAPn_L (UNIFY = 0) or the CAPn (UNIFY = 1) register to be loaded (event 0 = bit 0, event 1 = bit 1, etc.). The number of bits = number of match/captures in this SCT."] #[inline(always)] pub fn capconn_l(&self) -> CAPCONN_L_R { CAPCONN_L_R::new((self.bits & 0xffff) as u16) } #[doc = "Bits 16:31 - If bit m is one, event m causes the CAPn_H (UNIFY = 0) register to be loaded (event 0 = bit 16, event 1 = bit 17, etc.). 
The number of bits = number of match/captures in this SCT."] #[inline(always)] pub fn capconn_h(&self) -> CAPCONN_H_R { CAPCONN_H_R::new(((self.bits >> 16) & 0xffff) as u16) } } impl W {
#[doc = "Bits 0:15 - If bit m is one, event m causes the CAPn_L (UNIFY = 0) or the CAPn (UNIFY = 1) register to be loaded (event 0 = bit 0, event 1 = bit 1, etc.). The number of bits = number of match/captures in this SCT."] #[inline(always)] pub fn capconn_l(&mut self) -> CAPCONN_L_W { CAPCONN_L_W { w: self } } #[doc = "Bits 16:31 - If bit m is one, event m causes the CAPn_H (UNIFY = 0) register to be loaded (event 0 = bit 16, event 1 = bit 17, etc.). The number of bits = number of match/captures in this SCT."] #[inline(always)] pub fn capconn_h(&mut self) -> CAPCONN_H_W { CAPCONN_H_W { w: self } } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "SCT capture control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [capctrl6](index.html) module"] pub struct CAPCTRL6_SPEC; impl crate::RegisterSpec for CAPCTRL6_SPEC { type Ux = u32; } #[doc = "`read()` method returns [capctrl6::R](R) reader structure"] impl crate::Readable for CAPCTRL6_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [capctrl6::W](W) writer structure"] impl crate::Writable for CAPCTRL6_SPEC { type Writer = W; } #[doc = "`reset()` method sets CAPCTRL6 to value 0"] impl crate::Resettable for CAPCTRL6_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
genre.go
package validator import ( "net/http" "github.com/rl404/go-malscraper/errors" "github.com/rl404/go-malscraper/model" ) // GetGenres to get anime/manga genre list. func (v *Validator) GetGenres(t string) ([]model.ItemCount, int, error) { if t != AnimeType && t != MangaType { return nil, http.StatusBadRequest, errors.ErrInvalidType } return v.api.GetGenres(t) } // GetAnimeWithGenre to get anime list with specific genre. func (v *Validator) GetAnimeWithGenre(id int, page int) ([]model.AnimeItem, int, error) { if page <= 0 { return nil, http.StatusBadRequest, errors.ErrInvalidPage } if !v.isAnimeGenreValid(id) { return nil, http.StatusBadRequest, errors.ErrInvalidID } return v.api.GetAnimeWithGenre(id, page) } // GetMangaWithGenre to get manga list with specific genre. func (v *Validator) GetMangaWithGenre(id int, page int) ([]model.MangaItem, int, error) { if page <= 0
if !v.isMangaGenreValid(id) { return nil, http.StatusBadRequest, errors.ErrInvalidID } return v.api.GetMangaWithGenre(id, page) }
{ return nil, http.StatusBadRequest, errors.ErrInvalidPage }
index.js
import React, { useState } from 'react'; import { Container, Provider, useProvided } from '../src/index.js'; import { render, fireEvent } from '@testing-library/react'; import '@testing-library/react/cleanup-after-each'; const counterHook = (initialState = 0) => { const [count, setCount] = useState(initialState); const add = val => setCount(count + val); const sub = val => setCount(count - val); const reset = () => setCount(initialState); return { count, add, sub, reset }; }; const counterContainer = Container(counterHook); test('Container should work nicely when it wrap hook', () => { expect(typeof counterContainer._id).toEqual('symbol'); expect(typeof counterContainer.hook).toEqual('function'); }); const Sibling1 = () => { const { add, sub } = useProvided(counterContainer); return ( <> <button data-testid="btn-add" onClick={() => add(1)}> +1 </button> <button data-testid="btn-sub" onClick={() => sub(1)}> -1 </button> <button data-testid="btn-reset" onClick={() => reset()}> reset </button> </> ); }; const Sibling2 = () => { const { count } = useProvided(counterContainer); return <p data-testid="panel-count">{count}</p>; }; const SimpleApp = () => { return ( <Provider inject={[counterContainer]}> <Sibling1 /> <Sibling2 /> </Provider> ); }; test('Two components shared hook', () => { const { getByTestId } = render(<SimpleApp />); expect(getByTestId('panel-count').innerHTML).toEqual('0'); fireEvent.click(getByTestId('btn-add')); expect(getByTestId('panel-count').innerHTML).toEqual('1'); fireEvent.click(getByTestId('btn-add')); fireEvent.click(getByTestId('btn-add')); expect(getByTestId('panel-count').innerHTML).toEqual('3'); fireEvent.click(getByTestId('btn-sub')); expect(getByTestId('panel-count').innerHTML).toEqual('2'); }); test('Cleanup are still working', () => { const { getByTestId } = render(<SimpleApp />); expect(getByTestId('panel-count').innerHTML).toEqual('0'); }); const FakeApp = () => { return ( <Provider> <div>Render without broken but get warn</div> <p 
data-testid="panel-render">not broken</p> </Provider> ); }; test('Provider inject miss could render', () => { console.warn = () => {};
test('Provider inject miss will show warning message', () => { console.warn = jest.fn(); render(<FakeApp />); expect(console.warn).toHaveBeenCalled(); });
const { getByTestId } = render(<FakeApp />); expect(getByTestId('panel-render').innerHTML).toEqual('not broken'); });
0007_auto_20170613_0605.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class
(migrations.Migration): dependencies = [ ('library', '0006_auto_20170516_0903'), ] operations = [ migrations.RenameField( model_name='borrowitem', old_name='borrow_find_id', new_name='book_id', ), migrations.RemoveField( model_name='borrowitem', name='library_name', ), migrations.AddField( model_name='borrowitem', name='find_id', field=models.TextField(default=None), ), ]
Migration
cmdb_switches_logic.py
""" Component logic """ from bluecat.util import get_password_from_file from ..cmdb_configuration import cmdb_config import requests def raw_table_data(*args, **kwargs): # pylint: disable=redefined-outer-name data = {'columns': [{'title': 'Name'}, {'title': 'IP Address'}, {'title': 'Serial Number'}, {'title': 'Manufacturer'}, ], 'data': []} # HTTP request headers = {"Accept": "application/json"} cmdb_url = cmdb_config.servicenow_url + '/api/now/table/cmdb_ci_ip_switch' response = requests.get(cmdb_url, auth=(cmdb_config.servicenow_username, get_password_from_file(cmdb_config.servicenow_secret_file)), headers=headers, verify=False) # Check for HTTP codes other than 200 if response.status_code == 200: switches = response.json() for switch in switches['result']: switch_name = switch['name'] switch_ip = switch['ip_address'] switch_serial = switch['serial_number'] if switch['manufacturer']: switch_manufacturer = get_switch_manufacturer(switch['manufacturer']['link']) data['data'].append([switch_name, switch_ip, switch_serial, switch_manufacturer]) return data def
(link): headers = {"Accept": "application/json"} response = requests.get(link, auth=(cmdb_config.servicenow_username, get_password_from_file(cmdb_config.servicenow_secret_file)), headers=headers, verify=False) manufacturer = response.json() return manufacturer['result']['name']
get_switch_manufacturer
permutation_structure.rs
use crate::pairing::ff::{Field}; use crate::pairing::{Engine, CurveProjective}; use std::marker::PhantomData; use crate::sonic::helped::{Proof, SxyAdvice}; use crate::sonic::helped::batch::Batch; use crate::sonic::helped::poly::{SxEval, SyEval}; use crate::sonic::helped::Parameters; use crate::SynthesisError; use crate::sonic::transcript::{Transcript, TranscriptProtocol}; use crate::sonic::util::*; use crate::sonic::cs::{Backend, SynthesisDriver, ConstraintSystem}; use crate::sonic::cs::{Circuit, Variable, Coeff}; use crate::sonic::srs::SRS; use crate::sonic::sonic::Preprocess; use crate::sonic::sonic::M; use crate::sonic::sonic::PermutationSynthesizer; use super::s2_proof::*; use super::permutation_argument::*; #[derive(Clone)] pub struct PermutationStructure<E: Engine> { pub n: usize, pub q: usize, pub a: Vec<[Option<(Coeff<E>, usize)>; M]>, pub b: Vec<[Option<(Coeff<E>, usize)>; M]>, pub c: Vec<[Option<(Coeff<E>, usize)>; M]>, } pub fn create_permutation_structure<E: Engine, C: Circuit<E>>( circuit: &C, ) -> PermutationStructure<E> { let mut backend: Preprocess<E> = Preprocess::new(); let (a, b, c) = { let mut cs: PermutationSynthesizer<E, &'_ mut Preprocess<E>> = PermutationSynthesizer::new(&mut backend); let one = cs.alloc_input(|| Ok(E::Fr::one())).expect("should have no issues"); match (one, <PermutationSynthesizer<E, &'_ mut Preprocess<E>> as ConstraintSystem<E>>::ONE) { (Variable::A(1), Variable::A(1)) => {}, _ => panic!("one variable is incorrect") } circuit.synthesize(&mut cs).expect("should synthesize"); (cs.a, cs.b, cs.c) }; let n = backend.n; let q = backend.q; // println!("Will have {} gates and {} linear constraints", n, q); PermutationStructure::<E> { n: n, q: q, a: a, b: b, c: c } } use rand::{Rng, Rand}; impl<E: Engine> PermutationStructure<E> { pub fn calculate_s2_commitment_value(&self, srs: &SRS<E>) -> E::G1Affine { S2Eval::calculate_commitment_element(self.n, srs) } pub fn calculate_s2_proof(&self, x: E::Fr, y: E::Fr, srs: &SRS<E>) -> 
S2Proof<E> { let s2_eval = S2Eval::new(self.n); s2_eval.evaluate(x, y, &srs) } pub fn create_inverse_permutation_vectors(&self) -> (Vec<Vec<E::Fr>>, Vec<Vec<usize>>) { // we have to form non-permuted coefficients, as well as permutation structures; let n = self.n; let mut non_permuted_coeffs = vec![vec![E::Fr::zero(); 3*n+1]; M]; let mut permutations = vec![vec![0usize; 3*n+1]; M]; let one = E::Fr::one(); let mut minus_one = E::Fr::one(); minus_one.negate(); let mut not_empty = [false; M]; // go other the permutations for (gate_index, info) in self.a.iter().enumerate() { let offset = n-1; for i in 0..M { // coefficients of A are placed at the offset = 0 from the beginning of the vector if let Some((coeff, place)) = info[i].as_ref() { // place it assert!(*place != 0); let array_position = offset - gate_index; // special for A let place_coeff_into = &mut non_permuted_coeffs[i]; let place_permutation_into = &mut permutations[i]; match coeff { Coeff::Zero => { }, Coeff::One => { not_empty[i] = true; place_coeff_into[array_position] = one; place_permutation_into[array_position] = *place; }, Coeff::NegativeOne => { not_empty[i] = true; place_coeff_into[array_position] = minus_one; place_permutation_into[array_position] = *place; }, Coeff::Full(value) =>
} } } } for (gate_index, info) in self.b.iter().enumerate() { let offset = n + 1; for i in 0..M { if let Some((coeff, place)) = info[i].as_ref() { // place it assert!(*place != 0); let array_position = offset + gate_index; let place_coeff_into = &mut non_permuted_coeffs[i]; let place_permutation_into = &mut permutations[i]; match coeff { Coeff::Zero => { }, Coeff::One => { not_empty[i] = true; place_coeff_into[array_position] = one; place_permutation_into[array_position] = *place; }, Coeff::NegativeOne => { not_empty[i] = true; place_coeff_into[array_position] = minus_one; place_permutation_into[array_position] = *place; }, Coeff::Full(value) => { not_empty[i] = true; place_coeff_into[array_position] = *value; place_permutation_into[array_position] = *place; } } } } } for (gate_index, info) in self.c.iter().enumerate() { let offset = 2*n + 1; for i in 0..M { // coefficients of A are placed at the offset = 0 from the beginning of the vector if let Some((coeff, place)) = info[i].as_ref() { // place it assert!(*place != 0); let array_position = offset + gate_index; let place_coeff_into = &mut non_permuted_coeffs[i]; let place_permutation_into = &mut permutations[i]; match coeff { Coeff::Zero => { }, Coeff::One => { not_empty[i] = true; place_coeff_into[array_position] = one; place_permutation_into[array_position] = *place; }, Coeff::NegativeOne => { not_empty[i] = true; place_coeff_into[array_position] = minus_one; place_permutation_into[array_position] = *place; }, Coeff::Full(value) => { not_empty[i] = true; place_coeff_into[array_position] = *value; place_permutation_into[array_position] = *place; } } } } } Self::print_constraints(n, self.q, &non_permuted_coeffs, &permutations); // need to fill arrays with non-zero indexes just to have full permutation, even while it's just zero coefficient // TODO: fix let mut m = M; for i in (0..M).into_iter().rev() { // these are no constant terms assert!(non_permuted_coeffs[i][n].is_zero()); assert!(permutations[i][n] == 0); } 
for i in (0..M).into_iter().rev() { if !not_empty[i] { non_permuted_coeffs.pop(); permutations.pop(); m -= 1; } } assert!(m != 0); // find something faster, although it's still linear for i in 0..m { let mut fillers: Vec<usize> = (1..=(3*n+1)).map(|el| el).collect(); for (p, c) in permutations[i].iter_mut().zip(non_permuted_coeffs[i].iter()) { if *p == 0 { assert!(c.is_zero()); } else { fillers[*p - 1] = 0; } } let mut fill_from = 0; for p in permutations[i].iter_mut() { if *p == 0 { loop { if fillers[fill_from] != 0 { *p = fillers[fill_from]; fill_from += 1; break; } else { fill_from += 1; } } } } } (non_permuted_coeffs, permutations) } pub fn create_permutation_vectors(&self) -> (Vec<Vec<E::Fr>>, Vec<Vec<usize>>) { // we have to form non-permuted coefficients, as well as permutation structures; let n = self.n; let mut non_permuted_coeffs = vec![vec![E::Fr::zero(); 3*n+1]; M]; let mut permutations = vec![vec![0usize; 3*n+1]; M]; let one = E::Fr::one(); let mut minus_one = E::Fr::one(); minus_one.negate(); let mut not_empty = [false; M]; // go other the permutations for (gate_index, info) in self.a.iter().enumerate() { let offset = n-1; for i in 0..M { // coefficients of A are placed at the offset = 0 from the beginning of the vector if let Some((coeff, place)) = info[i].as_ref() { // place it assert!(*place != 0); let array_position = offset - gate_index; // special for A let coeff_position = *place - 1; let place_coeff_into = &mut non_permuted_coeffs[i]; let place_permutation_into = &mut permutations[i]; match coeff { Coeff::Zero => { }, Coeff::One => { not_empty[i] = true; place_coeff_into[coeff_position] = one; place_permutation_into[array_position] = *place; }, Coeff::NegativeOne => { not_empty[i] = true; place_coeff_into[coeff_position] = minus_one; place_permutation_into[array_position] = *place; }, Coeff::Full(value) => { not_empty[i] = true; place_coeff_into[coeff_position] = *value; place_permutation_into[array_position] = *place; } } } } } for 
(gate_index, info) in self.b.iter().enumerate() { let offset = n + 1; for i in 0..M { if let Some((coeff, place)) = info[i].as_ref() { // place it assert!(*place != 0); let array_position = offset + gate_index; let coeff_position = *place - 1; let place_coeff_into = &mut non_permuted_coeffs[i]; let place_permutation_into = &mut permutations[i]; match coeff { Coeff::Zero => { }, Coeff::One => { not_empty[i] = true; place_coeff_into[coeff_position] = one; place_permutation_into[array_position] = *place; }, Coeff::NegativeOne => { not_empty[i] = true; place_coeff_into[coeff_position] = minus_one; place_permutation_into[array_position] = *place; }, Coeff::Full(value) => { not_empty[i] = true; place_coeff_into[coeff_position] = *value; place_permutation_into[array_position] = *place; } } } } } for (gate_index, info) in self.c.iter().enumerate() { let offset = 2*n + 1; for i in 0..M { // coefficients of A are placed at the offset = 0 from the beginning of the vector if let Some((coeff, place)) = info[i].as_ref() { // place it assert!(*place != 0); let array_position = offset + gate_index; let coeff_position = *place - 1; let place_coeff_into = &mut non_permuted_coeffs[i]; let place_permutation_into = &mut permutations[i]; match coeff { Coeff::Zero => { }, Coeff::One => { not_empty[i] = true; place_coeff_into[coeff_position] = one; place_permutation_into[array_position] = *place; }, Coeff::NegativeOne => { not_empty[i] = true; place_coeff_into[coeff_position] = minus_one; place_permutation_into[array_position] = *place; }, Coeff::Full(value) => { not_empty[i] = true; place_coeff_into[coeff_position] = *value; place_permutation_into[array_position] = *place; } } } } } // Self::print_constraints(n, self.q, &non_permuted_coeffs, &permutations); // need to fill arrays with non-zero indexes just to have full permutation, even while it's just zero coefficient // TODO: fix let mut m = M; // for i in (0..M).into_iter().rev() { // // these are no constant terms // 
assert!(non_permuted_coeffs[i][n].is_zero()); // assert!(permutations[i][n] == 0); // } for i in (0..M).into_iter().rev() { if !not_empty[i] { non_permuted_coeffs.pop(); permutations.pop(); m -= 1; } } assert!(m != 0); // find something faster, although it's still linear for i in 0..m { let mut fillers: Vec<usize> = (1..=(3*n+1)).map(|el| el).collect(); for (p, _c) in permutations[i].iter_mut().zip(non_permuted_coeffs[i].iter()) { if *p == 0 { continue; // assert!(c.is_zero()); } else { fillers[*p - 1] = 0; } } let mut fill_from = 0; for p in permutations[i].iter_mut() { if *p == 0 { loop { if fillers[fill_from] != 0 { *p = fillers[fill_from]; fill_from += 1; break; } else { fill_from += 1; } } } } } (non_permuted_coeffs, permutations) } pub fn print_constraints(n:usize, q: usize, coeffs: &Vec<Vec<E::Fr>>, permutations: &Vec<Vec<usize>>) { let m = coeffs.len(); for constraint_idx in 1..=q { println!("Constraint {} (term for Y^{})", constraint_idx, constraint_idx); let mut terms = vec![]; for p_idx in 0..m { if let Some(variable_idx) = permutations[p_idx].iter().position(|&s| s == constraint_idx) { let coeff = coeffs[p_idx][variable_idx]; terms.push((variable_idx, coeff)); } } for (var_idx, coeff) in terms.into_iter() { if var_idx < n + 1 { print!("{} * A({})", coeff, n - var_idx); } else if var_idx < 2*n + 1 { print!("{} * B({})", coeff, var_idx - n); } else { print!("{} * C({})", coeff, var_idx - 2*n); } print!("\n"); } } } pub fn create_permutation_special_reference(&self, srs: &SRS<E>) -> SpecializedSRS<E> { let (non_permuted_coeffs, permutations) = self.create_permutation_vectors(); let specialized_srs = PermutationArgument::make_specialized_srs( &non_permuted_coeffs, &permutations, &srs ); specialized_srs } pub fn make_signature(&self, y: E::Fr, z: E::Fr, srs: &SRS<E>) -> SignatureOfCorrectComputation<E> { let (non_permuted_coeffs, permutations) = self.create_permutation_vectors(); let mut s_contrib = E::Fr::zero(); for permutation_index in 
0..permutations.len() { for (variable_index, sigma_i) in permutations[permutation_index].iter().enumerate() { let y_power = y.pow([*sigma_i as u64]); let x_power = z.pow([(variable_index+1) as u64]); let coeff = non_permuted_coeffs[permutation_index][*sigma_i - 1]; let mut result = coeff; result.mul_assign(&x_power); result.mul_assign(&y_power); s_contrib.add_assign(&result); } } let z_n_plus_1_inv = z.pow([(self.n + 1) as u64]).inverse().unwrap(); let y_n = y.pow([self.n as u64]); println!("Naive S contribution = {}", s_contrib); s_contrib.mul_assign(&z_n_plus_1_inv); s_contrib.mul_assign(&y_n); println!("Naive S contribution scaled = {}", s_contrib); // let specialized_srs = PermutationArgument::make_specialized_srs( // &non_permuted_coeffs, // &permutations, // &srs // ); let signature = PermutationArgument::make_signature( non_permuted_coeffs, permutations, y, z, &srs, ); signature } pub fn create_permutation_arguments<R: Rng>(&self, y: E::Fr, z: E::Fr, rng: &mut R, srs: &SRS<E>) -> (Vec<(E::G1Affine, E::G1Affine)>, Vec<E::Fr>, PermutationProof<E>, PermutationArgumentProof<E>, E::Fr, usize, E::Fr) { // we have to form non-permuted coefficients, as well as permutation structures; let n = self.n; let (non_permuted_coeffs, permutations) = self.create_permutation_vectors(); let m = non_permuted_coeffs.len(); println!("Will need {} permutation polynomials", m); let specialized_srs = PermutationArgument::make_specialized_srs( &non_permuted_coeffs, &permutations, &srs ); // evaluate S naively let mut s_contrib = E::Fr::zero(); for permutation_index in 0..m { for (variable_index, sigma_i) in permutations[permutation_index].iter().enumerate() { let y_power = y.pow([*sigma_i as u64]); let x_power = z.pow([(variable_index+1) as u64]); let coeff = non_permuted_coeffs[permutation_index][*sigma_i - 1]; let mut result = coeff; result.mul_assign(&x_power); result.mul_assign(&y_power); s_contrib.add_assign(&result); } } println!("Naive S contribution = {}", s_contrib); let mut 
argument = PermutationArgument::new(non_permuted_coeffs, permutations); let challenges = (0..m).map(|_| E::Fr::rand(rng)).collect::<Vec<_>>(); let commitments = argument.commit(y, &srs); let mut s_commitments = vec![]; let mut s_prime_commitments = vec![]; for (s, s_prime) in commitments.clone().into_iter() { s_commitments.push(s); // println!("S' = {}", s_prime); s_prime_commitments.push(s_prime); } let z_prime : E::Fr = rng.gen(); let opening = argument.open_commitments_to_s_prime(&challenges, y, z_prime, &srs); let randomness = (0..2).map(|_| E::Fr::rand(rng)).collect::<Vec<_>>(); let valid = PermutationArgument::verify_s_prime_commitment(n, &randomness, &challenges, &s_prime_commitments, &opening, y, z_prime, &specialized_srs, &srs); assert!(valid, "s' commitment must be valid"); let beta : E::Fr = rng.gen(); let gamma : E::Fr = rng.gen(); let grand_product_challenges = (0..m).map(|_| E::Fr::rand(rng)).collect::<Vec<_>>(); let wellformed_challenges = (0..(2*m)).map(|_| E::Fr::rand(rng)).collect::<Vec<_>>(); let proof = argument.make_argument( beta, gamma, & grand_product_challenges, & wellformed_challenges, y, z, &specialized_srs, &srs); let valid = PermutationArgument::verify(&s_commitments, &proof, z, &srs); assert!(valid, "permutation argument must be valid"); (commitments, challenges, opening, proof, z_prime, m, s_contrib) } } #[test] fn test_simple_succinct_sonic() { use crate::pairing::ff::{Field, PrimeField}; use crate::pairing::{Engine, CurveAffine, CurveProjective}; use crate::pairing::bls12_381::{Bls12, Fr}; use std::time::{Instant}; use crate::sonic::srs::SRS; use crate::sonic::cs::{Circuit, ConstraintSystem, LinearCombination}; struct MyCircuit; impl<E: Engine> Circuit<E> for MyCircuit { fn synthesize<CS: ConstraintSystem<E>>(&self, cs: &mut CS) -> Result<(), SynthesisError> { let (a, b, c) = cs.multiply(|| { Ok(( E::Fr::from_str("10").unwrap(), E::Fr::from_str("20").unwrap(), E::Fr::from_str("200").unwrap(), )) })?; 
cs.enforce_zero(LinearCombination::zero() + (Coeff::Full(E::Fr::from_str("2").unwrap()), a) - b); cs.enforce_zero(LinearCombination::zero() + (Coeff::Full(E::Fr::from_str("20").unwrap()), a) - c); cs.enforce_zero(LinearCombination::zero() + (Coeff::Full(E::Fr::from_str("10").unwrap()), b) - c); // let multiplier = cs.alloc_input(|| Ok(E::Fr::from_str("20").unwrap()))?; // cs.enforce_zero(LinearCombination::from(b) - multiplier); // let (a1, b1, _) = cs.multiply(|| { // Ok(( // E::Fr::from_str("5").unwrap(), // E::Fr::from_str("5").unwrap(), // E::Fr::from_str("25").unwrap(), // )) // })?; // cs.enforce_zero(LinearCombination::zero() + (Coeff::Full(E::Fr::from_str("2").unwrap()), b1) - a); // cs.enforce_zero(LinearCombination::zero() + (Coeff::Full(E::Fr::from_str("4").unwrap()), a1) - b); // cs.enforce_zero(LinearCombination::zero() + (Coeff::Full(E::Fr::from_str("40").unwrap()), b1) - c); Ok(()) } } let srs_x = Fr::from_str("23923").unwrap(); let srs_alpha = Fr::from_str("23728792").unwrap(); println!("making srs"); let start = Instant::now(); // let srs = SRS::<Bls12>::dummy(830564, srs_x, srs_alpha); let srs = SRS::<Bls12>::new(100, srs_x, srs_alpha); println!("done in {:?}", start.elapsed()); { use rand::{XorShiftRng, SeedableRng, Rand, Rng}; let _rng = &mut XorShiftRng::from_seed([0x3dbe6259, 0x8d313d76, 0x3237db17, 0xe5bc0654]); use crate::sonic::sonic::Basic; use crate::sonic::sonic::AdaptorCircuit; use crate::sonic::helped::prover::{create_advice_on_srs, create_proof_on_srs}; use crate::sonic::helped::{MultiVerifier, get_circuit_parameters_for_succinct_sonic}; use crate::sonic::helped::helper::{create_aggregate_on_srs}; use crate::sonic::sonic::Permutation3; use crate::sonic::unhelped::permutation_structure::*; // let z: Fr = rng.gen(); // let y: Fr = rng.gen(); let z: Fr = Fr::from_str("2").unwrap(); let y: Fr = Fr::one(); let perm_structure = create_permutation_structure::<Bls12, _>(&MyCircuit); let (non_permuted_coeffs, permutations) = 
perm_structure.create_permutation_vectors(); println!("Non-permuted = {:?}", non_permuted_coeffs[0]); println!("Permutation = {:?}", permutations[0]); println!("N = {}, Q = {}", perm_structure.n, perm_structure.q); let n = perm_structure.n; let szy = { let mut tmp = SxEval::<Bls12>::new(y, n); Permutation3::synthesize(&mut tmp, &MyCircuit).unwrap(); // TODO tmp.finalize(z) }; let naive_s1 = { let mut res = Fr::zero(); for j in 0..permutations.len() { for i in 0..non_permuted_coeffs[j].len() { let sigma_i = permutations[j][i]; let coeff_i = non_permuted_coeffs[j][i]; // let coeff_sigma_i = non_permuted_coeffs[j][sigma_i - 1]; let y_power = y.pow([sigma_i as u64]); let x_power = z.pow([(i+1) as u64]); // let mut result = coeff_sigma_i; let mut result = coeff_i; result.mul_assign(&y_power); result.mul_assign(&x_power); res.add_assign(&result); } } res }; println!("Naive s1 = {}", naive_s1); // perm_structure.create_permutation_arguments(y, z, rng, &srs); let signature = perm_structure.make_signature(y, z, &srs); let s2 = S2Eval::new(perm_structure.n); let s2 = s2.evaluate(z, y, &srs); let mut s2_value = s2.c_value; s2_value.add_assign(&s2.d_value); let mut expected_s2_value = Fr::zero(); let y_inv = y.inverse().unwrap(); let mut p1 = y; p1.add_assign(&y_inv); p1.mul_assign(&z); expected_s2_value.add_assign(&p1); let mut t0 = y; t0.square(); let mut t1 = y_inv; t1.square(); let mut p2 = t0; p2.add_assign(&t1); p2.mul_assign(&z); p2.mul_assign(&z); expected_s2_value.add_assign(&p2); let z_n = z.pow([n as u64]); let z_n_plus_1_inv = z.pow([(n + 1) as u64]).inverse().unwrap(); let y_n = y.pow([n as u64]); assert!(expected_s2_value == s2_value); s2_value.mul_assign(&z_n); let mut s1 = signature.perm_argument_proof.s_zy; println!("S1 = {}", s1); s1.mul_assign(&z_n_plus_1_inv); s1.mul_assign(&y_n); s1.sub_assign(&s2_value); let mut naive_s1 = naive_s1; naive_s1.mul_assign(&z_n_plus_1_inv); naive_s1.mul_assign(&y_n); naive_s1.sub_assign(&s2_value); println!("S1(?) 
= {}", naive_s1); assert_eq!(s1, szy); } }
{ not_empty[i] = true; place_coeff_into[array_position] = *value; place_permutation_into[array_position] = *place; }
SystemUtil.py
import getpass import os import platform class SystemUtil: __SYSTEM_NAMES_WINDOWS: set = ['Windows'] @staticmethod def isWindows() -> bool: osName: str = platform.system() return osName in SystemUtil.__SYSTEM_NAMES_WINDOWS @staticmethod def
() -> str: username = getpass.getuser() return username @staticmethod def getCurrentUserGroups() -> list: if not SystemUtil.isWindows(): return os.getgroups()
getCurrentUserName
eval.py
# copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import sys __dir__ = os.path.dirname(os.path.abspath(__file__)) sys.path.append(__dir__) sys.path.append(os.path.abspath(os.path.join(__dir__, '../'))) import argparse import paddle import paddle.fluid as fluid import program from ppcls.data import Reader from ppcls.utils.config import get_config from ppcls.utils.save_load import init_model from paddle.fluid.incubate.fleet.collective import fleet from paddle.fluid.incubate.fleet.base import role_maker def parse_args(): parser = argparse.ArgumentParser("PaddleClas eval script") parser.add_argument( '-c', '--config', type=str, default='./configs/eval.yaml', help='config file path') parser.add_argument( '-o', '--override', action='append', default=[], help='config options to be overridden') args = parser.parse_args() return args def main(args):
if __name__ == '__main__': paddle.enable_static() args = parse_args() main(args)
role = role_maker.PaddleCloudRoleMaker(is_collective=True) fleet.init(role) config = get_config(args.config, overrides=args.override, show=True) gpu_id = int(os.environ.get('FLAGS_selected_gpus', 0)) place = fluid.CUDAPlace(gpu_id) startup_prog = fluid.Program() valid_prog = fluid.Program() valid_dataloader, valid_fetchs = program.build( config, valid_prog, startup_prog, is_train=False) valid_prog = valid_prog.clone(for_test=True) exe = fluid.Executor(place) exe.run(startup_prog) init_model(config, valid_prog, exe) valid_reader = Reader(config, 'valid')() valid_dataloader.set_sample_list_generator(valid_reader, place) compiled_valid_prog = program.compile(config, valid_prog) program.run(valid_dataloader, exe, compiled_valid_prog, valid_fetchs, -1, 'eval', config)
compat.py
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Utilities for API compatibility between TensorFlow release versions. See [Version Compatibility](https://tensorflow.org/guide/version_compat#backward_forward) """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import datetime import os from tensorflow.python.util import tf_contextlib from tensorflow.python.util.tf_export import tf_export # This value changes every day with an automatic CL. It can be modified in code # via `forward_compatibility_horizon()` or with the environment variable # TF_FORWARD_COMPATIBILITY_DELTA_DAYS, which is added to the compatibility date. 
_FORWARD_COMPATIBILITY_HORIZON = datetime.date(2020, 2, 5) _FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME = "TF_FORWARD_COMPATIBILITY_DELTA_DAYS" _FORWARD_COMPATIBILITY_DATE_NUMBER = None def _date_to_date_number(year, month, day): return (year << 9) | (month << 5) | day def _update_forward_compatibility_date_number(date_to_override=None): """Update the base date to compare in forward_compatible function.""" global _FORWARD_COMPATIBILITY_DATE_NUMBER if date_to_override: date = date_to_override else: date = _FORWARD_COMPATIBILITY_HORIZON delta_days = os.getenv(_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME) if delta_days: date += datetime.timedelta(days=int(delta_days)) _FORWARD_COMPATIBILITY_DATE_NUMBER = _date_to_date_number( date.year, date.month, date.day) _update_forward_compatibility_date_number() @tf_export("compat.forward_compatible") def forward_compatible(year, month, day):
@tf_export("compat.forward_compatibility_horizon") @tf_contextlib.contextmanager def forward_compatibility_horizon(year, month, day): """Context manager for testing forward compatibility of generated graphs. See [Version compatibility](https://tensorflow.org/guide/version_compat#backward_forward). To ensure forward compatibility of generated graphs (see `forward_compatible`) with older binaries, new features can be gated with: ```python if compat.forward_compatible(year=2018, month=08, date=01): generate_graph_with_new_features() else: generate_graph_so_older_binaries_can_consume_it() ``` However, when adding new features, one may want to unittest it before the forward compatibility window expires. This context manager enables such tests. For example: ```python from tensorflow.python.compat import compat def testMyNewFeature(self): with compat.forward_compatibility_horizon(2018, 08, 02): # Test that generate_graph_with_new_features() has an effect ``` Args: year: A year (e.g., 2018). Must be an `int`. month: A month (1 <= month <= 12) in year. Must be an `int`. day: A day (1 <= day <= 31, or 30, or 29, or 28) in month. Must be an `int`. Yields: Nothing. """ try: _update_forward_compatibility_date_number(datetime.date(year, month, day)) yield finally: _update_forward_compatibility_date_number()
"""Return true if the forward compatibility window has expired. See [Version compatibility](https://tensorflow.org/guide/version_compat#backward_forward). Forward-compatibility refers to scenarios where the producer of a TensorFlow model (a GraphDef or SavedModel) is compiled against a version of the TensorFlow library newer than what the consumer was compiled against. The "producer" is typically a Python program that constructs and trains a model while the "consumer" is typically another program that loads and serves the model. TensorFlow has been supporting a 3 week forward-compatibility window for programs compiled from source at HEAD. For example, consider the case where a new operation `MyNewAwesomeAdd` is created with the intent of replacing the implementation of an existing Python wrapper - `tf.add`. The Python wrapper implementation should change from something like: ```python def add(inputs, name=None): return gen_math_ops.add(inputs, name) ``` to: ```python from tensorflow.python.compat import compat def add(inputs, name=None): if compat.forward_compatible(year, month, day): # Can use the awesome new implementation. return gen_math_ops.my_new_awesome_add(inputs, name) # To maintain forward compatibiltiy, use the old implementation. return gen_math_ops.add(inputs, name) ``` Where `year`, `month`, and `day` specify the date beyond which binaries that consume a model are expected to have been updated to include the new operations. This date is typically at least 3 weeks beyond the date the code that adds the new operation is committed. Args: year: A year (e.g., 2018). Must be an `int`. month: A month (1 <= month <= 12) in year. Must be an `int`. day: A day (1 <= day <= 31, or 30, or 29, or 28) in month. Must be an `int`. Returns: True if the caller can expect that serialized TensorFlow graphs produced can be consumed by programs that are compiled with the TensorFlow library source code after (year, month, day). 
""" return _FORWARD_COMPATIBILITY_DATE_NUMBER > _date_to_date_number( year, month, day)
freepdk45_demo.py
import siliconcompiler ############################################################################ # DOCS ############################################################################
''' Demonstration target for compiling ASICs with FreePDK45 and the open-source asicflow. ''' chip = siliconcompiler.Chip('<design>') setup(chip) return chip #################################################### # PDK Setup #################################################### def setup(chip): ''' Target setup ''' #0. Defining the project chip.set('option', 'target', 'freepdk45_demo') #1. Setting to ASIC mode chip.set('option', 'mode','asic') #2. Load PDK, flow, libs combo chip.load_pdk('freepdk45') chip.load_flow('lintflow') chip.load_flow('asicflow') chip.load_flow('asictopflow') chip.load_lib('nangate45') #3. Set flow and pdk chip.set('option', 'flow', 'asicflow', clobber=False) chip.set('option', 'pdk', 'freepdk45') #4. Select libraries chip.set('asic', 'logiclib', 'nangate45') #5. Set project specific design choices chip.set('asic', 'stackup', '10M') chip.set('asic', 'delaymodel', 'nldm') chip.set('asic', 'minlayer', "m1") chip.set('asic', 'maxlayer', "m10") chip.set('asic', 'maxfanout', 64) chip.set('asic', 'maxlength', 1000) chip.set('asic', 'maxslew', 0.2e-9) chip.set('asic', 'maxcap', 0.2e-12) chip.set('asic', 'rclayer', 'clk', "m5") chip.set('asic', 'rclayer', 'data',"m3") chip.set('asic', 'hpinlayer', "m3") chip.set('asic', 'vpinlayer', "m2") chip.set('asic', 'density', 10) chip.set('asic', 'aspectratio', 1) chip.set('asic', 'coremargin', 1.9) #6. Timing corners corner = 'typical' chip.set('constraint','worst','libcorner', corner) chip.set('constraint','worst','pexcorner', corner) chip.set('constraint','worst','mode', 'func') chip.set('constraint','worst','check', ['setup','hold']) ######################### if __name__ == "__main__": chip = make_docs()
def make_docs():
os.rs
// Copyright 2013-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Interfaces to the operating system provided random number //! generators. pub use self::imp::OsRng; #[cfg(all(unix, not(target_os = "ios")))] mod imp { extern crate libc; use self::OsRngInner::*; use old_io::{IoResult, File}; use old_path::Path; use rand::Rng; use rand::reader::ReaderRng; use result::Result::Ok; use slice::SliceExt; use mem; use os::errno; #[cfg(all(target_os = "linux", any(target_arch = "x86_64", target_arch = "x86", target_arch = "arm", target_arch = "aarch64", target_arch = "powerpc")))] fn getrandom(buf: &mut [u8]) -> libc::c_long { extern "C" { fn syscall(number: libc::c_long, ...) 
-> libc::c_long; } #[cfg(target_arch = "x86_64")] const NR_GETRANDOM: libc::c_long = 318; #[cfg(target_arch = "x86")] const NR_GETRANDOM: libc::c_long = 355; #[cfg(any(target_arch = "arm", target_arch = "aarch64"))] const NR_GETRANDOM: libc::c_long = 384; #[cfg(target_arch = "powerpc")] const NR_GETRANDOM: libc::c_long = 384; unsafe { syscall(NR_GETRANDOM, buf.as_mut_ptr(), buf.len(), 0) } } #[cfg(not(all(target_os = "linux", any(target_arch = "x86_64", target_arch = "x86", target_arch = "arm", target_arch = "aarch64", target_arch = "powerpc"))))] fn getrandom(_buf: &mut [u8]) -> libc::c_long { -1 } fn getrandom_fill_bytes(v: &mut [u8]) { let mut read = 0; let len = v.len(); while read < len { let result = getrandom(&mut v[read..]); if result == -1 { let err = errno() as libc::c_int; if err == libc::EINTR { continue; } else { panic!("unexpected getrandom error: {}", err); } } else { read += result as usize; } } } fn getrandom_next_u32() -> u32 { let mut buf: [u8; 4] = [0u8; 4]; getrandom_fill_bytes(&mut buf); unsafe { mem::transmute::<[u8; 4], u32>(buf) } } fn getrandom_next_u64() -> u64 { let mut buf: [u8; 8] = [0u8; 8]; getrandom_fill_bytes(&mut buf); unsafe { mem::transmute::<[u8; 8], u64>(buf) } } #[cfg(all(target_os = "linux", any(target_arch = "x86_64", target_arch = "x86", target_arch = "arm", target_arch = "aarch64", target_arch = "powerpc")))] fn is_getrandom_available() -> bool { use sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT, Ordering}; static GETRANDOM_CHECKED: AtomicBool = ATOMIC_BOOL_INIT; static GETRANDOM_AVAILABLE: AtomicBool = ATOMIC_BOOL_INIT; if !GETRANDOM_CHECKED.load(Ordering::Relaxed) { let mut buf: [u8; 0] = []; let result = getrandom(&mut buf); let available = if result == -1 { let err = errno() as libc::c_int; err != libc::ENOSYS } else { true }; GETRANDOM_AVAILABLE.store(available, Ordering::Relaxed); GETRANDOM_CHECKED.store(true, Ordering::Relaxed); available } else { GETRANDOM_AVAILABLE.load(Ordering::Relaxed) } } 
#[cfg(not(all(target_os = "linux", any(target_arch = "x86_64", target_arch = "x86", target_arch = "arm", target_arch = "aarch64", target_arch = "powerpc"))))] fn is_getrandom_available() -> bool { false } /// A random number generator that retrieves randomness straight from /// the operating system. Platform sources: /// /// - Unix-like systems (Linux, Android, Mac OSX): read directly from /// `/dev/urandom`, or from `getrandom(2)` system call if available. /// - Windows: calls `CryptGenRandom`, using the default cryptographic /// service provider with the `PROV_RSA_FULL` type. /// - iOS: calls SecRandomCopyBytes as /dev/(u)random is sandboxed. /// /// This does not block. pub struct OsRng { inner: OsRngInner, } enum OsRngInner { OsGetrandomRng, OsReaderRng(ReaderRng<File>), } impl OsRng { /// Create a new `OsRng`. pub fn new() -> IoResult<OsRng> { if is_getrandom_available() { return Ok(OsRng { inner: OsGetrandomRng }); } let reader = try!(File::open(&Path::new("/dev/urandom"))); let reader_rng = ReaderRng::new(reader); Ok(OsRng { inner: OsReaderRng(reader_rng) }) } } impl Rng for OsRng { fn next_u32(&mut self) -> u32 { match self.inner { OsGetrandomRng => getrandom_next_u32(), OsReaderRng(ref mut rng) => rng.next_u32(), } } fn next_u64(&mut self) -> u64 { match self.inner { OsGetrandomRng => getrandom_next_u64(), OsReaderRng(ref mut rng) => rng.next_u64(), } } fn fill_bytes(&mut self, v: &mut [u8]) { match self.inner { OsGetrandomRng => getrandom_fill_bytes(v), OsReaderRng(ref mut rng) => rng.fill_bytes(v) } } } } #[cfg(target_os = "ios")] mod imp { extern crate libc; use old_io::{IoResult}; use marker::Sync; use mem; use os; use rand::Rng; use result::Result::{Ok}; use self::libc::{c_int, size_t}; use slice::SliceExt; /// A random number generator that retrieves randomness straight from /// the operating system. 
Platform sources: /// /// - Unix-like systems (Linux, Android, Mac OSX): read directly from /// `/dev/urandom`, or from `getrandom(2)` system call if available. /// - Windows: calls `CryptGenRandom`, using the default cryptographic /// service provider with the `PROV_RSA_FULL` type. /// - iOS: calls SecRandomCopyBytes as /dev/(u)random is sandboxed. /// /// This does not block. #[allow(missing_copy_implementations)] pub struct OsRng { // dummy field to ensure that this struct cannot be constructed outside of this module _dummy: (), } #[repr(C)] struct SecRandom; unsafe impl Sync for *const SecRandom {} #[allow(non_upper_case_globals)] static kSecRandomDefault: *const SecRandom = 0 as *const SecRandom; #[link(name = "Security", kind = "framework")] extern "C" { fn SecRandomCopyBytes(rnd: *const SecRandom, count: size_t, bytes: *mut u8) -> c_int; } impl OsRng { /// Create a new `OsRng`. pub fn new() -> IoResult<OsRng> { Ok(OsRng { _dummy: () }) } } impl Rng for OsRng { fn next_u32(&mut self) -> u32 { let mut v = [0u8; 4]; self.fill_bytes(&mut v); unsafe { mem::transmute(v) } } fn next_u64(&mut self) -> u64 { let mut v = [0u8; 8]; self.fill_bytes(&mut v); unsafe { mem::transmute(v) } } fn fill_bytes(&mut self, v: &mut [u8]) { let ret = unsafe { SecRandomCopyBytes(kSecRandomDefault, v.len() as size_t, v.as_mut_ptr()) }; if ret == -1 { panic!("couldn't generate random bytes: {}", os::last_os_error()); } } } } #[cfg(windows)] mod imp { extern crate libc; use old_io::{IoResult, IoError}; use mem; use ops::Drop; use os; use rand::Rng; use result::Result::{Ok, Err}; use self::libc::{DWORD, BYTE, LPCSTR, BOOL}; use self::libc::types::os::arch::extra::{LONG_PTR}; use slice::SliceExt; type HCRYPTPROV = LONG_PTR;
/// the operating system. Platform sources: /// /// - Unix-like systems (Linux, Android, Mac OSX): read directly from /// `/dev/urandom`, or from `getrandom(2)` system call if available. /// - Windows: calls `CryptGenRandom`, using the default cryptographic /// service provider with the `PROV_RSA_FULL` type. /// - iOS: calls SecRandomCopyBytes as /dev/(u)random is sandboxed. /// /// This does not block. pub struct OsRng { hcryptprov: HCRYPTPROV } static PROV_RSA_FULL: DWORD = 1; static CRYPT_SILENT: DWORD = 64; static CRYPT_VERIFYCONTEXT: DWORD = 0xF0000000; #[allow(non_snake_case)] extern "system" { fn CryptAcquireContextA(phProv: *mut HCRYPTPROV, pszContainer: LPCSTR, pszProvider: LPCSTR, dwProvType: DWORD, dwFlags: DWORD) -> BOOL; fn CryptGenRandom(hProv: HCRYPTPROV, dwLen: DWORD, pbBuffer: *mut BYTE) -> BOOL; fn CryptReleaseContext(hProv: HCRYPTPROV, dwFlags: DWORD) -> BOOL; } impl OsRng { /// Create a new `OsRng`. pub fn new() -> IoResult<OsRng> { let mut hcp = 0; let ret = unsafe { CryptAcquireContextA(&mut hcp, 0 as LPCSTR, 0 as LPCSTR, PROV_RSA_FULL, CRYPT_VERIFYCONTEXT | CRYPT_SILENT) }; if ret == 0 { Err(IoError::last_error()) } else { Ok(OsRng { hcryptprov: hcp }) } } } impl Rng for OsRng { fn next_u32(&mut self) -> u32 { let mut v = [0u8; 4]; self.fill_bytes(&mut v); unsafe { mem::transmute(v) } } fn next_u64(&mut self) -> u64 { let mut v = [0u8; 8]; self.fill_bytes(&mut v); unsafe { mem::transmute(v) } } fn fill_bytes(&mut self, v: &mut [u8]) { let ret = unsafe { CryptGenRandom(self.hcryptprov, v.len() as DWORD, v.as_mut_ptr()) }; if ret == 0 { panic!("couldn't generate random bytes: {}", os::last_os_error()); } } } impl Drop for OsRng { fn drop(&mut self) { let ret = unsafe { CryptReleaseContext(self.hcryptprov, 0) }; if ret == 0 { panic!("couldn't release context: {}", os::last_os_error()); } } } } #[cfg(test)] mod test { use prelude::v1::*; use sync::mpsc::channel; use rand::Rng; use super::OsRng; use thread::Thread; #[test] fn test_os_rng() { let 
mut r = OsRng::new().unwrap(); r.next_u32(); r.next_u64(); let mut v = [0u8; 1000]; r.fill_bytes(&mut v); } #[test] fn test_os_rng_tasks() { let mut txs = vec!(); for _ in 0u..20 { let (tx, rx) = channel(); txs.push(tx); Thread::spawn(move|| { // wait until all the tasks are ready to go. rx.recv().unwrap(); // deschedule to attempt to interleave things as much // as possible (XXX: is this a good test?) let mut r = OsRng::new().unwrap(); Thread::yield_now(); let mut v = [0u8; 1000]; for _ in 0u..100 { r.next_u32(); Thread::yield_now(); r.next_u64(); Thread::yield_now(); r.fill_bytes(&mut v); Thread::yield_now(); } }); } // start all the tasks for tx in &txs { tx.send(()).unwrap(); } } }
/// A random number generator that retrieves randomness straight from