77419
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 6 15:37:07 2020

@author: aoust
"""

import math

import numpy as np


class QuadraticPolynomial():
    """Quadratic polynomial stored as a sparse list of (i, j) index pairs and
    matching coefficients. The index -1 is used as a "no variable" placeholder,
    which lets linear and constant terms share the same representation."""

    def __init__(self, n, tuples, coefs):
        self.n = n
        assert len(tuples) == len(coefs)
        self.tuples = tuples
        self.coefs = coefs
        for (i, j) in tuples:
            assert i <= j

    def check(self):
        for (i, j) in self.tuples:
            assert i <= j
        if type(self.coefs) == list:
            self.coefs = np.array(self.coefs)

    def vpairs(self):
        # Yield only genuinely quadratic pairs (two distinct real variables).
        for (i, j) in self.tuples:
            if (i >= 0) and (i < j):
                yield i, j

    def scale_variables(self, tab):
        # Rescale each monomial by the scaling factors of the variables it
        # uses; -1 placeholder entries contribute no factor.
        for k in range(len(self.tuples)):
            i, j = self.tuples[k]
            factor = 1
            if i != -1:
                factor = factor * tab[i]
            if j != -1:
                factor = factor * tab[j]
            self.coefs[k] = self.coefs[k] * factor

    def scale_coefs(self):
        self.coefs = self.coefs / (np.linalg.norm(self.coefs, 2))

    def scale_coefs2(self):
        # Scale by a power of ten close to the coefficient norm; return the
        # factor so callers can undo the scaling.
        power = int(math.log10(np.linalg.norm(self.coefs, 2)))
        factor = 10**(power - 1)
        self.coefs = self.coefs / factor
        return factor

    def enumerate_triples(self):
        for k in range(len(self.tuples)):
            i, j = self.tuples[k]
            c = self.coefs[k]
            yield i, j, c

    def variables_list(self):
        set_of_variables = set()
        for (i, j) in self.tuples:
            if i != -1:
                set_of_variables.add(i)
            if j != -1:
                set_of_variables.add(j)
        res = list(set_of_variables)
        res.sort()
        return res

    def evaluation(self, x):
        # Note: x[i] is indexed directly, so terms using the -1 placeholder
        # pick up x[-1]; callers apparently append a 1 to x for such terms.
        S = 0
        for k in range(len(self.tuples)):
            i, j = self.tuples[k]
            S += x[i] * x[j] * self.coefs[k]
        return S
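A minimal usage sketch for the class above; the polynomial and evaluation point are illustrative, not from the source:

# Illustrative only: p(x) = x0^2 + 2*x0*x1 over n = 2 variables.
p = QuadraticPolynomial(2, [(0, 0), (0, 1)], [1.0, 2.0])
print(p.variables_list())        # [0, 1]
print(p.evaluation([1.0, 2.0]))  # 1*1 + 2*(1*2) = 5.0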
StarcoderdataPython
3398058
<reponame>patriotemeritus/LO-PHI
#!/usr/bin/env python
"""
    LOPHI Actuation Library

    @TODO: add mouse support

    Script syntax:
        SPECIAL: <LEFT_GUI> n
        TEXT: wget ftp://get_test_malware_here
        MOUSE: (TODO)
        SLEEP: (Time in seconds)

    Each line is a command message
    Header is necessary to differentiate text versus special keys

    (c) 2015 Massachusetts Institute of Technology
"""

# Native Libraries
import io
import re
import socket
import logging
import os
import string

# LO-PHI
import lophi.globals as G
import lophi.actuation.keycodes as keycodes


class KeypressGenerator(object):
    """
        This object takes care of generating input used for actuating
        physical and virtual machines using the keyboard and mouse
    """

    class REPLACE_STRINGS:
        """ Strings to replace in our scripts """
        ip = "%%IP%%"
        port = "%%PORT%%"
        username = "%%USERNAME%%"
        # NOTE: the dataset dump had redacted this literal; "%%PASSWORD%%" is
        # assumed by analogy with the other placeholders.
        password = "%%PASSWORD%%"
        dir = "%%DIR%%"
        # exe = "%%EXE%%"

    # comment regexp
    comment_pattern = re.compile(r'^#..*$')
    # cmd regexp
    cmd_pattern = re.compile(r'^(\w+):(..*)$')

    def __init__(self):
        if self.__class__ == KeypressGenerator:
            raise Exception("Abstract class initialized directly!")

    def get_ftp_script(self, profile, ftp_info, hit_enter=True):
        """
            Return a script for the actuation sensor that will run the
            contents in the given directory on a system of the given profile

            @param profile: Profile of the system that we are running code on
            @param ftp_info: (ip, user, password, directory)
            @return: list of actions that can be sent to control sensor
        """
        # Get our FTP info to fill in
        FTP_IP = ftp_info['ip']
        FTP_PORT = ftp_info['port']
        FTP_USER = ftp_info['user']
        FTP_PASSWORD = ftp_info['pass']
        FTP_DIR = ftp_info['dir']
        # FTP_EXE = ftp_info['exe']

        # Get the appropriate script
        script = None

        """ Windows """
        if profile in G.PROFILE_TO_SCRIPT:
            script = os.path.join(G.DIR_ROOT, G.DIR_ACTUATION_SCRIPTS,
                                  G.PROFILE_TO_SCRIPT[profile])

        if script is None:
            logging.error("No ftp execution script exists for %s" % profile)
            return None
        else:
            if not os.path.exists(script):
                logging.error("File (%s) does not exist!" % script)
                return None

        # open file
        f = open(script, 'r')
        SCRIPT = f.read()
        f.close()

        SCRIPT = SCRIPT.replace(self.REPLACE_STRINGS.ip, FTP_IP)
        SCRIPT = SCRIPT.replace(self.REPLACE_STRINGS.port, str(FTP_PORT))
        SCRIPT = SCRIPT.replace(self.REPLACE_STRINGS.username, FTP_USER)
        SCRIPT = SCRIPT.replace(self.REPLACE_STRINGS.password, FTP_PASSWORD)
        SCRIPT = SCRIPT.replace(self.REPLACE_STRINGS.dir, FTP_DIR)
        # SCRIPT = SCRIPT.replace(self.REPLACE_STRINGS.exe, FTP_EXE)

        script = self.text_to_script(SCRIPT)

        if hit_enter:
            script.append(self.parse_special("RETURN"))

        return script

    def text_to_script(self, script):
        """
            Parses specified file into a list of messages easily digested
            by python.

            @param filename: Filename of the script to be converted to
                python object.
        """
        # list to return
        msg_list = list()

        # Split on newlines
        script = script.split("\n")

        # parse the script line by line
        for line in script:
            # Parse our line
            msg = self.parse_line(line)
            if msg is not None:
                msg_list.append(msg)
                logging.debug(msg)

        return msg_list

    def parse_line(self, line):
        """
            Parses line into a msg

            parse commands are overloaded by subclasses

            @param line: Line from script file
            @return: Formatted output to send to SUT
        """
        # Ignore comments and empty lines
        if (line == '\n') or self.comment_pattern.match(line) or (line == ''):
            return None

        # Not a comment, try to match command
        cmd_match = self.cmd_pattern.match(line)

        if cmd_match:
            # Determine type of command
            groups = cmd_match.groups()
            cmd = groups[0]
            payload = groups[1].lstrip()

            # How should we treat this line?
            if cmd == 'SPECIAL':
                return self.parse_special(payload)
            elif cmd == 'TEXT':
                return self.parse_text(payload)
            elif cmd == 'MOUSE':
                return self.parse_mouse(payload)
            elif cmd == 'SLEEP':
                return self.parse_sleep(payload)
            else:
                # Not recognized, raise an Error
                raise Exception("Could not parse line '%s'" % line)

        # Not recognized, raise an Error
        raise Exception("Could not parse line '%s'" % line)

    """
        Abstract functions
    """

    def parse_special(self, payload):
        """ Returns message containing special key presses based on the payload. """
        raise NotImplementedError("ERROR: Unimplemented function.")

    def parse_text(self, payload):
        """ Returns message containing text to type via keyboard emulation. """
        raise NotImplementedError("ERROR: Unimplemented function.")

    def parse_mouse(self, payload):
        """ Returns message containing mouse commands. """
        raise NotImplementedError("ERROR: Unimplemented function.")


class KeypressGeneratorPhysical(KeypressGenerator):
    """
        This object takes care of generating input used for actuating
        physical and virtual machines using the keyboard and mouse
    """

    def _create_msg(self, cmd_type, payload):
        """
            Format command for arduino

            @param cmd_type: Type of command
            @param payload: Payload to send
            @return: Text to be sent over the network
        """
        return cmd_type + ':' + payload

    def parse_special(self, payload):
        """
            Returns message containing special key presses based on the
            payload.

            @param payload: Input from script, delimited by spaces
            @return: Appropriate input for Physical machines
        """
        # strip newline
        key_list = payload.rstrip().split()

        p = list()
        # convert special characters into Arduino codes
        for key in key_list:
            # Special Key?
            if key in keycodes.ARDUINO_KEYMAP:
                hex_key = keycodes.ARDUINO_KEYMAP[key][0][2:]
            # Normal Key?
            else:
                hex_key = hex(ord(key))[2:]

            # Arduino expects 5 bytes for each keypress
            hex_key += ' ' * (5 - len(hex_key))
            p.append(hex_key)

        # Join the list
        p = ''.join(p)

        return self._create_msg(G.SENSOR_CONTROL.KEY_SP_CMD, p)

    def parse_text(self, payload):
        """
            Returns message containing text to type via keyboard emulation
            on Arduino.

            @param payload: Input from script, delimited by spaces
            @return: Appropriate input for Physical or Virtual Machine
        """
        return self._create_msg(G.SENSOR_CONTROL.KEYB_CMD, payload)

    def parse_sleep(self, payload):
        """
            Returns message containing a time to sleep

            @param payload: Input from script, delimited by spaces
            @return: Appropriate input for Physical or Virtual Machine
        """
        return self._create_msg(G.SENSOR_CONTROL.KEY_SLEEP, payload)

    def parse_mouse(self, payload):
        """
            Returns message containing mouse commands.

            @todo: IMPLEMENT MOUSE FUNCTIONS
        """
        logging.error("Mouse Commands are not implemented yet.")
        return None


class KeypressGeneratorVirtual(KeypressGenerator):
    """
        This object takes care of generating input used for actuating
        physical and virtual machines using the keyboard and mouse
    """

    def parse_special(self, payload):
        """
            Returns message containing special key presses based on the
            payload.

            @param payload: Input from script, delimited by spaces
            @return: Appropriate input for Physical or Virtual Machine
        """
        # strip newline
        key_list = payload.rstrip().split()

        p = list()
        for key in key_list:
            # Everything is indexed by upper case
            # (was string.upper(key), which is Python 2 only)
            key = key.upper()

            # Do we have a keycode for this?
            if key in keycodes.KEYCODES:
                p.append(keycodes.KEYCODES[key])
            else:
                logging.error("Could not find %s in our keycodes" % key)

        return [G.SENSOR_CONTROL.KEY_SP_CMD, p]

    def parse_text(self, payload):
        """
            Returns message containing text to type via keyboard emulation
            on Arduino.

            @param payload: Input from script, delimited by spaces
            @return: Appropriate input for Physical or Virtual Machine
        """
        return [G.SENSOR_CONTROL.KEYB_CMD, keycodes.get_codes(payload)]

    def parse_sleep(self, payload):
        """
            Returns message containing time to sleep

            @param payload: Input from script, delimited by spaces
            @return: Appropriate input for Physical or Virtual Machine
        """
        return [G.SENSOR_CONTROL.KEY_SLEEP, payload]

    def parse_mouse(self, payload):
        """
            Returns message containing mouse commands.

            @todo: IMPLEMENT MOUSE FUNCTIONS
        """
        logging.error("Mouse Commands are not implemented yet.")
        return None
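A short sketch of how a script like the one in the module docstring flows through text_to_script(); the script content is illustrative and the printed values depend on the lophi constants:

# Illustrative only: parse a two-line actuation script with the physical
# (Arduino) generator; each line becomes one "CMD:payload" message.
gen = KeypressGeneratorPhysical()
script = "TEXT: wget ftp://get_test_malware_here\nSLEEP: 5"
for msg in gen.text_to_script(script):
    print(msg)  # e.g. "<KEYB_CMD>:wget ..." then "<KEY_SLEEP>:5"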
StarcoderdataPython
1605854
# NOTE: this file is a fragment; `runner`, `NanoXplore`, `Vivado`, `helpers`,
# `NX_PATH`, `FIR_FILES`, `TOP_FILES` and `FIR_LIBS` are assumed to be
# provided by the surrounding project.
import glob
import logging
import os

PART_V = 'xc7k410tfbg900-1'
PART_N = 'NG-MEDIUM'


@runner(NanoXplore, single_thread=True)
def nx_math_runner():
    for n in (8, 16, 32, 64, 128, 256):
        yield dict(name='add%d' % n, part=PART_N,
                   files=('../vhdl/add_nx.vhd',),
                   generics={'N': n}, path=NX_PATH)


@runner(Vivado)
def vivado_math_runner():
    for n in (8, 16, 32, 64, 128, 256):
        yield dict(name='add%d' % n, part=PART_V,
                   files=('../vhdl/add.vhd',),
                   generics={'N': n})


@runner(Vivado)
def vivado_iscas89_runner():
    """ Set for all .vhd files in ../iscas89/vhdl/ """
    files = list(helpers.natural_sort(glob.iglob('../iscas89/vhdl/*.vhd')))
    logging.debug('ISCAS89 files: %r', files)
    for file in files:
        name = os.path.splitext(os.path.basename(file))[0]
        yield dict(name=name, top=name, part=PART_V, files=[file])


@runner(NanoXplore, single_thread=True)
def nx_iscas89_runner():
    """ Set for all .vhd files in ../iscas89/vhdl/ """
    files = list(helpers.natural_sort(glob.iglob('../iscas89/vhdl/*.vhd')))
    logging.debug('ISCAS89 files: %r', files)
    for file in files:
        name = os.path.splitext(os.path.basename(file))[0]
        yield dict(name=name, part=PART_N, top=name, files=(file,),
                   path=NX_PATH)


@runner(Vivado)
def vivado_fir_s832_runner():
    """
    Complex example with multiple control structures.

    This runner was used to generate the results in the thesis. The reason
    for the complexity is that we didn't know how long the tasks would take.
    With limited compute time available this prioritizes some data points
    over others.

    Priority high -> low:
        1. 1 -> 25 FIR filters with 1% -> 51% background filler in steps of 10%
        2. 1 -> 25 FIR filters with 1% -> 51% background filler in steps of 5%
        3. 1 -> 25 FIR filters with 1% -> 71% background filler in steps of 10%
        4. 1 -> 25 FIR filters with 1% -> 76% background filler in steps of 5%
        5. 1 -> 50 FIR filters with 1% -> 51% background filler in steps of 10%
        6. 1 -> 50 FIR filters with 1% -> 51% background filler in steps of 5%

    All combinations with an expected fill rate over 90% are skipped.
    The FIR filter takes 2.3%, the filler 0.1% per instance.
    """
    # List of files we need.
    files = ['../iscas89/vhdl/s832.vhd'] + FIR_FILES + TOP_FILES
    logging.debug('vivado_fir_s832_runner files: %r', files)

    pairs = []
    # NA = number of design A = FIR filter
    # NB = number of design B = Filler
    for na in range(1, 26):
        for nb in range(10, 511, 100):
            if (na, nb) not in pairs:
                pairs.append((na, nb))
    for na in range(1, 26):
        for nb in range(10, 511, 50):
            if (na, nb) not in pairs:
                pairs.append((na, nb))
    for na in range(1, 26):
        for nb in range(10, 711, 100):
            if (na, nb) not in pairs:
                pairs.append((na, nb))
    for na in range(1, 26):
        for nb in range(10, 761, 50):
            if (na, nb) not in pairs:
                pairs.append((na, nb))
    for na in range(1, 51):
        for nb in range(10, 511, 100):
            if (na, nb) not in pairs:
                pairs.append((na, nb))
    for na in range(1, 51):
        for nb in range(10, 511, 50):
            if (na, nb) not in pairs:
                pairs.append((na, nb))

    logging.info('Queue length without duplicates: %d', len(pairs))

    for na, nb in pairs:
        if 2.3 * na + 0.1 * nb > 90:
            continue
        name = '%dxFIR_%dxS832' % (na, nb)
        yield dict(name=name, top='top', part=PART_V, files=files,
                   libraries=FIR_LIBS,
                   generics={'TAPS': 129, 'COEF_BW': 16, 'DATA_BW': 16,
                             'N_A': na, 'N_B': nb})
StarcoderdataPython
3244855
import matplotlib.pyplot as plt
import pandas as pd
import pytask
import seaborn as sns

from src.config import BLD

_PARAMETRIZATION = [
    (
        BLD / "contact_models" / "age_assort_params" / "other_non_recurrent.pkl",
        "assortative_matching_other_non_recurrent_age_group",
        BLD / "figures" / "data" / "assortativity_other_non_recurrent.pdf",
    ),
    (
        BLD / "contact_models" / "age_assort_params" / "work_non_recurrent.pkl",
        "assortative_matching_work_non_recurrent_age_group",
        BLD / "figures" / "data" / "assortativity_work_non_recurrent.pdf",
    ),
]


@pytask.mark.parametrize("depends_on, loc, produces", _PARAMETRIZATION)
def task_create_assortativity_heatmap(depends_on, loc, produces):
    sr = pd.read_pickle(depends_on)
    fig, ax = _create_heatmap(sr, loc)
    fig.tight_layout()
    fig.savefig(produces)
    plt.close()


def _create_heatmap(sr, loc):
    probs = sr.unstack().loc[loc]
    probs.index.name = "age group"
    probs.columns.name = "age group of contact"
    if "work" in loc:
        non_working_groups = ["0-9", "70-79", "80-100"]
        probs = probs.drop(non_working_groups, axis=1).drop(
            non_working_groups, axis=0)
        assert (probs.sum(axis=1) > 0.85).all()
    fig, ax = plt.subplots(figsize=(10, 8))
    ax = sns.heatmap(
        probs, annot=True, fmt=".2f", cbar=False, cmap="coolwarm", center=0,
        ax=ax
    )
    ax.set_yticklabels(ax.get_yticklabels(), rotation=0)
    return fig, ax
StarcoderdataPython
178877
<reponame>karolinanikolova/SoftUni-Software-Engineering
# 3. Sum of Numbers
# Write a program that reads an integer from the console and then an integer
# on each subsequent line, until their sum becomes greater than or equal to
# the initial number. Once reading is done, print the sum of the entered
# numbers.

number = int(input())
sum_numbers = 0

while sum_numbers < number:
    input_number = int(input())
    sum_numbers += input_number
else:
    # The else branch runs once the loop condition becomes false.
    print(sum_numbers)
StarcoderdataPython
28573
<gh_stars>0
import torch
import tvm
from tvm import autotvm
from tvm import relay
from tvm.contrib import download
from tvm.contrib.debugger import debug_runtime
from PIL import Image
import matplotlib.pyplot as plt
import numpy as np
import argparse
import os
from os.path import join, isfile
import sys
import json, requests
from io import BytesIO
import re

import mixed_precision_models.quantized_resnet_v1 as quantized_resnet_v1
from mixed_precision_models.layers import QConfig, QuantizeContext
import hawq_utils_resnet

import torch.cuda.profiler as profiler
import pyprof
pyprof.init()

import logging
logging.basicConfig(level=logging.CRITICAL)

parser = argparse.ArgumentParser(description='Resnet accuracy test',
                                 formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--model-dir', required=True, help='Model data directory')
parser.add_argument('--debug-unit', default=None,
                    help='Debug specific unit input, compare the unit input to the pytorch result (stage1_unit1, stage1_unit2 ...)')
parser.add_argument('--rounding', default='TONEAREST',
                    help='Round scheme (TONEAREST, TRUNCATE)')
parser.add_argument('--num-classes', type=int, default=1000,
                    help='Total number of classes')
parser.add_argument('--arch', default='resnet50', help='resnet architecture')
args = parser.parse_args()

###############################################################################
# Set target device
# -----------------
TARGET_NAME = 'cuda'
CTX = tvm.context(TARGET_NAME, 0)
#CTX = tvm.gpu(0)

###############################################################################
# Load params
# -----------------
if args.arch == 'resnet50':
    isRes18 = False
    if args.num_classes == 10:
        # Cifar 10
        num_stages = 3
        units = [3, 4, 6]
        print("Use Cifar 10")
    else:
        num_stages = 4
        units = [3, 4, 6, 3]
elif args.arch == 'resnet18':
    isRes18 = True
    num_stages = 4
    units = [2, 2, 2, 2]
else:
    assert 0

weights = np.load(os.path.join(args.model_dir, "weights.npy"), allow_pickle=True)[()]
bias = np.load(os.path.join(args.model_dir, "bias.npy"), allow_pickle=True)[()]

hawq_utils_resnet.load_qconfig("uint4", "int4", num_stages, units,
                               file_name=os.path.join(args.model_dir, "quantized_checkpoint.pth.tar"),
                               isRes18=isRes18)
#hawq_utils_resnet50.load_qconfig("int8", "int8", num_stages, units, file_name=os.path.join(args.model_dir, "quantized_checkpoint.pth.tar"))

input_image = np.load(os.path.join(args.model_dir, "input_image_batch_1.npy"))
input_image = input_image / QuantizeContext.qconfig_dict["conv0_qconfig"].input_scale
input_image = np.clip(input_image, -128, 127)

if args.rounding == "TONEAREST":
    input_image = np.round(input_image)
elif args.rounding == "TRUNCATE":
    input_image = np.trunc(input_image)

input_image = input_image.astype("int8")

params = {**weights, **bias}

###############################################################################
# Load model
# -----------------
batch_size = 8
shape = list(input_image.shape)
image_shape = (shape[3], shape[1], shape[2])
input_dtype = 'int8'
model_type = "int4"
num_layers = 18 if isRes18 else 50
data_layout = "NHWC"
kernel_layout = "HWOI"

func, _ = quantized_resnet_v1.get_workload(batch_size=batch_size,
                                           image_shape=image_shape,
                                           num_classes=args.num_classes,
                                           num_layers=num_layers,
                                           dtype=input_dtype,
                                           data_layout=data_layout,
                                           kernel_layout=kernel_layout,
                                           with_bn=False,
                                           debug_unit=args.debug_unit,
                                           rounding=args.rounding)

# Download ImageNet categories
categ_url = "https://github.com/uwsaml/web-data/raw/main/vta/models/"
categ_fn = "synset.txt"
download.download(join(categ_url, categ_fn), categ_fn)
synset = eval(open(categ_fn).read())

image = input_image
input_data = np.repeat(image, batch_size, axis=0)

###############################################################################
# Run the model
# -----------------
log_filename = "/home/zach_zheng/hawq_tvm/mixed_precision_models/tuning_logs/resnet%d_%s_%s_batch_%d.log" % (num_layers, data_layout, model_type, batch_size)

if not os.path.exists(log_filename):
    log_filename = None
else:
    print("Apply tuning log " + log_filename)

with autotvm.apply_history_best(log_filename):
    with relay.build_config(opt_level=3):
        print("building relay")
        graph, lib, params = relay.build(func, target=TARGET_NAME, params=params)

if args.debug_unit is not None:
    m = tvm.contrib.graph_runtime.create(graph, lib, CTX)
    #m = tvm.contrib.graph_executor.create(graph, lib, CTX)
    # Set the network parameters and inputs
    m.set_input(**params)
    m.set_input('data', input_data)
    m.run()

    np.set_printoptions(threshold=sys.maxsize)
    out = m.get_output(0).asnumpy()

    if not os.path.exists(os.path.join(args.model_dir, "tvm_result")):
        os.mkdir(os.path.join(args.model_dir, "tvm_result"))

    unit_str_regex = re.search(r'stage(\d)_unit(\d)', args.debug_unit)
    if unit_str_regex is not None:
        unit_str = unit_str_regex.group(0)
    else:
        unit_str = ""

    if args.debug_unit == "fc_input":
        actual_result = out
        np.save(os.path.join(args.model_dir, "tvm_result/fc_input_int8.npy"), actual_result[0])
        golden_result = np.load(os.path.join(args.model_dir, "pytorch_result/fc_input_int8.npy")).astype("int8")
    elif args.debug_unit == "fc_output":
        golden_result = np.load(os.path.join(args.model_dir, "pytorch_result/fc_output_int32.npy"))
        actual_result = out
        np.save(os.path.join(args.model_dir, "tvm_result/fc_output_int32.npy"), actual_result[0])
        # golden_result = np.load(os.path.join(args.model_dir, "pytorch_result/fc_output_float32.npy"))#.astype("int32")
    elif args.debug_unit == "avg_pool":
        actual_result = out
        np.save(os.path.join(args.model_dir, "tvm_result/avg_pool_int32.npy"), actual_result[0])
        golden_result = np.load(os.path.join(args.model_dir, "pytorch_result/avg_pool_int32.npy")).astype("int32")
    elif args.debug_unit == "softmax":
        actual_result = out
        np.save(os.path.join(args.model_dir, "tvm_result/avg_pool_int32.npy"), actual_result[0])
        golden_result = np.load(os.path.join(args.model_dir, "pytorch_result/avg_pool_int32.npy")).astype("int32")
    elif args.debug_unit == unit_str + "_output":
        actual_result = out * QuantizeContext.qconfig_dict["%s_qconfig_add" % unit_str].output_scale
        # actual_result = out
        np.save(os.path.join(args.model_dir, "tvm_result/%s_output_int32.npy" % unit_str), actual_result[0])
        golden_result = np.load(os.path.join(args.model_dir, "pytorch_result/%s_output_float32.npy" % unit_str))
    elif args.debug_unit == unit_str + "_input":
        actual_result = hawq_utils_resnet.unpack_int4_to_int32(out)
        np.save(os.path.join(args.model_dir, "tvm_result/%s_input_int4.npy" % unit_str), actual_result[0])
        golden_result = np.load(os.path.join(args.model_dir, "pytorch_result/%s_input_int4.npy" % unit_str)).astype("int32")
    else:
        print("Error: Unsupported debug unit.")

    print("Above is the PyTorch result; below is the TVM result")
    tvm.testing.assert_allclose(golden_result, actual_result[0])
    print(args.debug_unit + " is 100% matched!")
else:
    module = tvm.contrib.graph_runtime.create(graph, lib, ctx=CTX)
    #module = tvm.contrib.graph_executor.create(graph, lib, ctx=CTX)
    module.set_input(**params)
    module.set_input('data', input_data)
    module.run()

    tvm_output = module.get_output(0)
    print(tvm_output.shape)

    for b in range(batch_size):
        top_categories = np.argsort(tvm_output.asnumpy()[b])
        # Report top-5 classification results
        print("\n prediction for sample {}".format(b))
        print("\t#1:", synset[top_categories[-1]])
        print("\t#2:", synset[top_categories[-2]])
        print("\t#3:", synset[top_categories[-3]])
        print("\t#4:", synset[top_categories[-4]])
        print("\t#5:", synset[top_categories[-5]])
StarcoderdataPython
44478
<gh_stars>0
#!/usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from __future__ import print_function

import difflib
import logging
import os
import re
import textwrap


def ProcessIncludesInContent(lines, fname):
  '''ProcessIncludesInContent expands any INCLUDE directives found in a
  markdown file.

  Parameters:
    lines : a list of strings with line endings that represent the contents
        of the Markdown file.
    fname : the name of the file including the path if applicable. Includes
        are resolved relative to this path.

  Include directives take the form:

      <!-- INCLUDE <relative_path> [(<line_count> lines)] [fenced as <fence_type>] -->

  The parts of the directive in brackets are optional. The <relative_path> is
  an unquoted string with no spaces in it that specifies the relative path to
  the markdown file to be included. The lack of spaces in the paths can be
  easily remedied, but is not expected to be an issue for our use case in
  CEL.

  <fence_type> can be used to indicate that the content of the file should be
  included as a fenced block of the specified type. Note that the type is
  mandatory for the fence specification. If specified, the contents of the
  file will be included in the file as follows:

      ``` <fence_type>
      <contents of the file>
      ```

  Once expanded, the "(<line_count> lines)" portion will be inserted to
  indicate how many lines were added during the expansion. Do not change the
  line count or the included contents.
  '''

  class Include(object):

    def __init__(self, fn, lc, at, ft):
      self.filename = fn
      self.line_count = int(lc) if lc else 0
      self.at = at
      self.fence_type = ft

  dirname = os.path.dirname(fname)
  replacements = []
  for i, l in enumerate(lines):
    if l.startswith('<!-- INCLUDE '):
      m = re.match(
          r'<!-- INCLUDE +(?P<fn>[^ ]*) +(?:\((?P<lc>\d+) lines\) +|)(?:fenced as (?P<ft>\w+) +|)-->',
          l)
      if m is None:
        raise Exception("improperly formatted INCLUDE line: {}".format(l))
      replacements.append(
          Include(m.group('fn'), m.group('lc'), i, m.group('ft')))

  for r in reversed(replacements):
    newlines = []
    with open(os.path.join(dirname, r.filename), 'r') as f:
      newlines = f.readlines()

    for l in newlines:
      if l.startswith('<!-- INCLUDE '):
        raise Exception('''Recursive includes are not supported.''')

    if r.fence_type is not None:
      newlines[0:0] = ['``` {}\n'.format(r.fence_type)]
      newlines.append('```\n')

    lines[r.at] = '<!-- INCLUDE {} ({} lines){} -->\n'.format(
        r.filename, len(newlines),
        ' fenced as {}'.format(r.fence_type) if r.fence_type else '')
    lines[r.at + 1:r.at + 1 + r.line_count] = newlines


def FixOldStyleLinks(lines, fname):
  '''FixOldStyleLinks replaces links of the form [foo] with [foo][]

  The former style is accepted by Gitiles, but is not valid CommonMark. Hence
  this function replaces it with the equivalent latter form. This replacement
  ensures that the links are correctly handled by editors and viewers other
  than Gitiles.
  '''
  bad_link_re = re.compile(r'(^|[^]])\[(?P<ref>[^]]+)\](?=$|[^[:(])')
  in_pre = False
  for i, l in enumerate(lines):
    if '```' in l:
      in_pre = not in_pre

    if in_pre:
      continue

    if "[TOC]" in l:
      continue

    lines[i] = re.sub(bad_link_re, r'\1[\g<ref>][]', l)


def FixTrailingWhitespace(lines, fname):
  '''FixTrailingWhitespace does what it says and removes trailing whitespace.
  '''
  trailing_ws_re = re.compile(r'\s+(?=\n)$')
  for i, l in enumerate(lines):
    lines[i] = re.sub(trailing_ws_re, '', l)


def CheckLinksInContent(lines, fname):
  '''CheckLinksInContent verifies that reference style links are defined in
  the same document.

  Reference style links are links of the form [foo][], or [Foo][foo] where
  [foo] needs to be defined somewhere else in the document as:

      [foo]: https://example.com/foo

  This function raises an exception with a suitable description if a
  reference style link is not defined.
  '''
  links = set()
  link_re = re.compile(r"\[(?P<ref>[^]]+)\]: ")
  for l in lines:
    m = link_re.match(l)
    if m is None:
      continue
    links.add(m.group('ref'))

  whole_thing = re.sub(r'\s+', ' ', ''.join(lines), count=0)
  whole_thing = re.sub(r'`[^`]*`', '', whole_thing, count=0)

  not_found = set()
  for m in re.finditer(r'\[(?P<a>[^]]*)\]\[(?P<ref>[^]]*)\]', whole_thing):
    ref = m.group('ref') if m.group('ref') != '' else m.group('a')
    if ref not in links:
      not_found.add(ref)

  if len(not_found) != 0:
    raise Exception(
        textwrap.dedent('''\
            The following list of links were unresolved in {}:
            {}
            '''.format(fname, ','.join(list(not_found)))))


def FormatMarkdown(fname, dry_run=False):
  '''FormatMarkdown resolves any includes, fixes links, corrects trailing
  whitespace, and verifies that reference style links are defined in the
  Markdown document specified by the filename in |fname|.

  If |dry_run| is True, then checks whether the contents in the file at
  |fname| would be modified by the function.

  Returns True if the file at |fname| was modified (or in the case of
  |dry_run==True|, would be modified). False otherwise. A return value of
  False can be safely assumed to mean that the file contents were not
  modified.

  For additional information about the changes that are applied see the
  documentation in:

      ProcessIncludesInContent()
      FixOldStyleLinks()
      FixTrailingWhitespace()
      CheckLinksInContent()
  '''
  lines = []
  with open(fname, 'r') as f:
    lines = f.readlines()

  for l in lines:
    if "-- Skip validation --" in l:
      logging.info("Skipping %s due to skip validation directive", fname)
      return False

  unmodified = lines[:]

  ProcessIncludesInContent(lines, fname)
  FixOldStyleLinks(lines, fname)
  FixTrailingWhitespace(lines, fname)
  CheckLinksInContent(lines, fname)

  if lines == unmodified:
    logging.info("%s is already correctly formatted", fname)
    return False

  if dry_run:
    print("Would write %s with the following changes:" % fname)
    for l in difflib.unified_diff(unmodified, lines, fname + " (original)",
                                  fname + " (modified)"):
      print(l, end='')
    print('\n')
  else:
    logging.info("Writing %s", fname)
    with open(fname, 'w') as f:
      f.writelines(lines)
  return True
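A hedged before/after sketch of the INCLUDE expansion performed by ProcessIncludesInContent above; the file names are illustrative:

Before (in doc.md, where snippet.sh contains two lines):
    <!-- INCLUDE snippet.sh fenced as bash -->
After expansion (two content lines plus the fence pair, hence "(4 lines)"):
    <!-- INCLUDE snippet.sh (4 lines) fenced as bash -->
    ``` bash
    echo hello
    echo world
    ```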
StarcoderdataPython
3283717
import objc
import sys
from Foundation import *
from AppKit import *
from PyObjCTools import NibClassBuilder, AppHelper

import pyRotateDisplayAPIMac as dispAPI
import pyRotateDisplaySettingsMac as settingsWindow
import pyRotateDisplayArduinoAPI as arduinoAPI
import pyRotateDisplayStatusBar as statusBar

if __name__ == '__main__':
    import itertools, glob

    disp = dispAPI.Display()
    arduino = arduinoAPI.Arduino(disp.arduinoCallback, None)
    settings = settingsWindow.DisplaySelectWindow(sys.argv, disp, arduino)

    app = NSApplication.sharedApplication()
    delegate = statusBar.StatusBar.alloc().init()
    app.setDelegate_(delegate)
    delegate.setCallBack(settings.show)
    delegate.setQuitCallBack(arduino.stopAndClose)
    AppHelper.runEventLoop()
StarcoderdataPython
3368680
<reponame>oskar456/spotzurnal
import os
import os.path
import json

import spotipy
from spotipy import oauth2
import click


def handle_oauth(credfile, username=None, scope=""):
    save_creds = False
    try:
        with open(credfile) as f:
            creds = json.load(f)
    except IOError:
        creds = {}
        save_creds = True
    if "client_id" not in creds:
        creds["client_id"] = input("Enter Spotify App Client ID: ")
        save_creds = True
    if "client_secret" not in creds:
        creds["client_secret"] = input("Enter Spotify App Client Secret: ")
        save_creds = True
    if "redirect_uri" not in creds:
        creds["redirect_uri"] = "http://localhost:8080/"
        click.secho(
            f"Please add redirect URI \"{creds['redirect_uri']}\" "
            "to the white-list in the Spotify App settings.",
            bold=True,
        )
        save_creds = True
    sp_oauth = oauth2.SpotifyOAuth(
        scope=scope,
        **{
            k: v
            for k, v in creds.items()
            if k in [
                "client_id",
                "client_secret",
                "redirect_uri",
            ]
        },
    )
    if "refresh_token" not in creds:
        auth_url = sp_oauth.get_authorize_url()
        print(f"\nPlease navigate to: {auth_url}")
        response = input("Enter the URL you were redirected to: ")
        code = sp_oauth.parse_response_code(response)
        token_info = sp_oauth.get_access_token(code)
        creds["refresh_token"] = token_info["refresh_token"]
        save_creds = True
    else:
        token_info = sp_oauth.refresh_access_token(creds["refresh_token"])
    if "username" not in creds:
        creds["username"] = username or input("Enter Spotify User name: ")
        save_creds = True
    if save_creds:
        os.makedirs(os.path.dirname(credfile), mode=0o700, exist_ok=True)
        with open(credfile, "w") as f:
            json.dump(creds, f)
    return username or creds["username"], token_info["access_token"]


class Spotify(spotipy.Spotify):
    def __init__(
        self,
        credfile="clientid.json",
        username=None,
        scope="playlist-modify-public",
    ):
        self.user, token = handle_oauth(credfile, username, scope)
        super().__init__(auth=token)

    def add_tracks_to_playlist(
        self,
        trackids,
        username="0skat-cz",
        playlistid="2wrkilEEx7SD0OnyZwtGk8",
    ):
        if not isinstance(trackids, list):
            trackids = [trackids, ]
        self.user_playlist_add_tracks(username, playlistid, trackids)

    def get_all_data(self, func, *args, **kwargs):
        r = func(*args, **kwargs)
        yield from r["items"]
        while r["next"]:
            r = self.next(r)
            yield from r["items"]

    @staticmethod
    def put_all_data(func, data, *args, limit=100, offset=0, **kwargs):
        for i in range(offset, len(data), limit):
            func(*args, data[i:i + limit], **kwargs)

    def get_or_create_playlist(self, name, description=""):
        playlists = self.get_all_data(self.current_user_playlists, limit=50)
        for p in playlists:
            if p["name"] == name:
                return p["id"]
        r = self.user_playlist_create(
            self.user,
            name,
        )
        return r["id"]
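A minimal usage sketch of the Spotify wrapper above; the playlist name and track URI are illustrative:

# Illustrative only: authenticate, fetch-or-create a playlist, add one track.
sp = Spotify(credfile="clientid.json")
playlist_id = sp.get_or_create_playlist("Test playlist")
sp.add_tracks_to_playlist(["spotify:track:4uLU6hMCjMI75M1A2tKUQC"],
                          username=sp.user, playlistid=playlist_id)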
StarcoderdataPython
3329055
<gh_stars>10-100
# Copyright 2020 IBM Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import asyncio
import time

from kubesat.base_service import BaseService

SERVICE_TYPE = 'hello'
hello = BaseService(service_type=SERVICE_TYPE, config_path='./service.json')


@hello.schedule_callback(2)  # Send a hello message every two seconds.
async def send_hello_message(nats, shared_storage, logger):
    """
    Send a hello message.

    Args:
        nats (NatsHandler): connection to nats used to send and receive messages
        shared_storage (dict): dictionary that stores local data for the service
        logger (NatsLogger): logger that can be used to communicate the state of the system
    """
    message = nats.create_message({"message": "hello"})
    # Send a hello message to public.hello subject
    await nats.send_message("public.hello", message)
    print(f"SEND : {message.encode_json()}")


@hello.subscribe_nats_callback("public.hello")  # Subscribe public.hello subject
async def receive_ping_message(message, nats, shared_storage, logger):
    message_json = message.encode_json()
    print(f"RECEIVED : {message_json}")
    shared_storage["last_sent"] = message_json['time_sent']


@hello.startup_callback  # Invoke the startup function at the start time
async def startup(nats_handler, shared_storage, logger):
    print(f"{SERVICE_TYPE} in {hello.sender_id} has started.")


if __name__ == '__main__':
    # Start the hello service
    hello.run()
StarcoderdataPython
1636202
import serial
import matplotlib.pyplot as plt
import time
import statistics

moment = time.strftime("%Y-%b-%d__%Hh%Mm%Ss", time.localtime())

rawdata = []
count = 0
fileName = 'data_' + moment + '.txt'

# Number of reads before stopping; with the 1-second serial timeout below
# this keeps the program running for roughly 16-17 minutes.
timeOut = 1000

# connect to the arduino
try:
    ard = serial.Serial('COM4', baudrate=9600, timeout=1)
except:
    print('Serial not found!')

# get a list of data
while count < timeOut:
    arduinoData = ard.readline()
    rawdata.append(str(arduinoData))
    count += 1

# clean the data from "b'xxx;xx\n\r'" to "xxx;xx"
def clean(list):
    newList = []
    for i in range(len(list)):
        # starting from the third character of the string
        temp = list[i][2:]
        # ending at the fifth character from the end of the string
        newList.append(temp[:-5])
    return newList

cleandata = clean(rawdata)

# writing the data from the list to the file
def write(list):
    file = open(fileName, 'w')
    for i in range(len(list)):
        file.write(list[i] + '\n')
    file.close()

write(cleandata)
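The script imports matplotlib and statistics but never uses them; below is a hedged sketch of the presumably intended post-processing. The two-field "xxx;xx" layout is an assumption taken from the clean() comment above.

# Illustrative only: parse "xxx;xx" lines into two numeric series,
# report a summary statistic, and plot both series.
series_a, series_b = [], []
for line in cleandata:
    if ';' in line:
        a, b = line.split(';', 1)
        series_a.append(float(a))
        series_b.append(float(b))
print('mean of first field:', statistics.mean(series_a))
plt.plot(series_a)
plt.plot(series_b)
plt.show()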
StarcoderdataPython
1676029
#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# coding: utf-8

"""
    Polyaxon SDKs and REST API specification.

    Polyaxon SDKs and REST API specification.  # noqa: E501

    The version of the OpenAPI document: 1.10.1
    Contact: <EMAIL>
    Generated by: https://openapi-generator.tech
"""

import pprint
import re  # noqa: F401

import six

from polyaxon_sdk.configuration import Configuration


class V1HpParams(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    openapi_types = {
        'choice': 'V1HpChoice',
        'pchoice': 'V1HpPChoice',
        'range': 'V1HpRange',
        'linspace': 'V1HpLinSpace',
        'logspace': 'V1HpLogSpace',
        'geomspace': 'V1HpGeomSpace',
        'uniform': 'V1HpUniform',
        'quniform': 'V1HpQUniform',
        'loguniform': 'V1HpLogUniform',
        'qloguniform': 'V1HpQLogUniform',
        'normal': 'V1HpNormal',
        'qnormal': 'V1HpQNormal',
        'lognormal': 'V1HpLogNormal',
        'qlognormal': 'V1HpQLogNormal'
    }

    attribute_map = {
        'choice': 'choice',
        'pchoice': 'pchoice',
        'range': 'range',
        'linspace': 'linspace',
        'logspace': 'logspace',
        'geomspace': 'geomspace',
        'uniform': 'uniform',
        'quniform': 'quniform',
        'loguniform': 'loguniform',
        'qloguniform': 'qloguniform',
        'normal': 'normal',
        'qnormal': 'qnormal',
        'lognormal': 'lognormal',
        'qlognormal': 'qlognormal'
    }

    def __init__(self, choice=None, pchoice=None, range=None, linspace=None,
                 logspace=None, geomspace=None, uniform=None, quniform=None,
                 loguniform=None, qloguniform=None, normal=None, qnormal=None,
                 lognormal=None, qlognormal=None,
                 local_vars_configuration=None):  # noqa: E501
        """V1HpParams - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._choice = None
        self._pchoice = None
        self._range = None
        self._linspace = None
        self._logspace = None
        self._geomspace = None
        self._uniform = None
        self._quniform = None
        self._loguniform = None
        self._qloguniform = None
        self._normal = None
        self._qnormal = None
        self._lognormal = None
        self._qlognormal = None
        self.discriminator = None

        if choice is not None:
            self.choice = choice
        if pchoice is not None:
            self.pchoice = pchoice
        if range is not None:
            self.range = range
        if linspace is not None:
            self.linspace = linspace
        if logspace is not None:
            self.logspace = logspace
        if geomspace is not None:
            self.geomspace = geomspace
        if uniform is not None:
            self.uniform = uniform
        if quniform is not None:
            self.quniform = quniform
        if loguniform is not None:
            self.loguniform = loguniform
        if qloguniform is not None:
            self.qloguniform = qloguniform
        if normal is not None:
            self.normal = normal
        if qnormal is not None:
            self.qnormal = qnormal
        if lognormal is not None:
            self.lognormal = lognormal
        if qlognormal is not None:
            self.qlognormal = qlognormal

    @property
    def choice(self):
        """Gets the choice of this V1HpParams.  # noqa: E501

        :return: The choice of this V1HpParams.  # noqa: E501
        :rtype: V1HpChoice
        """
        return self._choice

    @choice.setter
    def choice(self, choice):
        """Sets the choice of this V1HpParams.

        :param choice: The choice of this V1HpParams.  # noqa: E501
        :type: V1HpChoice
        """
        self._choice = choice

    @property
    def pchoice(self):
        """Gets the pchoice of this V1HpParams.  # noqa: E501

        :return: The pchoice of this V1HpParams.  # noqa: E501
        :rtype: V1HpPChoice
        """
        return self._pchoice

    @pchoice.setter
    def pchoice(self, pchoice):
        """Sets the pchoice of this V1HpParams.

        :param pchoice: The pchoice of this V1HpParams.  # noqa: E501
        :type: V1HpPChoice
        """
        self._pchoice = pchoice

    @property
    def range(self):
        """Gets the range of this V1HpParams.  # noqa: E501

        :return: The range of this V1HpParams.  # noqa: E501
        :rtype: V1HpRange
        """
        return self._range

    @range.setter
    def range(self, range):
        """Sets the range of this V1HpParams.

        :param range: The range of this V1HpParams.  # noqa: E501
        :type: V1HpRange
        """
        self._range = range

    @property
    def linspace(self):
        """Gets the linspace of this V1HpParams.  # noqa: E501

        :return: The linspace of this V1HpParams.  # noqa: E501
        :rtype: V1HpLinSpace
        """
        return self._linspace

    @linspace.setter
    def linspace(self, linspace):
        """Sets the linspace of this V1HpParams.

        :param linspace: The linspace of this V1HpParams.  # noqa: E501
        :type: V1HpLinSpace
        """
        self._linspace = linspace

    @property
    def logspace(self):
        """Gets the logspace of this V1HpParams.  # noqa: E501

        :return: The logspace of this V1HpParams.  # noqa: E501
        :rtype: V1HpLogSpace
        """
        return self._logspace

    @logspace.setter
    def logspace(self, logspace):
        """Sets the logspace of this V1HpParams.

        :param logspace: The logspace of this V1HpParams.  # noqa: E501
        :type: V1HpLogSpace
        """
        self._logspace = logspace

    @property
    def geomspace(self):
        """Gets the geomspace of this V1HpParams.  # noqa: E501

        :return: The geomspace of this V1HpParams.  # noqa: E501
        :rtype: V1HpGeomSpace
        """
        return self._geomspace

    @geomspace.setter
    def geomspace(self, geomspace):
        """Sets the geomspace of this V1HpParams.

        :param geomspace: The geomspace of this V1HpParams.  # noqa: E501
        :type: V1HpGeomSpace
        """
        self._geomspace = geomspace

    @property
    def uniform(self):
        """Gets the uniform of this V1HpParams.  # noqa: E501

        :return: The uniform of this V1HpParams.  # noqa: E501
        :rtype: V1HpUniform
        """
        return self._uniform

    @uniform.setter
    def uniform(self, uniform):
        """Sets the uniform of this V1HpParams.

        :param uniform: The uniform of this V1HpParams.  # noqa: E501
        :type: V1HpUniform
        """
        self._uniform = uniform

    @property
    def quniform(self):
        """Gets the quniform of this V1HpParams.  # noqa: E501

        :return: The quniform of this V1HpParams.  # noqa: E501
        :rtype: V1HpQUniform
        """
        return self._quniform

    @quniform.setter
    def quniform(self, quniform):
        """Sets the quniform of this V1HpParams.

        :param quniform: The quniform of this V1HpParams.  # noqa: E501
        :type: V1HpQUniform
        """
        self._quniform = quniform

    @property
    def loguniform(self):
        """Gets the loguniform of this V1HpParams.  # noqa: E501

        :return: The loguniform of this V1HpParams.  # noqa: E501
        :rtype: V1HpLogUniform
        """
        return self._loguniform

    @loguniform.setter
    def loguniform(self, loguniform):
        """Sets the loguniform of this V1HpParams.

        :param loguniform: The loguniform of this V1HpParams.  # noqa: E501
        :type: V1HpLogUniform
        """
        self._loguniform = loguniform

    @property
    def qloguniform(self):
        """Gets the qloguniform of this V1HpParams.  # noqa: E501

        :return: The qloguniform of this V1HpParams.  # noqa: E501
        :rtype: V1HpQLogUniform
        """
        return self._qloguniform

    @qloguniform.setter
    def qloguniform(self, qloguniform):
        """Sets the qloguniform of this V1HpParams.

        :param qloguniform: The qloguniform of this V1HpParams.  # noqa: E501
        :type: V1HpQLogUniform
        """
        self._qloguniform = qloguniform

    @property
    def normal(self):
        """Gets the normal of this V1HpParams.  # noqa: E501

        :return: The normal of this V1HpParams.  # noqa: E501
        :rtype: V1HpNormal
        """
        return self._normal

    @normal.setter
    def normal(self, normal):
        """Sets the normal of this V1HpParams.

        :param normal: The normal of this V1HpParams.  # noqa: E501
        :type: V1HpNormal
        """
        self._normal = normal

    @property
    def qnormal(self):
        """Gets the qnormal of this V1HpParams.  # noqa: E501

        :return: The qnormal of this V1HpParams.  # noqa: E501
        :rtype: V1HpQNormal
        """
        return self._qnormal

    @qnormal.setter
    def qnormal(self, qnormal):
        """Sets the qnormal of this V1HpParams.

        :param qnormal: The qnormal of this V1HpParams.  # noqa: E501
        :type: V1HpQNormal
        """
        self._qnormal = qnormal

    @property
    def lognormal(self):
        """Gets the lognormal of this V1HpParams.  # noqa: E501

        :return: The lognormal of this V1HpParams.  # noqa: E501
        :rtype: V1HpLogNormal
        """
        return self._lognormal

    @lognormal.setter
    def lognormal(self, lognormal):
        """Sets the lognormal of this V1HpParams.

        :param lognormal: The lognormal of this V1HpParams.  # noqa: E501
        :type: V1HpLogNormal
        """
        self._lognormal = lognormal

    @property
    def qlognormal(self):
        """Gets the qlognormal of this V1HpParams.  # noqa: E501

        :return: The qlognormal of this V1HpParams.  # noqa: E501
        :rtype: V1HpQLogNormal
        """
        return self._qlognormal

    @qlognormal.setter
    def qlognormal(self, qlognormal):
        """Sets the qlognormal of this V1HpParams.

        :param qlognormal: The qlognormal of this V1HpParams.  # noqa: E501
        :type: V1HpQLogNormal
        """
        self._qlognormal = qlognormal

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, V1HpParams):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, V1HpParams):
            return True

        return self.to_dict() != other.to_dict()
StarcoderdataPython
3210586
<filename>lib/diffmask/__init__.py
#!/usr/bin/python
# vim:fileencoding=utf-8
# (C) 2010 <NAME>, distributed under the terms of 3-clause BSD license

PV = '0.3.3'
StarcoderdataPython
3242947
import os
import torch
import numpy as np
from config import get_config
from src.Learner import face_learner
import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='')
    parser.add_argument("-m", "--load_model", default="", type=str)
    args = parser.parse_args()

    conf = get_config()
    conf.load_model = args.load_model

    learner = face_learner(conf)
    learner.test(conf)
StarcoderdataPython
3353457
<gh_stars>0
# coding: utf-8
#
# Copyright 2020 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Service layer for handling user-authentication with Firebase.

Oppia depends on OpenID Connect 1.0 to handle user authentication. We use
[Firebase authentication](https://firebase.google.com/docs/auth) to do the
heavy-lifting, especially for securely storing user credentials and
associating users to their identity providers. This helps us minimize the
contact we make with private information.

Terminology:
    OpenID Connect 1.0 (OIDC):
        A simple identity layer on top of the OAuth 2.0 protocol. It is a
        specification (i.e. a strict set of algorithms, data structures, and
        rules) that defines how two parties must share data about a user in a
        secure way on that user's behalf.
    OAuth 2.0 (OAuth):
        The industry-standard protocol for authorization. It enables a
        third-party application to obtain limited access to an HTTP service
        on behalf of a user.
    Claim:
        A piece of information about a user (name, address, phone number,
        etc.) that has been encrypted and digitally signed.
    JSON Web Token (JWT):
        A compact and URL-safe protocol primarily designed to send Claims
        between two parties. Claims are organized into JSON objects that map
        "Claim Names" to "Claim Values".
    Identity provider:
        An entity that creates, maintains, and manages identity information
        and provides authentication services. Such services rely on JWTs to
        send identity information. Examples of identity providers include:
        Google, Facebook, Email verification links, and Text message SMS
        codes.
    Subject Identifier:
        A Claim that can uniquely identify a user. It is locally unique and
        never reassigned with respect to the provider who issued it. The
        Claim's name is 'sub'. Example values: `24400320` or
        `AItOawmwtWwcT0k51BayewNvutrJUqsvl6qs7A4`.
"""

from __future__ import absolute_import  # pylint: disable=import-only-modules
from __future__ import unicode_literals  # pylint: disable=import-only-modules

import logging

from constants import constants
from core.domain import auth_domain
from core.platform import models
import feconf
import python_utils

import firebase_admin
from firebase_admin import auth as firebase_auth
from firebase_admin import exceptions as firebase_exceptions

auth_models, user_models = (
    models.Registry.import_models([models.NAMES.auth, models.NAMES.user]))

transaction_services = models.Registry.import_transaction_services()


def establish_firebase_connection():
    """Establishes the connection to Firebase needed by the rest of the SDK.

    All Firebase operations require an "app", the abstraction used for a
    Firebase server connection. The initialize_app() function raises an error
    when it's called more than once, however, so we make this function
    idempotent by trying to "get" the app first.

    Returns:
        firebase_admin.App. The App being used by the Firebase SDK.

    Raises:
        Exception. The Firebase app has a genuine problem.
    """
    try:
        firebase_admin.get_app()
    except ValueError as error:
        if 'initialize_app' in python_utils.UNICODE(error):
            firebase_admin.initialize_app(
                options={'projectId': feconf.OPPIA_PROJECT_ID})
        else:
            raise


def establish_auth_session(request, response):
    """Sets login cookies to maintain a user's sign-in session.

    Args:
        request: webapp2.Request. The request with the authorization to begin
            a new session.
        response: webapp2.Response. The response to establish the new session
            upon.
    """
    claims = _get_auth_claims_from_session_cookie(
        _get_session_cookie(request))

    # If the request already contains a valid session cookie, then there's no
    # action necessary; the session is already established.
    if claims is not None:
        return

    fresh_cookie = firebase_auth.create_session_cookie(
        _get_id_token(request), feconf.FIREBASE_SESSION_COOKIE_MAX_AGE)

    response.set_cookie(
        feconf.FIREBASE_SESSION_COOKIE_NAME,
        value=fresh_cookie,
        max_age=feconf.FIREBASE_SESSION_COOKIE_MAX_AGE,
        overwrite=True,
        # Toggles https vs http. The production server uses https, but the
        # local development server uses http.
        secure=(not constants.EMULATOR_MODE),
        # Using the HttpOnly flag when generating a cookie helps mitigate the
        # risk of client side script accessing the protected cookie (if the
        # browser supports it).
        # Learn more: https://owasp.org/www-community/HttpOnly.
        httponly=True)


def destroy_auth_session(response):
    """Clears login cookies from the given response headers.

    Args:
        response: webapp2.Response. Response to clear the cookies from.
    """
    response.delete_cookie(feconf.FIREBASE_SESSION_COOKIE_NAME)


def get_auth_claims_from_request(request):
    """Authenticates the request and returns claims about its authorizer.

    Args:
        request: webapp2.Request. The HTTP request to authenticate.

    Returns:
        AuthClaims|None. Claims about the currently signed in user. If no
        user is signed in, then returns None.

    Raises:
        InvalidAuthSessionError. The request contains an invalid session.
        StaleAuthSessionError. The cookie has lost its authority.
    """
    return _get_auth_claims_from_session_cookie(
        _get_session_cookie(request))


def mark_user_for_deletion(user_id):
    """Marks the user, and all of their auth associations, as deleted.

    This function also disables the user's Firebase account so that they
    cannot be used to sign in.

    Args:
        user_id: str. The unique ID of the user whose associations should be
            deleted.
    """
    # NOTE: We use get_multi(include_deleted=True) because get() returns None
    # for models with deleted=True, but we need to make changes to those
    # models when managing deletion.
    (assoc_by_user_id_model,) = auth_models.UserAuthDetailsModel.get_multi(
        [user_id], include_deleted=True)

    if assoc_by_user_id_model is not None:
        assoc_by_user_id_model.deleted = True
        assoc_by_user_id_model.update_timestamps()
        assoc_by_user_id_model.put()

    assoc_by_auth_id_model = (
        auth_models.UserIdByFirebaseAuthIdModel.get_by_user_id(user_id)
        if assoc_by_user_id_model is None else
        # NOTE: We use get_multi(include_deleted=True) because get() returns
        # None for models with deleted=True, but we need to make changes to
        # those models when managing deletion.
        auth_models.UserIdByFirebaseAuthIdModel.get_multi(
            [assoc_by_user_id_model.firebase_auth_id],
            include_deleted=True)[0])

    if assoc_by_auth_id_model is not None:
        assoc_by_auth_id_model.deleted = True
        assoc_by_auth_id_model.update_timestamps()
        assoc_by_auth_id_model.put()
    else:
        logging.error(
            '[WIPEOUT] User with user_id=%s has no Firebase account'
            % user_id)
        return

    try:
        firebase_auth.update_user(assoc_by_auth_id_model.id, disabled=True)
    except (firebase_exceptions.FirebaseError, ValueError):
        # NOTE: logging.exception appends the stack trace automatically. The
        # errors are not re-raised because wipeout_services, the user of this
        # function, does not use exceptions to keep track of failures. It
        # uses the verify_external_auth_associations_are_deleted() function
        # instead.
        logging.exception(
            '[WIPEOUT] Failed to disable Firebase account! Stack trace:')


def delete_external_auth_associations(user_id):
    """Deletes all associations that refer to the user outside of Oppia.

    Args:
        user_id: str. The unique ID of the user whose associations should be
            deleted.
    """
    auth_id = get_auth_id_from_user_id(user_id, include_deleted=True)
    if auth_id is None:
        return
    try:
        firebase_auth.delete_user(auth_id)
    except firebase_auth.UserNotFoundError:
        logging.exception('[WIPEOUT] Firebase account already deleted')
    except (firebase_exceptions.FirebaseError, ValueError):
        # NOTE: logging.exception appends the stack trace automatically. The
        # errors are not re-raised because wipeout_services, the user of this
        # function, does not use exceptions to keep track of failures. It
        # uses the verify_external_auth_associations_are_deleted() function
        # instead.
        logging.exception(
            '[WIPEOUT] Firebase Admin SDK failed! Stack trace:')


def verify_external_auth_associations_are_deleted(user_id):
    """Returns true if and only if we have successfully verified that all
    external associations have been deleted.

    Args:
        user_id: str. The unique ID of the user whose associations should be
            checked.

    Returns:
        bool. True if and only if we have successfully verified that all
        external associations have been deleted.
    """
    auth_id = get_auth_id_from_user_id(user_id, include_deleted=True)
    if auth_id is None:
        return True
    try:
        # TODO(#11474): Replace with `get_users()` (plural) because
        # `get_user()` (singular) does not distinguish between disabled and
        # deleted users. We can't do it right now because
        # firebase-admin==3.2.1 does not offer the get_users() API. We will
        # need to fix this when we've moved to a more recent version (after
        # the Python 3 migration).
        firebase_auth.get_user(auth_id)
    except firebase_auth.UserNotFoundError:
        return True
    except (firebase_exceptions.FirebaseError, ValueError):
        # NOTE: logging.exception appends the stack trace automatically. The
        # errors are not re-raised because wipeout_services, the user of this
        # function, will keep retrying the other "delete" family of functions
        # until this returns True (in 12h intervals).
        logging.exception(
            '[WIPEOUT] Firebase Admin SDK failed! Stack trace:')
    return False


def get_auth_id_from_user_id(user_id, include_deleted=False):
    """Returns the auth ID associated with the given user ID.

    Args:
        user_id: str. The user ID.
        include_deleted: bool. Whether to return the ID of models marked for
            deletion.

    Returns:
        str|None. The auth ID associated with the given user ID, or None if
        no association exists.
    """
    (assoc_by_user_id_model,) = auth_models.UserAuthDetailsModel.get_multi(
        [user_id], include_deleted=include_deleted)
    return (
        None if assoc_by_user_id_model is None else
        assoc_by_user_id_model.firebase_auth_id)


def get_multi_auth_ids_from_user_ids(user_ids):
    """Returns the auth IDs associated with the given user IDs.

    Args:
        user_ids: list(str). The user IDs.

    Returns:
        list(str|None). The auth IDs associated with each of the given user
        IDs, or None for associations which don't exist.
    """
    return [
        None if model is None else model.firebase_auth_id
        for model in auth_models.UserAuthDetailsModel.get_multi(user_ids)
    ]


def get_user_id_from_auth_id(auth_id, include_deleted=False):
    """Returns the user ID associated with the given auth ID.

    Args:
        auth_id: str. The auth ID.
        include_deleted: bool. Whether to return the ID of models marked for
            deletion.

    Returns:
        str|None. The user ID associated with the given auth ID, or None if
        no association exists.
    """
    (assoc_by_auth_id_model,) = (
        auth_models.UserIdByFirebaseAuthIdModel.get_multi(
            [auth_id], include_deleted=include_deleted))
    return (
        None if assoc_by_auth_id_model is None else
        assoc_by_auth_id_model.user_id)


def get_multi_user_ids_from_auth_ids(auth_ids):
    """Returns the user IDs associated with the given auth IDs.

    Args:
        auth_ids: list(str). The auth IDs.

    Returns:
        list(str|None). The user IDs associated with each of the given auth
        IDs, or None for associations which don't exist.
    """
    return [
        None if model is None else model.user_id
        for model in
        auth_models.UserIdByFirebaseAuthIdModel.get_multi(auth_ids)
    ]


def associate_auth_id_with_user_id(auth_id_user_id_pair):
    """Commits the association between auth ID and user ID.

    Args:
        auth_id_user_id_pair: auth_domain.AuthIdUserIdPair. The association
            to commit.

    Raises:
        Exception. The IDs are already associated with a value.
    """
    auth_id, user_id = auth_id_user_id_pair

    user_id_collision = get_user_id_from_auth_id(
        auth_id, include_deleted=True)
    if user_id_collision is not None:
        raise Exception('auth_id=%r is already associated with user_id=%r' % (
            auth_id, user_id_collision))

    auth_id_collision = get_auth_id_from_user_id(
        user_id, include_deleted=True)
    if auth_id_collision is not None:
        raise Exception('user_id=%r is already associated with auth_id=%r' % (
            user_id, auth_id_collision))

    # A new {auth_id: user_id} mapping needs to be created. We know the model
    # doesn't exist because get_auth_id_from_user_id returned None, even with
    # include_deleted=True.
    assoc_by_auth_id_model = (
        auth_models.UserIdByFirebaseAuthIdModel(id=auth_id, user_id=user_id))
    assoc_by_auth_id_model.update_timestamps()
    assoc_by_auth_id_model.put()

    # The {user_id: auth_id} mapping needs to be created, but the model used
    # to store the relationship might already exist because other services
    # use it as well (e.g. user_services uses
    # UserAuthDetailsModel.parent_user_id). In such situations, the return
    # value of get_auth_id_from_user_id would be None, so that isn't strong
    # enough to determine whether we need to create a new model rather than
    # update an existing one.
    #
    # NOTE: We use get_multi(include_deleted=True) because get() returns None
    # for models with deleted=True, but we need to make changes to those
    # models when managing deletion.
    (assoc_by_user_id_model,) = auth_models.UserAuthDetailsModel.get_multi(
        [user_id], include_deleted=True)
    if (assoc_by_user_id_model is None or
            assoc_by_user_id_model.firebase_auth_id is None):
        assoc_by_user_id_model = auth_models.UserAuthDetailsModel(
            id=user_id, firebase_auth_id=auth_id)
        assoc_by_user_id_model.update_timestamps()
        assoc_by_user_id_model.put()


def associate_multi_auth_ids_with_user_ids(auth_id_user_id_pairs):
    """Commits the associations between auth IDs and user IDs.

    Args:
        auth_id_user_id_pairs: list(auth_domain.AuthIdUserIdPair). The
            associations to commit.

    Raises:
        Exception. One or more auth associations already exist.
    """
    # Turn list(pair) to pair(list):
    # https://stackoverflow.com/a/7558990/4859885
    auth_ids, user_ids = python_utils.ZIP(*auth_id_user_id_pairs)

    user_id_collisions = get_multi_user_ids_from_auth_ids(auth_ids)
    if any(user_id is not None for user_id in user_id_collisions):
        user_id_collisions = ', '.join(
            '{auth_id=%r: user_id=%r}' % (auth_id, user_id)
            for auth_id, user_id in python_utils.ZIP(
                auth_ids, user_id_collisions)
            if user_id is not None)
        raise Exception('already associated: %s' % user_id_collisions)

    auth_id_collisions = get_multi_auth_ids_from_user_ids(user_ids)
    if any(auth_id is not None for auth_id in auth_id_collisions):
        auth_id_collisions = ', '.join(
            '{user_id=%r: auth_id=%r}' % (user_id, auth_id)
            for user_id, auth_id in python_utils.ZIP(
                user_ids, auth_id_collisions)
            if auth_id is not None)
        raise Exception('already associated: %s' % auth_id_collisions)

    # A new {auth_id: user_id} mapping needs to be created. We know the model
    # doesn't exist because get_auth_id_from_user_id returned None.
    assoc_by_auth_id_models = [
        auth_models.UserIdByFirebaseAuthIdModel(id=auth_id, user_id=user_id)
        for auth_id, user_id in python_utils.ZIP(auth_ids, user_ids)
    ]
    auth_models.UserIdByFirebaseAuthIdModel.update_timestamps_multi(
        assoc_by_auth_id_models)
    auth_models.UserIdByFirebaseAuthIdModel.put_multi(assoc_by_auth_id_models)

    # The {user_id: auth_id} mapping needs to be created, but the model used
    # to store the relationship might already exist because other services
    # use it as well (e.g. user_services uses
    # UserAuthDetailsModel.parent_user_id). In such situations, the return
    # value of get_multi_auth_ids_from_user_ids would be None, so that isn't
    # strong enough to determine whether we need to create a new model rather
    # than update an existing one.
    assoc_by_user_id_models = [
        auth_models.UserAuthDetailsModel(id=user_id, firebase_auth_id=auth_id)
        for auth_id, user_id, assoc_by_user_id_model in python_utils.ZIP(
            auth_ids, user_ids,
            auth_models.UserAuthDetailsModel.get_multi(user_ids))
        if (assoc_by_user_id_model is None or
            assoc_by_user_id_model.firebase_auth_id is None)
    ]
    if assoc_by_user_id_models:
        auth_models.UserAuthDetailsModel.update_timestamps_multi(
            assoc_by_user_id_models)
        auth_models.UserAuthDetailsModel.put_multi(assoc_by_user_id_models)


def grant_super_admin_privileges(user_id):
    """Grants the user super admin privileges.

    Args:
        user_id: str. The Oppia user ID to promote to super admin.
    """
    auth_id = get_auth_id_from_user_id(user_id)
    if auth_id is None:
        raise ValueError('user_id=%s has no Firebase account' % user_id)
    custom_claims = '{"role":"%s"}' % feconf.FIREBASE_ROLE_SUPER_ADMIN
    firebase_auth.set_custom_user_claims(auth_id, custom_claims)
    # NOTE: Revoke session cookies and ID tokens of the user so they are
    # forced to log back in to obtain their updated privileges.
    firebase_auth.revoke_refresh_tokens(auth_id)


def revoke_super_admin_privileges(user_id):
    """Revokes the user's super admin privileges.

    Args:
        user_id: str. The Oppia user ID to revoke privileges from.
    """
    auth_id = get_auth_id_from_user_id(user_id)
    if auth_id is None:
        raise ValueError('user_id=%s has no Firebase account' % user_id)
    firebase_auth.set_custom_user_claims(auth_id, None)
    # NOTE: Revoke session cookies and ID tokens of the user so they are
    # forced to log back in to obtain their updated privileges.
    firebase_auth.revoke_refresh_tokens(auth_id)


def seed_firebase():
    """Prepares Oppia and Firebase to run the SeedFirebaseOneOffJob.

    NOTE: This function is idempotent.

    TODO(#11462): Delete this handler once the Firebase migration logic is
    rollback-safe and all backup data is using post-migration data.
    """
    seed_model = auth_models.FirebaseSeedModel.get(
        auth_models.ONLY_FIREBASE_SEED_MODEL_ID, strict=False)
    if seed_model is None:
        # Exactly 1 seed model must exist.
        auth_models.FirebaseSeedModel(
            id=auth_models.ONLY_FIREBASE_SEED_MODEL_ID).put()

    user_ids_with_admin_email = [
        key.id() for key in user_models.UserSettingsModel.query(
            user_models.UserSettingsModel.email == feconf.ADMIN_EMAIL_ADDRESS
        ).iter(keys_only=True)
    ]
    assoc_by_user_id_models = [
        model for model in auth_models.UserAuthDetailsModel.get_multi(
            user_ids_with_admin_email)
        if model is not None and model.gae_id != feconf.SYSTEM_COMMITTER_ID
    ]
    if len(assoc_by_user_id_models) != 1:
        raise Exception(
            '%s must correspond to exactly 1 user (excluding user_id=%s), '
            'but found user_ids=[%s]' % (
                feconf.ADMIN_EMAIL_ADDRESS,
                feconf.SYSTEM_COMMITTER_ID,
                ', '.join(m.id for m in assoc_by_user_id_models)))
    else:
        assoc_by_user_id_model = assoc_by_user_id_models[0]
        user_id = assoc_by_user_id_model.id

    auth_id = assoc_by_user_id_model.firebase_auth_id
    if auth_id is None:
        auth_id = user_id[4:] if user_id.startswith('uid_') else user_id
        assoc_by_user_id_model.firebase_auth_id = auth_id
        assoc_by_user_id_model.update_timestamps(
            update_last_updated_time=False)
        assoc_by_user_id_model.put()

    assoc_by_auth_id_model = (
        auth_models.UserIdByFirebaseAuthIdModel.get(auth_id, strict=False))
    if assoc_by_auth_id_model is None:
        auth_models.UserIdByFirebaseAuthIdModel(
            id=auth_id, user_id=user_id).put()
    elif assoc_by_auth_id_model.user_id != user_id:
        assoc_by_auth_id_model.user_id = user_id
        assoc_by_auth_id_model.update_timestamps(
            update_last_updated_time=False)
        assoc_by_auth_id_model.put()

    custom_claims = '{"role":"%s"}' % feconf.FIREBASE_ROLE_SUPER_ADMIN
    try:
        user = firebase_auth.get_user_by_email(feconf.ADMIN_EMAIL_ADDRESS)
    except firebase_auth.UserNotFoundError:
        create_new_firebase_account = True
    else:
        if user.uid != auth_id:
            firebase_auth.update_user(user.uid, disabled=True)
            firebase_auth.delete_user(user.uid)
            create_new_firebase_account = True
        else:
            firebase_auth.set_custom_user_claims(user.uid, custom_claims)
            create_new_firebase_account = False

    if create_new_firebase_account:
        firebase_auth.import_users([
            firebase_auth.ImportUserRecord(
                auth_id, email=feconf.ADMIN_EMAIL_ADDRESS,
                custom_claims=custom_claims),
        ])


def _get_session_cookie(request):
    """Returns the session cookie authorizing the signed in user, if present.

    Args:
        request: webapp2.Request. The HTTP request to inspect.

    Returns:
        str|None. Value of the session cookie authorizing the signed in user,
        if present, otherwise None.
    """
    return request.cookies.get(feconf.FIREBASE_SESSION_COOKIE_NAME)


def _get_id_token(request):
    """Returns the ID token authorizing a user, or None if missing.
    Oppia uses OAuth 2.0's Bearer authentication scheme to send ID Tokens.

    Bearer authentication (a.k.a. token authentication) is an HTTP
    authentication scheme based on "bearer tokens", a signed JWT generated by
    a trusted identity provider in response to login requests. The name
    "Bearer authentication" can be understood as: "give access to the bearer
    of this token." These tokens _must_ be sent in the `Authorization` header
    of HTTP requests, and _must_ have the format: `Bearer <token>`.

    Learn more about:
        HTTP authentication schemes:
            https://developer.mozilla.org/en-US/docs/Web/HTTP/Authentication
        OAuth 2.0 Bearer authentication scheme:
            https://oauth.net/2/bearer-tokens/
        OpenID Connect 1.0 ID Tokens:
            https://openid.net/specs/openid-connect-core-1_0.html#IDToken

    Args:
        request: webapp2.Request. The HTTP request to inspect.

    Returns:
        str|None. The ID Token of the request, if present, otherwise None.
    """
    scheme, _, token = request.headers.get('Authorization', '').partition(' ')
    return token if scheme == 'Bearer' else None


def _get_auth_claims_from_session_cookie(cookie):
    """Returns claims from the session cookie, or None if invalid.

    Args:
        cookie: str|None. The session cookie to extract claims from.

    Returns:
        AuthClaims|None. The claims from the session cookie, if available.
        Otherwise returns None.

    Raises:
        InvalidAuthSessionError. The cookie has an invalid value.
        StaleAuthSessionError. The cookie has lost its authority.
    """
    # It's OK for a session cookie to be None or empty, it just means that the
    # request hasn't been authenticated.
    if not cookie:
        return None
    try:
        claims = firebase_auth.verify_session_cookie(cookie, check_revoked=True)
    except firebase_auth.ExpiredSessionCookieError:
        raise auth_domain.StaleAuthSessionError('session has expired')
    except firebase_auth.RevokedSessionCookieError:
        raise auth_domain.StaleAuthSessionError('session has been revoked')
    except (firebase_exceptions.FirebaseError, ValueError) as error:
        raise auth_domain.InvalidAuthSessionError('session invalid: %s' % error)
    else:
        return _create_auth_claims(claims)


def _create_auth_claims(firebase_claims):
    """Returns a new AuthClaims domain object from Firebase claims.

    Args:
        firebase_claims: dict(str: *). The raw claims returned by the Firebase
            SDK.

    Returns:
        AuthClaims. Oppia's representation of auth claims.
    """
    auth_id = firebase_claims.get('sub')
    email = firebase_claims.get('email')
    role_is_super_admin = (
        email == feconf.ADMIN_EMAIL_ADDRESS or
        firebase_claims.get('role') == feconf.FIREBASE_ROLE_SUPER_ADMIN)
    return auth_domain.AuthClaims(
        auth_id, email, role_is_super_admin=role_is_super_admin)
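# A hedged, illustrative check of the Bearer parsing implemented by
# _get_id_token() above. The _FakeRequest stand-in is an assumption made
# for demonstration only and is not part of this module's real callers.
if __name__ == '__main__':
    class _FakeRequest:
        def __init__(self, headers):
            self.headers = headers
            self.cookies = {}

    # A well-formed Bearer header yields the raw token.
    assert _get_id_token(_FakeRequest({'Authorization': 'Bearer abc123'})) == 'abc123'
    # A missing header or a non-Bearer scheme yields None.
    assert _get_id_token(_FakeRequest({})) is None
    assert _get_id_token(_FakeRequest({'Authorization': 'Basic abc123'})) is None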
StarcoderdataPython
1714916
<reponame>sgk98/travelling_salesman
import random
import pickle
import math
import nn


def probability(old, new, T):
    # Boltzmann acceptance rule: always take an improving move, otherwise
    # accept a worse tour with probability exp(-delta/T).
    if new < old:
        return 1.0
    else:
        return math.exp(-abs(old - new) / T)


def get_rand(n):
    init = [i for i in range(1, n)]
    random.shuffle(init)
    init = [0] + init + [0]
    return init


def get_state(oldstate):
    # Build the neighbouring tour on a copy: swapping in place would
    # mutate the caller's current state even when the move is rejected.
    newstate = list(oldstate)
    ind1 = random.randint(1, len(newstate) - 2)
    ind2 = random.randint(1, len(newstate) - 2)
    newstate[ind1], newstate[ind2] = newstate[ind2], newstate[ind1]
    return newstate


def eval_state(state, adj_mat):
    cost = 0
    for i in range(len(state) - 1):
        cost += adj_mat[state[i]][state[i + 1]]
    return cost


def anneal():
    fo = open("adj_mat", "rb")
    adj_mat = pickle.load(fo)
    init = nn.nn()
    cost = eval_state(init, adj_mat)
    T = 50000
    alpha = 0.99
    while T > 1e-29:
        newstate = get_state(init)
        p = random.random()
        old = eval_state(init, adj_mat)
        new = eval_state(newstate, adj_mat)
        if p < probability(old, new, T):
            init = newstate
            cost = min(cost, new)
        T *= alpha
    print(cost)


if __name__ == "__main__":
    anneal()
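# Illustrative only: how the acceptance rule above behaves for a fixed
# uphill move (cost 100 -> 110) as the temperature cools. At high T almost
# any worse tour is accepted; at low T the search is effectively greedy.
def _demo_acceptance():
    for T in (1000.0, 10.0, 0.1):
        print("T=%g -> accept worse tour with p=%.4f"
              % (T, probability(100, 110, T)))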
StarcoderdataPython
1646742
# -*- coding: utf-8 -*- # Generated by Django 1.10.4 on 2018-01-13 13:47 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('account', '0014_auto_20180107_1224'), ] operations = [ migrations.RemoveField( model_name='user', name='create_time', ), migrations.RemoveField( model_name='user', name='privilege', ), ]
StarcoderdataPython
3304647
<gh_stars>1-10 ''' NOTE!! Remember to include Oracle instaclient in PATH: set PATH=%PATH%;C:\oracle\instantclient_18_3 ''' import sys from pathlib import Path import click import cx_Oracle from db_connect import (usr, pwd, server, service, port) BASE = Path(__file__).parents[0] REFNR_FILE = BASE / Path('refnr.txt') con = cx_Oracle.connect(f'{usr}/{pwd}@{server}:{port}/{service}', encoding='UTF-8', nencoding='UTF-8') cursor = con.cursor() cursor.execute('SELECT count(*) FROM hvd_ref@refgeo WHERE mv_status !=-1') refnr_count = cursor.fetchone()[0] cursor.execute('''SELECT refnr, typ FROM hvd_ref@refgeo WHERE mv_status !=-1''') with open(REFNR_FILE, 'w') as outfile: with click.progressbar(cursor, label='Reading refnumbers', length=refnr_count) as progress_cursor: for refnr, _ in progress_cursor: outfile.write(f'{refnr}\n') con.close()
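# A hedged alternative sketch: building the DSN with cx_Oracle.makedsn()
# instead of hand-formatting the EZConnect string. It reuses the same
# credentials imported from db_connect above and is shown as a helper so
# the module-level connection is not duplicated.
def connect_with_makedsn():
    dsn = cx_Oracle.makedsn(server, port, service_name=service)
    return cx_Oracle.connect(user=usr, password=pwd, dsn=dsn,
                             encoding='UTF-8', nencoding='UTF-8')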
StarcoderdataPython
147219
# -*- coding: utf-8 -*- # flake8: noqa: F401 # noreorder """ Pytube: a very serious Python library for downloading YouTube Videos. """ __title__ = "pytube3" __author__ = "<NAME>, <NAME>" __license__ = "MIT License" __copyright__ = "Copyright 2019 <NAME>" from pytube.version import __version__ from pytube.streams import Stream from pytube.query import StreamQuery from pytube.__main__ import YouTube from pytube.contrib.playlist import Playlist
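# Hedged usage sketch (the URL is a placeholder, and the exact filter
# arguments depend on the installed pytube3 version):
#
#   yt = YouTube('https://www.youtube.com/watch?v=<VIDEO_ID>')
#   stream = yt.streams.filter(progressive=True).first()
#   stream.download()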
StarcoderdataPython
3228973
<gh_stars>0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.shortcuts import redirect


class AuthRedirectMixin(object):
    def get(self, request, *args, **kwargs):
        if request.user.is_authenticated():
            return redirect('/')
        else:
            # NOTE: ``self`` must not be passed explicitly to a bound
            # super() call; the descriptor already supplies it.
            return super(AuthRedirectMixin, self).get(request, *args, **kwargs)


class RequestFormMixin(object):
    def get_form_kwargs(self):
        kwargs = super(RequestFormMixin, self).get_form_kwargs()
        kwargs['request'] = self.request
        return kwargs
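# A hedged composition example; FormView comes from Django, but the view
# and form names below are illustrative placeholders, not part of this app.
#
#   from django.views.generic import FormView
#
#   class LoginView(AuthRedirectMixin, RequestFormMixin, FormView):
#       template_name = 'login.html'
#       form_class = LoginForm  # a form whose __init__ accepts request=...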
StarcoderdataPython
164809
<reponame>stevepbyrne/dbus-systemcalc-py import os import sys # Modify path so we can find our own packages test_dir = os.path.dirname(__file__) sys.path.insert(0, test_dir) sys.path.insert(1, os.path.join(test_dir, '..', 'ext', 'velib_python', 'test')) sys.path.insert(1, os.path.join(test_dir, '..', 'ext', 'velib_python')) sys.path.insert(1, os.path.join(test_dir, '..'))
StarcoderdataPython
3389844
<reponame>finswimmer/clikit<filename>tests/handler/help/test_help_text_handler.py<gh_stars>10-100 # -*- coding: utf-8 -*- from __future__ import unicode_literals import pytest from clikit.api.args import Args from clikit.args.string_args import StringArgs from clikit.config.default_application_config import DefaultApplicationConfig from clikit.console_application import ConsoleApplication @pytest.fixture() def app(): config = DefaultApplicationConfig() config.set_display_name("The Application") config.set_version("1.2.3") with config.command("command") as c: with c.sub_command("add") as sc1: sc1.set_description('Description of "add"') sc1.add_argument("argument", 0, 'Description of "argument"') with c.sub_command("delete") as sc2: sc2.set_description('Description of "delete"') sc2.add_option("opt", "o", description='Description of "opt"') sc2.add_argument("sub-arg", description='Description of "sub-arg"') application = ConsoleApplication(config) return application def test_render_for_application(app, io): help_command = app.get_command("help") handler = help_command.config.handler raw_args = StringArgs("") args = Args(help_command.args_format, raw_args) status = handler.handle(args, io, app.get_command("command")) expected = """\ The Application version 1.2.3 USAGE console [-h] [-q] [-v [<...>]] [-V] [--ansi] [--no-ansi] [-n] <command> [<arg1>] ... [<argN>] ARGUMENTS <command> The command to execute <arg> The arguments of the command GLOBAL OPTIONS -h (--help) Display this help message -q (--quiet) Do not output any message -v (--verbose) Increase the verbosity of messages: "-v" for normal output, "-vv" for more verbose output and "-vvv" for debug -V (--version) Display this application version --ansi Force ANSI output --no-ansi Disable ANSI output -n (--no-interaction) Do not ask any interactive question AVAILABLE COMMANDS command help Display the manual of a command """ assert 0 == status assert expected == io.fetch_output() def test_render_for_application_does_not_display_hidden_commands(app, io): app.get_command("command").config.hide() help_command = app.get_command("help") handler = help_command.config.handler raw_args = StringArgs("") args = Args(help_command.args_format, raw_args) status = handler.handle(args, io, app.get_command("command")) expected = """\ The Application version 1.2.3 USAGE console [-h] [-q] [-v [<...>]] [-V] [--ansi] [--no-ansi] [-n] <command> [<arg1>] ... 
[<argN>] ARGUMENTS <command> The command to execute <arg> The arguments of the command GLOBAL OPTIONS -h (--help) Display this help message -q (--quiet) Do not output any message -v (--verbose) Increase the verbosity of messages: "-v" for normal output, "-vv" for more verbose output and "-vvv" for debug -V (--version) Display this application version --ansi Force ANSI output --no-ansi Disable ANSI output -n (--no-interaction) Do not ask any interactive question AVAILABLE COMMANDS help Display the manual of a command """ assert 0 == status assert expected == io.fetch_output() def test_render_sub_command(app, io): help_command = app.get_command("help") handler = help_command.config.handler raw_args = StringArgs("command delete") args = Args(help_command.args_format, raw_args) args.set_argument("command", "command") status = handler.handle(args, io, app.get_command("command")) expected = """\ USAGE console command delete [-o] [<sub-arg>] ARGUMENTS <sub-arg> Description of "sub-arg" OPTIONS -o (--opt) Description of "opt" GLOBAL OPTIONS -h (--help) Display this help message -q (--quiet) Do not output any message -v (--verbose) Increase the verbosity of messages: "-v" for normal output, "-vv" for more verbose output and "-vvv" for debug -V (--version) Display this application version --ansi Force ANSI output --no-ansi Disable ANSI output -n (--no-interaction) Do not ask any interactive question """ assert 0 == status assert expected == io.fetch_output() def test_render_parent_command_with_help_command_argument(app, io): help_command = app.get_command("help") handler = help_command.config.handler raw_args = StringArgs("help command") args = Args(help_command.args_format, raw_args) args.set_argument("command", "command") status = handler.handle(args, io, app.get_command("command")) expected = """\ USAGE console command or: console command add [<argument>] or: console command delete [-o] [<sub-arg>] COMMANDS add Description of "add" <argument> Description of "argument" delete Description of "delete" <sub-arg> Description of "sub-arg" -o (--opt) Description of "opt" GLOBAL OPTIONS -h (--help) Display this help message -q (--quiet) Do not output any message -v (--verbose) Increase the verbosity of messages: "-v" for normal output, "-vv" for more verbose output and "-vvv" for debug -V (--version) Display this application version --ansi Force ANSI output --no-ansi Disable ANSI output -n (--no-interaction) Do not ask any interactive question """ assert 0 == status assert expected == io.fetch_output() def test_render_sub_command_with_help_command_argument(app, io): help_command = app.get_command("help") handler = help_command.config.handler raw_args = StringArgs("help command delete") args = Args(help_command.args_format, raw_args) args.set_argument("command", "command") status = handler.handle(args, io, app.get_command("command")) expected = """\ USAGE console command delete [-o] [<sub-arg>] ARGUMENTS <sub-arg> Description of "sub-arg" OPTIONS -o (--opt) Description of "opt" GLOBAL OPTIONS -h (--help) Display this help message -q (--quiet) Do not output any message -v (--verbose) Increase the verbosity of messages: "-v" for normal output, "-vv" for more verbose output and "-vvv" for debug -V (--version) Display this application version --ansi Force ANSI output --no-ansi Disable ANSI output -n (--no-interaction) Do not ask any interactive question """ assert 0 == status assert expected == io.fetch_output() def test_render_parent_command_does_not_display_hidden_sub_commands(app, io): 
app.get_command("command").config.get_sub_command_config("delete").hide() help_command = app.get_command("help") handler = help_command.config.handler raw_args = StringArgs("help command") args = Args(help_command.args_format, raw_args) args.set_argument("command", "command") status = handler.handle(args, io, app.get_command("command")) expected = """\ USAGE console command or: console command add [<argument>] COMMANDS add Description of "add" <argument> Description of "argument" GLOBAL OPTIONS -h (--help) Display this help message -q (--quiet) Do not output any message -v (--verbose) Increase the verbosity of messages: "-v" for normal output, "-vv" for more verbose output and "-vvv" for debug -V (--version) Display this application version --ansi Force ANSI output --no-ansi Disable ANSI output -n (--no-interaction) Do not ask any interactive question """ assert 0 == status assert expected == io.fetch_output()
StarcoderdataPython
3357103
<filename>hutch_python/cache.py
"""
This module is responsible for accumulating all loaded objects and making sure
they are available in the ``xxx.db`` virtual module. It is used extensively in
`load_conf.load_conf`.
"""
from importlib import import_module
from pathlib import Path
import datetime
import logging
import sys

from .utils import IterableNamespace

logger = logging.getLogger(__name__)


class LoadCache:
    """
    Class that accumulates objects in a virtual module.

    This virtual module can be imported from as if it were a normal module.

    Parameters
    ----------
    module: ``str``
        Name of the virtual module to create. If the module has the same name
        as a real module, the real module will be masked.

    hutch_dir: ``Path``, optional
        This allows us to write a ``db.txt`` file to let the user know what
        objects get imported.

    **objs: kwargs
        Initial objects to place into the namespace

    Attributes
    ----------
    objs: `IterableNamespace`
        This is a namespace containing all the objects that have been attached
        to the ``LoadCache``.
    """
    def __init__(self, module, hutch_dir=None, **objs):
        self.objs = IterableNamespace(**objs)
        self.hutch_dir = hutch_dir
        self.module = module
        self.spoof_module(module)
        self.spoof_module('hutch_python.db')

    def spoof_module(self, module_name):
        """
        Create a fake module that is actually self.objs
        """
        # Check for real module that it needs to be slipped into
        module_parts = module_name.split('.')
        parent = '.'.join(module_parts[:-1])
        if parent:
            try:
                parent_module = import_module(parent)
                setattr(parent_module, module_parts[-1], self.objs)
            except ImportError:
                logger.debug('Skip patching parent module %s, does not import',
                             parent, exc_info=True)
        # Place it here so it looks like we've already imported it
        sys.modules[module_name] = self.objs

    def __call__(self, **objs):
        """
        Add objects to the namespace.

        Parameters
        ----------
        **objs: kwargs
            The key is the namespace-accessible name, and the object is the
            object we are adding.
        """
        self.objs.__dict__.update(**objs)

    def write_file(self):
        """
        Write a ``db.txt`` file in the hutch's directory.

        This file informs the user which objects get loaded by
        ``hutch-python``.
        """
        if self.hutch_dir is not None:
            parts = self.module.split('.')
            parts[-1] = parts[-1] + '.txt'
            db_path = self.hutch_dir / Path('/'.join(parts))
            text = (header.format(parts[0])
                    + body.format(datetime.datetime.now()))
            for name, obj in self.objs.__dict__.items():
                text += '{:<20} {}\n'.format(name, obj.__class__)
            if not db_path.exists():
                db_path.touch()
                db_path.chmod(0o666)
            with db_path.open('w') as f:
                f.write(text)


# For writing the files
header = """
The objects referenced in this file are populated by the {0}python\n
initialization. If you wish to use devices from this file, import\n
them from {0}.db after calling the {0}python startup script.\n\n
"""
body = ('hutch-python last loaded on {}\n'
        'with the following objects:\n\n')
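# A minimal usage sketch based on the docstrings above (the module name
# and object are illustrative): objects attached to the cache become
# importable from the virtual module it registers in sys.modules.
def _demo_load_cache():
    cache = LoadCache('demo_hutch.db')
    cache(motor='a stand-in object')
    from demo_hutch.db import motor  # resolved through the spoofed module
    return motor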
StarcoderdataPython
1795598
<reponame>bashu/sigmacms-fluent-pages from optparse import make_option from django.conf import settings from django.contrib.sites.models import Site from django.core.management.base import BaseCommand, CommandError, NoArgsCommand from django.utils import translation from django.utils.translation import get_language_info import sys from fluent_pages.models import UrlNode from parler.utils.context import switch_language class Command(NoArgsCommand): """ Generate rewrite/redirect rules for the web server to redirect a single unmaintained language to another one. """ help = "Find all pages of a given language, and redirect to the canonical version." args = "language" option_list = BaseCommand.option_list + ( make_option('--format', default='nginx', help='Choose the output format, defaults to "nginx"'), make_option('--site', default=int(settings.SITE_ID), help="Choose the site ID to "), make_option('--from'), make_option('--host'), make_option('--to', default=settings.LANGUAGE_CODE), ) def handle(self, *args, **options): site = options['site'] host = options['host'] from_lang = options['from'] to_lang = options['to'] if not from_lang: raise CommandError("Provide a --from=.. language to redirect for") if not host: host = Site.objects.get_current().domain if '://' not in host: host = "http://{0}".format(host) from_name = get_language_info(from_lang)['name'] to_name = get_language_info(to_lang)['name'] with translation.override(from_lang): qs = (UrlNode.objects .parent_site(site) .non_polymorphic() .translated(to_lang) .order_by('translations___cached_url')) if not qs: raise CommandError("No URLs found for site {0} in {1}".format(site, from_name)) self.stdout.write('# Redirecting all translated {0} URLs to the {1} site\n'.format(from_name, to_name)) self.stdout.write("# Generated using {0}".format(" ".join(sys.argv))) for page in qs: from_url = page.default_url with switch_language(page, to_lang): to_url = page.get_absolute_url() if from_url == to_url: continue if from_url.endswith('/'): from_regexp = from_url.rstrip('/') from_rule = "~ ^{0}(/|$)".format(from_regexp) else: from_regexp = from_url from_rule = "= {0}".format(from_regexp) if page.plugin.urls: self.stdout.write("location {0} {{ rewrite ^{1}(.*)$ {2}{3}$1; }}\n".format( from_rule, from_regexp, host, to_url.rstrip('/') )) else: self.stdout.write("location {0} {{ return 301 {1}{2}; }}\n".format( from_rule, host, to_url )) # Final redirect for all identical URLs self.stdout.write("\n# Redirect all remaining and identical URls:\n") self.stdout.write("location / {{ rewrite ^/(.*)$ {0}/$1 permanent; }}\n".format(host))
StarcoderdataPython
3232575
import sys
import pandas as pd


# From Assignment 2, copied manually here just to remind you
# that you can copy stuff manually if importing isn't working out.
# You can just use this or you can replace it with your function.
def countTokens(text):
    token_counts = {}
    tokens = text.split(' ')
    for word in tokens:
        if not word in token_counts:
            token_counts[word] = 0
        token_counts[word] += 1
    return token_counts


def largest_counts(data):
    # TODO: Finish implementing this function

    # TODO: Cut up the rows in the dataset according to how you stored things.
    # The below assumes test data is stored first and negative is stored before positive.
    # If you did the same, no change is required.
    neg_test_data = data[:12500]
    neg_train_data = data[25000:37500]
    pos_test_data = data[12500:25000]
    pos_train_data = data[37500:50000]

    # TODO: SORT the count dicts which countTokens() returns
    # by value (count) in reverse (descending) order.
    # It is your task to Google and learn how to do this, but we will help of course,
    # if you come to us with questions. This can be daunting at first, but give it time.
    # Spend some (reasonable) time across a few days if necessary, and you will do it!
    # As is, the counts returned by the counter AREN'T sorted!
    # So you won't be able to easily retrieve the most frequent words.

    # NB: str.cat() turns whole column into one text
    train_counts_pos_original = countTokens(pos_train_data["review"].str.cat())
    train_counts_pos_cleaned = countTokens(
        pos_train_data["cleaned_review"].str.cat())
    train_counts_pos_lowercased = countTokens(
        pos_train_data["lowercased"].str.cat())
    train_counts_pos_no_stop = countTokens(
        pos_train_data["no stopwords"].str.cat())
    train_counts_pos_lemmatized = countTokens(
        pos_train_data["lemmatized"].str.cat())

    # Once the dicts are sorted, output the first 20 rows for each.
    # This is already done below, but changes may be needed depending on what you did to sort the dicts.
    # The [:20] "slicing" syntax expects a list. If your sorting call returns a list (which is likely,
    # as being sorted is conceptually a property of LISTS, NOT dicts),
    # you may want to remove the additional list(dict_name.items()) conversion.
    with open('counts.txt', 'w') as f:
        f.write('Original POS reviews:\n')
        for k, v in list(train_counts_pos_original.items())[:20]:
            f.write('{}\t{}\n'.format(k, v))
        f.write('Cleaned POS reviews:\n')
        for k, v in list(train_counts_pos_cleaned.items())[:20]:
            f.write('{}\t{}\n'.format(k, v))
        f.write('Lowercased POS reviews:\n')
        for k, v in list(train_counts_pos_lowercased.items())[:20]:
            f.write('{}\t{}\n'.format(k, v))
        f.write('No stopwords POS reviews:\n')
        for k, v in list(train_counts_pos_no_stop.items())[:20]:
            f.write('{}\t{}\n'.format(k, v))
        f.write('Lemmatized POS reviews:\n')
        for k, v in list(train_counts_pos_lemmatized.items())[:20]:
            f.write('{}\t{}\n'.format(k, v))

    # TODO: Do the same for all the remaining training dicts, per Assignment spec.

    # TODO: Copy the output of the above print statements
    # into your document/report, or otherwise create a table/visualization for these counts.
    # Manually is fine, or you may explore bar charts in pandas! Be creative :).


def main(argv):
    data = pd.read_csv(argv[1], index_col=[0])
    # print(data.head())  # <- Verify the format. Comment this back out once done.
    largest_counts(data)


if __name__ == "__main__":
    main(sys.argv)
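# A hedged sketch of the descending-order sort the TODOs above ask for:
# sorted() with a key on the count and reverse=True returns a list of
# (token, count) pairs, ready for the [:20] slices used in largest_counts().
def sort_counts_desc(counts):
    return sorted(counts.items(), key=lambda kv: kv[1], reverse=True)

# Example: sort_counts_desc({'the': 5, 'a': 9}) -> [('a', 9), ('the', 5)]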
StarcoderdataPython
1795191
<reponame>bhardwajRahul/web3.py import pytest from eth_abi.exceptions import ( ValueOutOfBounds, ) from hypothesis import ( given, strategies as st, ) from web3._utils.events import ( DataArgumentFilter, TopicArgumentFilter, normalize_topic_list, ) @pytest.mark.parametrize( "topic_list,expected", ( ( ("0x1", "0x2", ["0x3"], None, "0x4", None, None, None), ("0x1", "0x2", "0x3", None, "0x4") ), ( (None, ["0x2", "0x2a"], "0x3", None, "0x4", None, [None], None), (None, ["0x2", "0x2a"], "0x3", None, "0x4") ), ( (None, None, [None]), tuple() ) ) ) def test_normalize_topic_list(topic_list, expected): assert normalize_topic_list(topic_list) == expected @given(st.text()) def test_match_single_string_type_properties_data_arg(value): data_filter = DataArgumentFilter(arg_type="string") data_filter.match_single(value) @given(st.text()) def test_match_single_string_type_properties_topic_arg(w3, value): topic_filter = TopicArgumentFilter(arg_type="string", abi_codec=w3.codec) topic_filter.match_single(value) @given(st.lists(elements=st.text(), max_size=10, min_size=0)) def test_match_any_string_type_properties(w3, values): topic_filter = TopicArgumentFilter(arg_type="string", abi_codec=w3.codec) topic_filter.match_any(*values) assert len(topic_filter.match_values) == len(values) @given(st.lists(elements=st.binary(), max_size=10, min_size=0)) def test_match_any_bytes_type_properties(w3, values): topic_filter = TopicArgumentFilter(arg_type="bytes", abi_codec=w3.codec) topic_filter.match_any(*values) assert len(topic_filter.match_values) == len(values) @given(st.lists(elements=st.binary(), max_size=10, min_size=1)) def test_match_any_bytes_type_properties_strict(w3_strict_abi, values): topic_filter = TopicArgumentFilter(arg_type="bytes", abi_codec=w3_strict_abi.codec) topic_filter.match_any(*values) assert len(topic_filter.match_values) == len(values) def test_match_hex_type_properties_strict(w3_strict_abi): topic_filter = TopicArgumentFilter(arg_type="bytes2", abi_codec=w3_strict_abi.codec) topic_filter.match_any("0x1233") assert len(topic_filter.match_values) == 1 @pytest.mark.parametrize("values", (b"123", b"1", "0x12", "0x", "0x121212")) def test_match_any_bytes_type_properties_strict_errors(w3_strict_abi, values): topic_filter = TopicArgumentFilter(arg_type="bytes2", abi_codec=w3_strict_abi.codec) topic_filter.match_any(values) with pytest.raises(ValueOutOfBounds): topic_filter.match_values
StarcoderdataPython
3231406
<gh_stars>1-10 import argparse import logging import pickle import minato from automlcli.commands.subcommand import Subcommand from automlcli.models import Model logger = logging.getLogger(__name__) @Subcommand.register( name="retrain", description="retrain model with new data", help="retrain model with new data", ) class RetrainCommand(Subcommand): def set_arguments(self) -> None: self.parser.add_argument( "model", type=str, help="path to an automl configuration file", ) self.parser.add_argument( "data", type=str, help="path to a training data file", ) self.parser.add_argument( "output", type=str, default=None, help="path to a output file of retrained model", ) def run(self, args: argparse.Namespace) -> None: logger.info("Load model from %s", args.model) with minato.open(args.model, "rb") as fp: model = pickle.load(fp) # type: Model logger.info("Retrain model with %s", args.data) model.retrain(args.data) with minato.open(args.output, "wb") as fp: pickle.dump(model, fp) logger.info("Done!")
StarcoderdataPython
1709323
# ===========================================================
# ========================= imports =========================
import sys
import datetime
from gnsspy.funcs.funcs import (gpsweekday, datetime2doy)
from gnsspy.doc.IGS import IGS, is_IGS
# ===========================================================

def obsFileName(stationName, date, zipped = False):
    # RINEX 2 short names require a 3-digit day of year
    doy = datetime2doy(date, string = True).zfill(3)
    rinexFile = stationName + doy + "0." + str(date.year)[-2:] + "o"
    if zipped == True:
        rinexFile = rinexFile + ".Z"
    return rinexFile

def sp3FileName(epoch, product="igs"):
    now = datetime.date.today() # today's date
    timeDif = now - epoch # time difference between rinex epoch and today
    if timeDif.days == 0:
        print("IGS orbit files are not released for", epoch.ctime())
        sys.exit("Exiting...")
    elif 0 < timeDif.days < 13:
        print("IGS final orbit file is not released for", epoch.ctime(),
              "\nDownloading IGS Rapid orbit file...")
        product = 'igr'
    # sp3 file name
    gpsWeek, gpsWeekday = gpsweekday(epoch, Datetime = True)
    if len(str(gpsWeek)) == 3:
        sp3File = product.lower() + "0" + str(gpsWeekday) + ".sp3"
    else:
        sp3File = product.lower() + str(gpsWeekday) + ".sp3"
    return sp3File

def clockFileName(epoch, interval=30, product="cod"):
    now = datetime.date.today()
    timeDif = now - epoch
    if timeDif.days == 0:
        print("IGS clock files are not released for", epoch.ctime())
        sys.exit("Exiting...")
    elif 0 < timeDif.days < 13:
        product = 'igr'
    if interval < 30:
        product = 'cod'
        extension = '.clk_05s'
    else:
        extension = '.clk'
    gpsWeek, gpsWeekday = gpsweekday(epoch, Datetime = True)
    if len(str(gpsWeek)) == 3:
        clockFile = product.lower() + "0" + str(gpsWeekday) + extension
    else:
        clockFile = product.lower() + str(gpsWeekday) + extension
    return clockFile

def ionFileName(date, product = "igs", zipped = False):
    doy = datetime2doy(date, string = True).zfill(3)
    ionFile = product + "g" + doy + "0." + str(date.year)[-2:] + "i"
    if zipped == True:
        ionFile = ionFile + ".Z"
    return ionFile

def navFileName(stationName, date, zipped = False):
    doy = datetime2doy(date, string = True).zfill(3)
    rinexFile = stationName + doy + "0." + str(date.year)[-2:] + "n"
    if zipped == True:
        rinexFile = rinexFile + ".Z"
    return rinexFile

def nav3FileName(stationName, date, zipped = False):
    # RINEX 3 long names require a 3-digit day of year
    doy = datetime2doy(date, string = True).zfill(3)
    # for RINEX data names
    siteInfo = IGS(stationName)
    if stationName.upper() == "BRDC":
        rinexFile = "BRDC00IGS_R_" + str(date.year) + doy + "0000_01D_MN.rnx"
    else:
        rinexFile = siteInfo.SITE[0] + "_R_" + str(date.year) + doy + "0000_01D_MN.rnx"
    if zipped == True:
        rinexFile = rinexFile + ".gz"
    return rinexFile

def obs3FileName(stationName, date, zipped = False):
    doy = datetime2doy(date, string = True).zfill(3)
    # for RINEX data names
    siteInfo = IGS(stationName)
    rinexFile = siteInfo.SITE[0] + "_R_" + str(date.year) + doy + "0000_01D_30S_MO.crx"
    if zipped == True:
        rinexFile = rinexFile + ".gz"
    return rinexFile
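# Illustrative usage of the name builders above (the station and date are
# examples; exact strings depend on what datetime2doy/gpsweekday return):
#
#   obsFileName("onsa", datetime.date(2020, 2, 11))       -> e.g. "onsa0420.20o"
#   ionFileName(datetime.date(2020, 2, 11), zipped=True)  -> e.g. "igsg0420.20i.Z"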
StarcoderdataPython
1647146
<reponame>CedricTravelletti/MESLAS<filename>meslas/covariance/heterotopic.py
""" Code for multidimensional sampling.
We will be considering multivariate random fields Z=(Z^1, ..., Z^p).
The term *response index* denotes the index of the component of the field we
are considering.
We will sometimes use the word measurement point to denote a (location,
response index) pair.

We will be using the notation conventions from the papers.

    x's will denote a location
    j's will denote a response index

Uppercase for concatenated quantities, i.e. a big X is a vector of x's.

First dimension of tensors represents the different samples/locations (batch
dimension).
Other dimensions are for the "dimensions" of the response (or input domain).

THIS IS FOR HETEROTOPIC SAMPLING (most general form).
# TODO: Implement convenience methods for full sampling (all indices).

Conventions
-----------
Spatial locations will be denoted by s, capital letters for bunches.
Response indices denoted by l.
Couples of (locations, response indices) denoted by x.

"""
import torch
from torch.distributions import multivariate_normal


class StationaryCovariance():
    """ Stationary covariance function.

    Parameters
    ----------
    factor_stationary_cov: function(H, L1, L2)
        Covariance function. Only allow covariances that factor into a
        stationary spatial part that only depends on the euclidean distance
        matrix H and a purely response index component. L1 and L2 are the
        index matrix.
    n_out: int
        Number of output dimensions.

    """
    def __init__(self, factor_stationary_cov, n_out):
        self.factor_stationary_cov = factor_stationary_cov
        self.n_out = n_out

    def K(self, S1, S2, L1, L2):
        """ Same as above, but for vector of measurements.

        Parameters
        ----------
        S1: (M, d) Tensor
            Spatial location vector. Note if d=1, should still have two
            dimensions.
        S2: (N, d) Tensor
            Spatial location vector.
        L1: (M) Tensor
            Response indices vector.
        L2: (N) Tensor
            Response indices vector.

        Returns
        -------
        K: (M, N) Tensor
            Covariance matrix between the two sets of measurements.

        """
        # Distance matrix.
        H = torch.cdist(S1, S2, p=2, compute_mode='donot_use_mm_for_euclid_dist')
        return self.factor_stationary_cov(H, L1, L2)


class FactorStationaryCovariance(StationaryCovariance):
    """ Convenience class for specifying a factor model.

    Parameters
    ----------
    spatial_cov: function(H)
        Spatial covariance function.
    cross_cov: function(L1, L2)
        Cross-covariance function.
    n_out: int
        Number of output dimensions.

    """
    def __init__(self, spatial_cov, cross_cov, n_out):
        self.factor_stationary_cov = lambda H, L1, L2: spatial_cov(H) * cross_cov(L1, L2)
        self.n_out = n_out

    def __repr__(self):
        out_string = ("Factor Covariance Module:\n"
                      "------------------\n"
                      "Spatial part: \n"
                      "\t cross-correlation parameter gamma0:\n"
                      "\t individual variances sigma0s:\n")
        return out_string


class FactorCovariance():
    """ General (not necessarily stationary) separable covariance function.

    Parameters
    ----------
    spatial_cov: function(H)
        Spatial covariance function.
    cross_cov: function(S1, L1, S2, L2)
        Cross-covariance function.
    n_out: int
        Number of output dimensions.

    """
    def __init__(self, spatial_cov, cross_cov, n_out):
        self.spatial_cov = spatial_cov
        self.cross_cov = cross_cov
        self.n_out = n_out

    def K(self, S1, S2, L1, L2):
        """ Same as above, but for vector of measurements.

        Parameters
        ----------
        S1: (M, d) Tensor
            Spatial location vector. Note if d=1, should still have two
            dimensions.
        S2: (N, d) Tensor
            Spatial location vector.
        L1: (M) Tensor
            Response indices vector.
        L2: (N) Tensor
            Response indices vector.

        Returns
        -------
        K: (M, N) Tensor
            Covariance matrix between the two sets of measurements.

        """
        # Distance matrix.
        H = torch.cdist(S1, S2, p=2, compute_mode='donot_use_mm_for_euclid_dist')
        return self.spatial_cov(H) * self.cross_cov(S1, L1, S2, L2)
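# A runnable sketch (the kernel choices are illustrative, not prescribed
# by this module): an exponential spatial kernel with a constant
# cross-covariance, evaluated between two random measurement sets.
def _demo_factor_covariance():
    spatial_cov = lambda H: torch.exp(-H)
    cross_cov = lambda S1, L1, S2, L2: torch.ones(S1.shape[0], S2.shape[0])
    cov = FactorCovariance(spatial_cov, cross_cov, n_out=2)

    S1, S2 = torch.rand(5, 2), torch.rand(3, 2)
    L1 = torch.zeros(5, dtype=torch.long)
    L2 = torch.ones(3, dtype=torch.long)
    return cov.K(S1, S2, L1, L2)  # -> (5, 3) covariance matrix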
StarcoderdataPython
1734451
# Build a character-translation table and apply it to user input.
# str.maketrans converts the mapping into the int-keyed table that
# str.translate expects; the result must be printed (or assigned),
# since translate() returns a new string.
translate_table = str.maketrans({'a': 'b',
                                 'c': 'd'})

s = input('String please: ')
print(s.translate(translate_table))
StarcoderdataPython
7030
# coding: utf-8

import functools


def memoize(fn):
    known = dict()

    @functools.wraps(fn)
    def memoizer(*args):
        if args not in known:
            known[args] = fn(*args)
        return known[args]

    return memoizer


@memoize
def nsum(n):
    '''Return the sum of the first n numbers'''
    assert(n >= 0), 'n must be >= 0'
    return 0 if n == 0 else n + nsum(n-1)


@memoize
def fibonacci(n):
    '''Return the nth Fibonacci number'''
    assert(n >= 0), 'n must be >= 0'
    return n if n in (0, 1) else fibonacci(n-1) + fibonacci(n-2)


if __name__ == '__main__':
    from timeit import Timer
    measure = [{'exec': 'fibonacci(100)', 'import': 'fibonacci', 'func': fibonacci},
               {'exec': 'nsum(200)', 'import': 'nsum', 'func': nsum}]
    for m in measure:
        t = Timer('{}'.format(m['exec']),
                  'from __main__ import {}'.format(m['import']))
        print('name: {}, doc: {}, executing: {}, time: {}'.format(
            m['func'].__name__, m['func'].__doc__, m['exec'], t.timeit()))
StarcoderdataPython
1694532
<reponame>arbonagw/AstralShipwright<filename>Scripts/UploadSteamDemo.py #!/usr/bin/env python # -*- coding: utf-8 -*- #------------------------------------------------------------------------------- # Upload a demo for distribution on Steam, see UploadSteam.py for details # # <NAME> 2022 #------------------------------------------------------------------------------- import os os.system("UploadSteam.py demo")
StarcoderdataPython
4801763
from helper import unittest, PillowTestCase, hopper from PIL import Image import os class TestImageLoad(PillowTestCase): def test_sanity(self): im = hopper() pix = im.load() self.assertEqual(pix[0, 0], (20, 20, 70)) def test_close(self): im = Image.open("Tests/images/hopper.gif") im.close() self.assertRaises(ValueError, im.load) self.assertRaises(ValueError, im.getpixel, (0, 0)) def test_contextmanager(self): fn = None with Image.open("Tests/images/hopper.gif") as im: fn = im.fp.fileno() os.fstat(fn) self.assertRaises(OSError, os.fstat, fn) if __name__ == '__main__': unittest.main()
StarcoderdataPython
3352748
from tempfile import NamedTemporaryFile import pytest from testapp.models import Attachment from djantic import ModelSchema @pytest.mark.django_db def test_image_field_schema(): class AttachmentSchema(ModelSchema): class Config: model = Attachment image_file = NamedTemporaryFile(suffix=".jpg") attachment = Attachment.objects.create( description="My image upload", image=image_file.name, ) assert AttachmentSchema.schema() == { "title": "AttachmentSchema", "description": "Attachment(id, description, image)", "type": "object", "properties": { "id": {"title": "Id", "description": "id", "type": "integer"}, "description": { "title": "Description", "description": "description", "maxLength": 255, "type": "string", }, "image": { "title": "Image", "description": "image", "maxLength": 100, "type": "string", }, }, "required": ["description"], } assert AttachmentSchema.from_django(attachment).dict() == { "id": attachment.id, "description": attachment.description, "image": attachment.image.name, }
StarcoderdataPython
142833
<reponame>cron-ooo/django-compressor from django.core.exceptions import ImproperlyConfigured from django.utils.encoding import smart_str from django.utils.functional import cached_property from compressor.exceptions import ParserError from compressor.parser import ParserBase class LxmlParser(ParserBase): """ LxmlParser will use `lxml.html` parser to parse rendered contents of {% compress %} tag. """ def __init__(self, content): try: from lxml.html import fromstring from lxml.etree import tostring except ImportError as err: raise ImproperlyConfigured("Error while importing lxml: %s" % err) except Exception as err: raise ParserError("Error while initializing parser: %s" % err) self.fromstring = fromstring self.tostring = tostring super().__init__(content) @cached_property def tree(self): """ Document tree. """ content = '<root>%s</root>' % self.content tree = self.fromstring(content) self.tostring(tree, encoding=str) return tree def css_elems(self): return self.tree.xpath('//link[re:test(@rel, "^stylesheet$", "i")]|style', namespaces={"re": "http://exslt.org/regular-expressions"}) def js_elems(self): return self.tree.findall('script') def elem_attribs(self, elem): return elem.attrib def elem_content(self, elem): return smart_str(elem.text) def elem_name(self, elem): return elem.tag def elem_str(self, elem): return smart_str(self.tostring(elem, method='html', encoding=str))
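# A short usage sketch (the HTML snippet is illustrative): the parser
# wraps the content in a synthetic <root> element and exposes matched
# stylesheet/script nodes plus their attributes.
def _demo_lxml_parser():
    parser = LxmlParser('<link rel="stylesheet" href="a.css"/>'
                        '<script src="b.js"></script>')
    css = parser.css_elems()  # [<link rel="stylesheet" ...>]
    js = parser.js_elems()    # [<script src="b.js">]
    return [parser.elem_attribs(e) for e in list(css) + list(js)]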
StarcoderdataPython
4806057
<reponame>craigahobbs/sunrise<filename>sunrise.py # Licensed under the MIT License # https://github.com/craigahobbs/sunrise/blob/main/LICENSE import argparse import csv from datetime import datetime, timedelta, timezone import sys import ephem import pytz # The list of cities for which to generate sunrise data CITIES = [ {'name': 'Chicago', 'lat': '41.8781', 'lon': '-87.6298', 'tz': 'US/Central'}, {'name': 'Denver', 'lat': '39.7392', 'lon': '-104.9903', 'tz': 'US/Mountain'}, {'name': 'Honolulu', 'lat': '21.3069', 'lon': '-157.8583', 'tz': 'US/Hawaii'}, {'name': 'Houston', 'lat': '29.7604', 'lon': '-95.3698', 'tz': 'US/Central'}, {'name': 'Juneau', 'lat': '58.3019', 'lon': '-134.4197', 'tz': 'US/Alaska'}, {'name': 'Kansas City', 'lat': '39.1155', 'lon': '-94.6268', 'tz': 'US/Central'}, {'name': 'Los Angeles', 'lat': '34.0522', 'lon': '-118.2437', 'tz': 'US/Pacific'}, {'name': 'Miami', 'lat': '25.7617', 'lon': '-80.1918', 'tz': 'US/Eastern'}, {'name': 'New York', 'lat': '40.7128', 'lon': '-74.0060', 'tz': 'US/Eastern'}, {'name': 'Philadelphia', 'lat': '39.9526', 'lon': '-75.1652', 'tz': 'US/Eastern'}, {'name': 'Phoenix', 'lat': '33.4484', 'lon': '-112.0740', 'tz': 'US/Mountain'}, {'name': 'San Francisco', 'lat': '37.7749', 'lon': '-122.4194', 'tz': 'US/Pacific'}, {'name': 'Seattle', 'lat': '47.6062', 'lon': '-122.3321', 'tz': 'US/Pacific'} ] def main(): # Command line arguments parser = argparse.ArgumentParser(description='Generate sunrise data') parser.add_argument('-y', '--year', type=int, nargs='?', default=datetime.now().year, help='The year to start generating sunrise data') parser.add_argument('-n', '--years', type=int, nargs='?', default=1, help='The number of years of sunrise data to generate') args = parser.parse_args() # Initialize the ephem observer observer = ephem.Observer() observer.pressure = 0.0 # Generate the sunrise data city by city data = [] timedelta_day = timedelta(days=1) for city in CITIES: city_tz = pytz.timezone(city['tz']) # Update the observer's location observer.lat = city['lat'] observer.lon = city['lon'] # For each day add one sunrise date row noon_utc = datetime(args.year, 1, 1, 12, tzinfo=city_tz).astimezone(timezone.utc) date = noon_utc.replace(year=args.year, month=1, day=1, tzinfo=None) end_utc = datetime(args.year + args.years, 1, 1, tzinfo=city_tz).astimezone(timezone.utc) end_date = end_utc.replace(year=args.year + args.years, month=1, day=1, tzinfo=None) daylight_yesterday = None while date < end_date: # Update the observer's date observer.date = date # Calculate sunrise, sunset (horizon '-0:34') observer.horizon = '-0:34' sunrise = local_time_hours(observer.previous_rising(ephem.Sun()).datetime(), city_tz) # pylint: disable=no-member sunset = local_time_hours(observer.next_setting(ephem.Sun()).datetime(), city_tz) # pylint: disable=no-member # Calculate civil twilight (horizon @ -6) observer.horizon = '-6' twilight_rise_dt = observer.previous_rising(ephem.Sun(), use_center=True).datetime() # pylint: disable=no-member twilight_set_dt = observer.next_setting(ephem.Sun(), use_center=True).datetime() # pylint: disable=no-member twilight_rise = local_time_hours(twilight_rise_dt, city_tz) twilight_set = local_time_hours(twilight_set_dt, city_tz) # Compute daylight daylight = (twilight_set_dt - twilight_rise_dt).total_seconds() / (60 * 60) daylight_change = (daylight - daylight_yesterday) * 60 if daylight_yesterday is not None else None # Create the sunrise data row data.append({ 'City': city['name'], 'Date': date.strftime('%Y-%m-%d'), 'Sunrise': 
round(sunrise, 3), 'Sunset': round(sunset, 3), 'TwilightRise': round(twilight_rise, 3), 'TwilightSet': round(twilight_set, 3), 'Daylight': round(daylight, 3), 'DaylightChange': round(daylight_change, 3) if daylight_change is not None else 'null' }) # Next day date = date + timedelta_day daylight_yesterday = daylight # Write the CSV writer = csv.DictWriter(sys.stdout, [ 'City', 'Date', 'Sunrise', 'Sunset', 'TwilightRise', 'TwilightSet', 'Daylight', 'DaylightChange' ]) writer.writeheader() for row in data: writer.writerow(row) # Helper function to compute the local time in hours (0-24) def local_time_hours(naive_gmt_dt, local_tz): local_dt = naive_gmt_dt.replace(tzinfo=timezone.utc).astimezone(local_tz) return local_dt.hour + (local_dt.minute + (local_dt.second + local_dt.microsecond / 1000000) / 60) / 60 ###################################################################### if __name__ == '__main__': main()
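# Quick sanity sketch for local_time_hours() (the values are an example):
# 18:30 UTC on a winter date lands at 10:30 in US/Pacific (UTC-8),
# i.e. 10.5 local hours.
#
#   local_time_hours(datetime(2020, 1, 1, 18, 30), pytz.timezone('US/Pacific'))
#   -> 10.5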
StarcoderdataPython
1693004
# -*- coding: utf-8 -*-
"""
@author: WZM
@time: 2021/1/2 17:52
@function: evaluate model accuracy
"""
from net.ouy_net import Network
import numpy as np
import torch
import os


def load_net(fname, net):
    import h5py
    h5f = h5py.File(fname, mode='r')
    for k, v in net.state_dict().items():
        param = torch.from_numpy(np.asarray(h5f[k]))
        v.copy_(param)


def evaluate_model(trained_model, data_loader, index):
    net = Network(index)
    load_net(trained_model, net)
    device = torch.device('cuda:0')
    if torch.cuda.is_available():
        net = net.to(device)
    net.eval()
    count = 0
    total = 0
    lableresultpath = trained_model.replace(".h5", ".txt")
    if os.path.exists(lableresultpath):
        os.remove(lableresultpath)
    valid_loss = 0.0
    for blob in data_loader:
        im_data = blob[0]
        dem_data = blob[2]
        img_data = blob[1]
        gt_data = blob[3].reshape((blob[3].shape[0], 1))
        index = 61
        pre_label = net(im_data, dem_data, img_data, index, gt_data)
        pre_label = pre_label.data.cpu().numpy()
        valid_loss += net.loss.item()
        label = pre_label.argmax(axis=1).flatten()
        num = len(label)
        # Count one prediction per sample; a sample is correct when the
        # predicted class matches the ground truth.
        for i in range(0, num):
            if gt_data[i] == label[i]:
                count = count + 1
            total = total + 1
    return 1.0 * count / total, valid_loss


def evaluate_model1(net, data_loader, index):
    device = torch.device('cuda:0')
    if torch.cuda.is_available():
        net = net.to(device)
    net.eval()
    count = 0
    total = 0
    # lableresultpath = trained_model.replace(".h5", ".txt")
    # if os.path.exists(lableresultpath):
    #     os.remove(lableresultpath)
    valid_loss = 0.0
    for blob in data_loader:
        im_data = blob[0]
        dem_data = blob[2]
        img_data = blob[1]
        gt_data = blob[3].reshape((blob[3].shape[0], 1))
        index = 61
        with torch.no_grad():
            pre_label = net(im_data, dem_data, img_data, index, gt_data)
        pre_label = pre_label.data.cpu().numpy()
        valid_loss += net.loss.item()
        label = pre_label.argmax(axis=1).flatten()
        num = len(label)
        for i in range(0, num):
            if gt_data[i] == label[i]:
                count = count + 1
            total = total + 1
    return 1.0 * count / total, valid_loss
StarcoderdataPython
3209547
import warnings warnings.filterwarnings('ignore', category=FutureWarning) from flask import abort, render_template, Flask import logging import db APP = Flask(__name__) # Start page @APP.route('/') def index(): stats = {} x = db.execute('SELECT COUNT(*) AS movies FROM MOVIE').fetchone() stats.update(x) x = db.execute('SELECT COUNT(*) AS actors FROM ACTOR').fetchone() stats.update(x) x = db.execute('SELECT COUNT(*) AS streams FROM STREAM').fetchone() stats.update(x) logging.info(stats) return render_template('index.html',stats=stats) # Initialize db # It assumes a script called db.sql is stored in the sql folder @APP.route('/init/') def init(): return render_template('init.html', init=db.init()) # Movies @APP.route('/movies/') def list_movies(): movies = db.execute( ''' SELECT MovieId, Title, Year, Duration FROM MOVIE ORDER BY Title ''').fetchall() return render_template('movie-list.html', movies=movies) @APP.route('/movies/<int:id>/') def get_movie(id): movie = db.execute( ''' SELECT MovieId, Title, Year, Duration FROM MOVIE WHERE movieId = %s ''', id).fetchone() if movie is None: abort(404, 'Movie id {} does not exist.'.format(id)) genres = db.execute( ''' SELECT GenreId, Label FROM MOVIE_GENRE NATURAL JOIN GENRE WHERE movieId = %s ORDER BY Label ''', id).fetchall() actors = db.execute( ''' SELECT ActorId, Name FROM MOVIE_ACTOR NATURAL JOIN ACTOR WHERE MovieId = %s ORDER BY Name ''', id).fetchall() streams = db.execute( ''' SELECT StreamId, StreamDate FROM STREAM WHERE MovieId = %s ORDER BY StreamDate Desc ''', id).fetchall(); return render_template('movie.html', movie=movie, genres=genres, actors=actors, streams=streams) @APP.route('/movies/search/<expr>/') def search_movie(expr): search = { 'expr': expr } expr = '%' + expr + '%' movies = db.execute( ''' SELECT MovieId, Title FROM MOVIE WHERE Title LIKE %s ''', expr).fetchall() return render_template('movie-search.html', search=search,movies=movies) # Actors @APP.route('/actors/') def list_actors(): actors = db.execute(''' SELECT ActorId, Name FROM Actor ORDER BY Name ''').fetchall() return render_template('actor-list.html', actors=actors) @APP.route('/actors/<int:id>/') def view_movies_by_actor(id): actor = db.execute( ''' SELECT ActorId, Name FROM ACTOR WHERE actorId = %s ''', id).fetchone() if actor is None: abort(404, 'Actor id {} does not exist.'.format(id)) movies = db.execute( ''' SELECT MovieId, Title FROM MOVIE NATURAL JOIN MOVIE_ACTOR WHERE actorId = %s ORDER BY Title ''', id).fetchall() return render_template('actor.html', actor=actor, movies=movies) @APP.route('/actors/search/<expr>/') def search_actor(expr): search = { 'expr': expr } # SQL INJECTION POSSIBLE! - avoid this! 
actors = db.execute( ' SELECT ActorId, Name' ' FROM ACTOR ' ' WHERE NAME LIKE \'%' + expr + '%\'' ).fetchall() return render_template('actor-search.html', search=search,actors=actors) # Genres @APP.route('/genres/') def list_genres(): genres = db.execute(''' SELECT GenreId, Label FROM GENRE ORDER BY Label ''').fetchall() return render_template('genre-list.html', genres=genres) @APP.route('/genres/<int:id>/') def view_movies_by_genre(id): genre = db.execute( ''' SELECT GenreId, Label FROM GENRE WHERE GenreId = %s ''', id).fetchone() if genre is None: abort(404, 'Genre id {} does not exist.'.format(id)) movies = db.execute( ''' SELECT MovieId, Title FROM MOVIE NATURAL JOIN MOVIE_GENRE WHERE GenreId = %s ORDER BY Title ''', id).fetchall() return render_template('genre.html', genre=genre, movies=movies) # Streams @APP.route('/streams/<int:id>/') def get_stream(id): stream = db.execute( ''' SELECT StreamId, StreamDate, Charge, MovieId, Title, CustomerId, Name FROM STREAM NATURAL JOIN MOVIE NATURAL JOIN CUSTOMER WHERE StreamId = %s ''', id).fetchone() if stream is None: abort(404, 'Stream id {} does not exist.'.format(id)) return render_template('stream.html', stream=stream) # Staff @APP.route('/staff/') def list_staff(): staff = db.execute(''' SELECT S1.StaffId AS StaffId, S1.Name AS Name, S1.Job AS Job, S1.Supervisor AS Supervisor, S2.Name AS SupervisorName FROM STAFF S1 LEFT JOIN STAFF S2 ON(S1.Supervisor = S2.StaffId) ORDER BY S1.Name ''').fetchall() return render_template('staff-list.html', staff=staff) @APP.route('/staff/<int:id>/') def show_staff(id): staff = db.execute( ''' SELECT StaffId, Name, Supervisor, Job FROM STAFF WHERE staffId = %s ''', id).fetchone() if staff is None: abort(404, 'Staff id {} does not exist.'.format(id)) superv={} if not (staff['Supervisor'] is None): superv = db.execute( ''' SELECT Name FROM staff WHERE staffId = %s ''', staff['Supervisor']).fetchone() supervisees = [] supervisees = db.execute( ''' SELECT StaffId, Name from staff where supervisor = %s ORDER BY Name ''',id).fetchall() return render_template('staff.html', staff=staff, superv=superv, supervisees=supervisees)
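# A hedged sketch of an injection-safe variant of search_actor() above,
# using the same parameterized db.execute(...) style as search_movie();
# the helper name is illustrative and is not registered as a route.
def _search_actor_safe(expr):
    expr = '%' + expr + '%'
    return db.execute(
        '''
        SELECT ActorId, Name
        FROM ACTOR
        WHERE Name LIKE %s
        ''', expr).fetchall()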
StarcoderdataPython
1676268
<filename>examples/cv/mnist_lenet5_image_classification_pure_lightning.py
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from dataclasses import dataclass

import pytorch_lightning as ptl
from omegaconf import DictConfig

from nemo.collections.cv.models import MNISTLeNet5, MNISTLeNet5Config
from nemo.core.config import Config, TrainerConfig, set_config
from nemo.utils import logging


@dataclass
class AppConfig(Config):
    """
    This is the structured config for this application.

    Args:
        name: Description of the application.
        trainer: configuration of the trainer.
        model: configuration of the model.
    """

    name: str = "Training of a LeNet-5 Model using a pure PyTorchLightning approach - using DDP on 2 GPUs."
    trainer: TrainerConfig = TrainerConfig(gpus=2, accelerator="ddp")
    model: MNISTLeNet5Config = MNISTLeNet5Config()


@set_config(config=AppConfig)
def main(cfg: DictConfig):
    # Show configuration - user can modify every parameter from command line!
    logging.info("Application config\n" + cfg.pretty())

    # The "model" - with dataloader/dataset inside of it.
    lenet5 = MNISTLeNet5(cfg.model)

    # Setup train data loader and optimizer
    lenet5.setup_training_data()

    # Setup optimizer and scheduler
    lenet5.setup_optimization()

    # Create trainer.
    trainer = ptl.Trainer(**(cfg.trainer))

    # Train.
    trainer.fit(model=lenet5)


if __name__ == "__main__":
    main()

# TODO: No cfg in function call, and no hydra runner
StarcoderdataPython
3360899
<gh_stars>0
'''
PROBLEM DESCRIPTION
Koko builds N stacks of wood inside a container, then tilts the container to
the right. Because of the tilt, many of the logs may slide to the right.
Suppose Koko starts with 4 stacks of heights 3, 2, 1, 2. After tilting to the
right, the final arrangement (from the left) is 1, 2, 2, 3, as shown in the
picture (quizJ.jpg, in the same directory).
If N is very large, Koko wonders what the final arrangement looks like.

INPUT FORMAT
The input is a single line of numbers giving the height of each stack
(no stack is taller than 100). The numbers are separated by spaces.

OUTPUT FORMAT
Print the final arrangement.

SAMPLE INPUT
3 2 1 2
SAMPLE OUTPUT
1 2 2 3

SAMPLE INPUT
2 3 8
SAMPLE OUTPUT
2 3 8
'''
# This is an ascending sorting problem
num_list = list(map(int, input().split()))


def merge_sort(original_list):
    original_list_len = len(original_list)
    if original_list_len <= 1:  # <= also guards against an empty input line
        return original_list

    # Split
    mid_index = original_list_len // 2
    left_list = original_list[:mid_index]
    right_list = original_list[mid_index:]
    merge_sort(left_list)
    merge_sort(right_list)

    # Merge (in place, back into original_list)
    i = 0  # iterator for left_list
    j = 0  # iterator for right_list
    k = 0  # iterator for original_list
    left_list_len = len(left_list)
    right_list_len = len(right_list)
    while i < left_list_len and j < right_list_len:
        if left_list[i] < right_list[j]:
            original_list[k] = left_list[i]
            i += 1
        else:
            original_list[k] = right_list[j]
            j += 1
        k += 1

    # Leftover
    while i < left_list_len:
        original_list[k] = left_list[i]
        i += 1
        k += 1
    while j < right_list_len:
        original_list[k] = right_list[j]
        j += 1
        k += 1


merge_sort(num_list)
num_list_str_list = list(map(str, num_list))
print(' '.join(num_list_str_list))
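
# Self-check (illustrative addition, not part of the judged solution): the
# "tilt" is just an ascending sort, verified here on both sample cases from
# the problem statement. Asserts print nothing, so stdout is unchanged.
_demo = [3, 2, 1, 2]
merge_sort(_demo)  # sorts in place
assert _demo == [1, 2, 2, 3]
_demo = [2, 3, 8]
merge_sort(_demo)
assert _demo == [2, 3, 8]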
StarcoderdataPython
3344783
<reponame>freedge/fake-switches # Copyright 2015-2016 Internap. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from fake_switches.netconf import dict_2_etree, XML_ATTRIBUTES, XML_TEXT from hamcrest import assert_that, has_length, has_items, equal_to, is_, is_not, contains_string from ncclient.operations import RPCError from ncclient.xml_ import to_xml from tests import contains_regex from tests.juniper import BaseJuniper, vlan from tests.juniper.assertion_tools import has_xpath from tests.netconf.netconf_protocol_test import xml_equals_to class JuniperBaseProtocolTest(BaseJuniper): test_switch = "juniper" def test_capabilities(self): assert_that(self.nc.server_capabilities, has_items( "urn:ietf:params:xml:ns:netconf:base:1.0", "urn:ietf:params:xml:ns:netconf:capability:candidate:1.0", "urn:ietf:params:xml:ns:netconf:capability:confirmed-commit:1.0", "urn:ietf:params:xml:ns:netconf:capability:validate:1.0", "urn:ietf:params:xml:ns:netconf:capability:url:1.0?protocol=http,ftp,file", "http://xml.juniper.net/netconf/junos/1.0", "http://xml.juniper.net/dmi/system/1.0", )) def test_get_running_config_shows_nothing_by_default(self): result = self.nc.get_config(source="running") conf = result._NCElement__result.xml assert_that(conf, contains_regex( '<configuration xmlns="http://xml.juniper.net/xnm/1.1/xnm" junos:commit-localtime="[^"]*" junos:commit-seconds="[^"]*" junos:commit-user="[^"]*"')) assert_that(result.xpath("data/configuration/*"), has_length(0)) def test_only_configured_interfaces_are_returned(self): self.edit({ "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"description": "I see what you did there!"}]}}) self.nc.commit() result = self.nc.get_config(source="running") assert_that(result.xpath("data/configuration/interfaces/*"), has_length(1)) self.cleanup(reset_interface("ge-0/0/3")) def test_lock_edit_candidate_add_vlan_and_commit(self): with self.nc.locked(target='candidate'): result = self.nc.edit_config(target='candidate', config=dict_2_etree({ "config": { "configuration": { "vlans": { "vlan": { "name": "VLAN2999", } } } }})) assert_that(result.xpath("//rpc-reply/ok"), has_length(1)) result = self.nc.commit() assert_that(result.xpath("//rpc-reply/ok"), has_length(1)) result = self.nc.get_config(source="running") assert_that(result.xpath("data/configuration/vlans/vlan"), has_length(1)) self.edit({ "vlans": { "vlan": { XML_ATTRIBUTES: {"operation": "delete"}, "name": "VLAN2999" } } }) self.nc.commit() result = self.nc.get_config(source="running") assert_that(result.xpath("data/configuration/vlans/vlan"), has_length(0)) def test_locking_fails_if_changes_are_being_made(self): nc2 = self.create_client() try: self.nc.edit_config(target='candidate', config=dict_2_etree({ "config": { "configuration": { "vlans": { "vlan": [ {"name": "VLAN2999"}, {"description": "WHAAT"} ] } } }})) with self.assertRaises(RPCError): with nc2.locked(target='candidate'): self.fail('Should not be able to lock an edited configuration') finally: self.nc.discard_changes() nc2.close_session() def 
test_double_locking_with_two_sessions(self): nc2 = self.create_client() try: with self.nc.locked(target='candidate'): with self.assertRaises(RPCError): with nc2.locked(target='candidate'): self.fail("The second lock should not have worked.") finally: nc2.close_session() def test_bad_configuration_element(self): with self.assertRaises(RPCError): self.nc.edit_config(target='candidate', config=dict_2_etree({ "config": { "configuration": { "vbleh": "shizzle" } }})) def test_create_vlan(self): self.nc.edit_config(target='candidate', config=dict_2_etree({"config": {"configuration": { "vlans": { "vlan": [ {"name": "VLAN2999"}, {"description": "WHAAT"}, {"vlan-id": "2995"} ] } }}})) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"vlans": {}}} })) assert_that(result.xpath("data/*"), has_length(1)) assert_that(result.xpath("data/configuration/*"), has_length(1)) assert_that(result.xpath("data/configuration/vlans/*"), has_length(1)) assert_that(result.xpath("data/configuration/vlans/vlan/*"), has_length(3)) vlan2995 = result.xpath("data/configuration/vlans/vlan")[0] assert_that(vlan2995.xpath("name")[0].text, equal_to("VLAN2999")) assert_that(vlan2995.xpath("description")[0].text, equal_to("WHAAT")) assert_that(vlan2995.xpath("vlan-id")[0].text, equal_to("2995")) self.cleanup(vlan("VLAN2999")) def test_vlan_configuration_merging(self): self.edit({ "vlans": { "vlan": [ {"name": "VLAN2999"}, {"vlan-id": "2995"} ]}}) self.edit({ "vlans": { "vlan": [ {"name": "VLAN2999"}, {"description": "shizzle"} ]}}) self.nc.commit() self.edit({ "vlans": { "vlan": [ {"name": "VLAN2999"}, {"vlan-id": "2996"}, {"description": {XML_ATTRIBUTES: {"operation": "delete"}}} ]}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"vlans": {}}} })) assert_that(result.xpath("data/configuration/vlans/vlan"), has_length(1)) vlan2995 = result.xpath("data/configuration/vlans/vlan")[0] assert_that(vlan2995.xpath("name")[0].text, equal_to("VLAN2999")) assert_that(vlan2995.xpath("description"), has_length(0)) assert_that(vlan2995.xpath("vlan-id")[0].text, equal_to("2996")) self.cleanup(vlan("VLAN2999")) def test_deletion_errors(self): self.edit({ "vlans": { "vlan": [ {"name": "VLAN2999"}, {"vlan-id": "2995"}]}}) with self.assertRaises(RPCError): self.edit({ "vlans": { "vlan": { "name": "VLAN3000", XML_ATTRIBUTES: {"operation": "delete"}}}}) with self.assertRaises(RPCError): self.edit({ "vlans": { "vlan": [ {"name": "VLAN2999"}, {"description": {XML_ATTRIBUTES: {"operation": "delete"}}} ]}}) self.nc.commit() with self.assertRaises(RPCError): self.edit({ "vlans": { "vlan": { "name": "VLAN3000", XML_ATTRIBUTES: {"operation": "delete"}}}}) with self.assertRaises(RPCError): self.edit({ "vlans": { "vlan": [ {"name": "VLAN2999"}, {"description": {XML_ATTRIBUTES: {"operation": "delete"}}} ]}}) self.cleanup(vlan("VLAN2999")) def test_access_mode(self): self.edit({ "vlans": { "vlan": [ {"name": "VLAN2995"}, {"vlan-id": "2995"}]}, "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "access", "vlan": [ {"members": "2995"}, ]}}}]}]}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"interfaces": {"interface": {"name": "ge-0/0/3"}}}} })) assert_that(result.xpath("data/configuration/interfaces/interface"), has_length(1)) int003 = 
result.xpath("data/configuration/interfaces/interface")[0] assert_that(int003.xpath("name")[0].text, equal_to("ge-0/0/3")) assert_that(int003.xpath("unit/family/ethernet-switching/*"), has_length(2)) assert_that(int003.xpath("unit/family/ethernet-switching/port-mode")[0].text, equal_to("access")) assert_that(int003.xpath("unit/family/ethernet-switching/vlan/members"), has_length(1)) assert_that(int003.xpath("unit/family/ethernet-switching/vlan/members")[0].text, equal_to("2995")) self.cleanup(vlan("VLAN2995"), reset_interface("ge-0/0/3")) def test_assigning_unknown_vlan_raises(self): self.edit({ "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "vlan": {"members": "2000"}}}}]}]}}) with self.assertRaises(RPCError): self.nc.commit() def test_assigning_unknown_vlan_in_a_range_raises(self): self.edit({ "vlans": { "vlan": [ {"name": "VLAN2995"}, {"vlan-id": "2995"}]}, "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "trunk", "vlan": {"members": "2995-2996"}}}}]}]}}) with self.assertRaises(RPCError): self.nc.commit() def test_assigning_unknown_native_vlan_raises(self): self.edit({ "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "native-vlan-id": "2000"}}}]}]}}) with self.assertRaises(RPCError): self.nc.commit() def test_trunk_mode_allows_no_vlan_members(self): self.edit({ "vlans": [ {"vlan": [ {"name": "VLAN2995"}, {"vlan-id": "2995"}]}, {"vlan": [ {"name": "VLAN2996"}, {"vlan-id": "2996"}]}, {"vlan": [ {"name": "VLAN2997"}, {"vlan-id": "2997"}]}, ], "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"native-vlan-id": "2996"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "trunk" }}}]}]}}) self.nc.commit() self.cleanup(vlan("VLAN2995"), vlan("VLAN2996"), vlan("VLAN2997"), reset_interface("ge-0/0/3")) result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"vlans": {}}} })) assert_that(result.xpath("data/configuration/vlans/vlan"), has_length(0)) def test_trunk_mode(self): self.edit({ "vlans": [ {"vlan": [ {"name": "VLAN2995"}, {"vlan-id": "2995"}]}, {"vlan": [ {"name": "VLAN2996"}, {"vlan-id": "2996"}]}, {"vlan": [ {"name": "VLAN2997"}, {"vlan-id": "2997"}]}, ], "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "trunk", "native-vlan-id": "2996", "vlan": [ {"members": "2995"}, {"members": "2997"}, ]}}}]}]}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"interfaces": {"interface": {"name": "ge-0/0/3"}}}} })) assert_that(result.xpath("data/configuration/interfaces/interface"), has_length(1)) int003 = result.xpath("data/configuration/interfaces/interface")[0] assert_that(int003.xpath("name")[0].text, equal_to("ge-0/0/3")) assert_that(int003.xpath("unit/family/ethernet-switching/*"), has_length(3)) assert_that(int003.xpath("unit/family/ethernet-switching/port-mode")[0].text, equal_to("trunk")) assert_that(int003.xpath("unit/family/ethernet-switching/native-vlan-id")[0].text, equal_to("2996")) assert_that(int003.xpath("unit/family/ethernet-switching/vlan/members"), has_length(2)) assert_that(int003.xpath("unit/family/ethernet-switching/vlan/members")[0].text, equal_to("2995")) assert_that(int003.xpath("unit/family/ethernet-switching/vlan/members")[1].text, equal_to("2997")) 
self.edit({ "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "vlan": [ {"members": {XML_TEXT: "2995", XML_ATTRIBUTES: {"operation": "delete"}}}, ]}}}]}]}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"interfaces": {"interface": {"name": "ge-0/0/3"}}}} })) int003 = result.xpath("data/configuration/interfaces/interface")[0] assert_that(int003.xpath("unit/family/ethernet-switching/vlan/members"), has_length(1)) assert_that(int003.xpath("unit/family/ethernet-switching/vlan/members")[0].text, equal_to("2997")) self.edit({ "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "vlan": [ {"members": {XML_TEXT: "2997", XML_ATTRIBUTES: {"operation": "delete"}}}, ]}}}]}]}}) self.nc.commit() self.cleanup(vlan("VLAN2995"), vlan("VLAN2996"), vlan("VLAN2997"), reset_interface("ge-0/0/3")) result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"vlans": {}}} })) assert_that(result.xpath("data/configuration/vlans/vlan"), has_length(0)) def test_interface_trunk_native_vlan_merge(self): self.edit({ "vlans": [ {"vlan": [ {"name": "VLAN2995"}, {"vlan-id": "2995"}]}, {"vlan": [ {"name": "VLAN2996"}, {"vlan-id": "2996"}]}, {"vlan": [ {"name": "VLAN2997"}, {"vlan-id": "2997"}]}, ], "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "trunk", "native-vlan-id": "2995", "vlan": [ {"members": "2997"}, ]}}}]}]}}) self.nc.commit() self.edit({ "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "trunk", "native-vlan-id": "2996", "vlan": [ {"members": "2997"}, ]}}}]}]}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"interfaces": {"interface": {"name": "ge-0/0/3"}}}} })) assert_that(result.xpath("data/configuration/interfaces/interface"), has_length(1)) int003 = result.xpath("data/configuration/interfaces/interface")[0] assert_that(int003.xpath("unit/family/ethernet-switching/native-vlan-id")[0].text, equal_to("2996")) self.cleanup(vlan("VLAN2995"), vlan("VLAN2996"), vlan("VLAN2997"), reset_interface("ge-0/0/3")) result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"vlans": {}}} })) assert_that(result.xpath("data/configuration/vlans/vlan"), has_length(0)) def test_interface_set_trunk_native_vlan_then_set_members_after(self): self.edit({ "vlans": [ {"vlan": [ {"name": "VLAN2995"}, {"vlan-id": "2995"}]}, {"vlan": [ {"name": "VLAN2996"}, {"vlan-id": "2996"}]}, {"vlan": [ {"name": "VLAN2997"}, {"vlan-id": "2997"}]}, ], "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "trunk" }}}]}]}}) self.nc.commit() self.edit({ "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "native-vlan-id": "2995" }}}]}]}}) self.nc.commit() self.edit({ "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "vlan": [ {"members": "2997"}, ]}}}]}]}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"interfaces": {"interface": {"name": "ge-0/0/3"}}}} })) 
assert_that(result.xpath("data/configuration/interfaces/interface"), has_length(1)) int003 = result.xpath("data/configuration/interfaces/interface")[0] assert_that(int003.xpath("unit/family/ethernet-switching/native-vlan-id")[0].text, equal_to("2995")) self.cleanup(vlan("VLAN2995"), vlan("VLAN2996"), vlan("VLAN2997"), reset_interface("ge-0/0/3")) result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"vlans": {}}} })) assert_that(result.xpath("data/configuration/vlans/vlan"), has_length(0)) def test_passing_from_trunk_mode_to_access_gets_rid_of_stuff_in_trunk_mode(self): self.edit({ "vlans": [ {"vlan": [ {"name": "VLAN1100"}, {"vlan-id": "1100"}]}, {"vlan": [ {"name": "VLAN1200"}, {"vlan-id": "1200"}]}, {"vlan": [ {"name": "VLAN1300"}, {"vlan-id": "1300"}]}, {"vlan": [ {"name": "VLAN1400"}, {"vlan-id": "1400"}]}, ]}) self.nc.commit() self.edit({ "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "trunk" }}}]}]}}) self.nc.commit() self.edit({ "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "native-vlan-id": "1200" }}}]}]}}) self.nc.commit() self.edit({ "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "vlan": [ {"members": "1100"}, {"members": "1300"}, {"members": "1400"}, ]}}}]}]}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"interfaces": {"interface": {"name": "ge-0/0/3"}}}} })) assert_that(result.xpath("data/configuration/interfaces/interface"), has_length(1)) int003 = result.xpath("data/configuration/interfaces/interface")[0] assert_that(int003.xpath("unit/family/ethernet-switching/port-mode")[0].text, equal_to("trunk")) assert_that(int003.xpath("unit/family/ethernet-switching/native-vlan-id")[0].text, equal_to("1200")) assert_that(int003.xpath("unit/family/ethernet-switching/vlan/members"), has_length(3)) self.edit({ "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "access" }}}]}]}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"interfaces": {"interface": {"name": "ge-0/0/3"}}}} })) int003 = result.xpath("data/configuration/interfaces/interface")[0] assert_that(int003.xpath("unit/family/ethernet-switching/port-mode")[0].text, equal_to("access")) assert_that(int003.xpath("unit/family/ethernet-switching/native-vlan-id"), has_length(0)) assert_that(int003.xpath("unit/family/ethernet-switching/vlan/members"), has_length(0)) self.cleanup(vlan("VLAN1100"), vlan("VLAN1200"), vlan("VLAN1300"), vlan("VLAN1400"), reset_interface("ge-0/0/3")) result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"vlans": {}}} })) assert_that(result.xpath("data/configuration/vlans/vlan"), has_length(0)) def test_display_interface_with_description_and_trunk_native_vlan(self): self.edit({ "vlans": [ {"vlan": [ {"name": "VLAN2995"}, {"vlan-id": "2995"}]}, {"vlan": [ {"name": "VLAN2996"}, {"vlan-id": "2996"}]}, {"vlan": [ {"name": "VLAN2997"}, {"vlan-id": "2997"}]}, ], "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"description": "I see what you did there!"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "trunk", "native-vlan-id": "2996", "vlan": [ {"members": "2995"}, 
{"members": "2997"}, ]}}}]}]}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"interfaces": {"interface": {"name": "ge-0/0/3"}}}} })) assert_that(result.xpath("data/configuration/interfaces/interface"), has_length(1)) int003 = result.xpath("data/configuration/interfaces/interface")[0] assert_that(int003.xpath("name")[0].text, equal_to("ge-0/0/3")) assert_that(int003.xpath("description")[0].text, equal_to("I see what you did there!")) assert_that(int003.xpath("unit/family/ethernet-switching/vlan/members")), has_length(2) members = int003.xpath("unit/family/ethernet-switching/vlan/members") assert_that(members[0].text, equal_to("2995")) assert_that(members[1].text, equal_to("2997")) assert_that(int003.xpath("unit/family/ethernet-switching/native-vlan-id")[0].text, equal_to("2996")) self.cleanup(vlan("VLAN2995"), vlan("VLAN2996"), vlan("VLAN2997"), reset_interface("ge-0/0/3")) result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"vlans": {}}} })) assert_that(result.xpath("data/configuration/vlans/vlan"), has_length(0)) def test_display_interface_trunk_native_vlan_and_no_vlan_members_or_trunk_mode(self): self.edit({ "vlans": [ {"vlan": [ {"name": "VLAN2996"}, {"vlan-id": "2996"}]} ], "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "native-vlan-id": "2996" }}}]}]}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"interfaces": {"interface": {"name": "ge-0/0/3"}}}} })) assert_that(result.xpath("data/configuration/interfaces/interface"), has_length(1)) int003 = result.xpath("data/configuration/interfaces/interface")[0] assert_that(int003.xpath("name")[0].text, equal_to("ge-0/0/3")) assert_that(int003.xpath("unit/family/ethernet-switching/native-vlan-id")[0].text, equal_to("2996")) self.cleanup(vlan("VLAN2996"), reset_interface("ge-0/0/3")) result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"vlans": {}}} })) assert_that(result.xpath("data/configuration/vlans/vlan"), has_length(0)) def test_set_spanning_tree_options(self): self.edit({ "protocols": { "rstp": { "interface": [ {"name": "ge-0/0/3"}, {"edge": ""}, {"no-root-port": ""}]}}, "interfaces": [ _access_interface("ge-0/0/3") ]}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"protocols": {"rstp": {"interface": {"name": "ge-0/0/3.0"}}}}} })) assert_that(result.xpath("data/configuration/protocols/rstp/interface"), has_length(1)) interface = result.xpath("data/configuration/protocols/rstp/interface")[0] assert_that(interface, has_length(3)) assert_that(interface.xpath("name")[0].text, equal_to("ge-0/0/3.0")) assert_that(interface.xpath("edge"), has_length(1)) assert_that(interface.xpath("no-root-port"), has_length(1)) self.edit({ "protocols": { "rstp": { "interface": { XML_ATTRIBUTES: {"operation": "delete"}, "name": "ge-0/0/3"}}}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"protocols": ""}} })) assert_that(result.xpath("data/configuration/protocols"), has_length(0)) self.cleanup(reset_interface("ge-0/0/3")) def test_deleting_spanning_tree_options(self): self.edit({ "protocols": { "rstp": { "interface": [ {"name": "ge-0/0/3"}, {"edge": ""}, {"no-root-port": ""}]}}, "interfaces": [ _access_interface("ge-0/0/3") ]}) 
self.nc.commit() self.edit({ "protocols": { "rstp": { "interface": [ {"name": "ge-0/0/3"}, {"edge": {XML_ATTRIBUTES: {"operation": "delete"}}}, {"no-root-port": {XML_ATTRIBUTES: {"operation": "delete"}}}]}}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"protocols": {"rstp": {"interface": {"name": "ge-0/0/3.0"}}}}} })) assert_that(result.xpath("data/configuration/protocols/rstp/interface"), has_length(0)) self.cleanup(reset_interface("ge-0/0/3")) def test_deleting_spanning_tree_options_on_unconfigured_bond_does_nothing(self): self.edit({ "protocols": { "rstp": { "interface": [ {XML_ATTRIBUTES: {"operation": "delete"}}, {"name": "ae2"}]}}}) self.nc.commit() def test_set_lldp(self): self.edit({ "protocols": { "lldp": { "interface": [ {"name": "ge-0/0/3"}, {"disable": ""}]}}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"protocols": {"lldp": {"interface": {"name": "ge-0/0/3.0"}}}}} })) assert_that(result.xpath("data/configuration/protocols/lldp/interface"), has_length(1)) interface = result.xpath("data/configuration/protocols/lldp/interface")[0] assert_that(interface, has_length(2)) assert_that(interface.xpath("name")[0].text, equal_to("ge-0/0/3.0")) assert_that(len(interface.xpath("disable")), equal_to(1)) self.edit({ "protocols": { "lldp": { "interface": [ {"name": "ge-0/0/3"}, {"disable": {XML_ATTRIBUTES: {"operation": "delete"}}}]}}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"protocols": {"lldp": {"interface": {"name": "ge-0/0/3.0"}}}}} })) assert_that(result.xpath("data/configuration/protocols/lldp/interface")[0], has_length(1)) self.edit({ "protocols": { "lldp": { "interface": { XML_ATTRIBUTES: {"operation": "delete"}, "name": "ge-0/0/3"}}}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"protocols": ""}} })) assert_that(result.xpath("data/configuration/protocols"), has_length(0)) def test_lldp_on_unconfigured_bond_works(self): self.edit({ "protocols": { "lldp": { "interface": [ {"name": "ae3"}, {"disable": ""}]}}}) self.nc.commit() self.edit({ "protocols": { "lldp": { "interface": [ {XML_ATTRIBUTES: {"operation": "delete"}}, {"name": "ae3"}]}}}) self.nc.commit() def test_deleting_lldp_on_unconfigured_bond_does_nothing(self): self.edit({ "protocols": { "lldp": { "interface": [ {XML_ATTRIBUTES: {"operation": "delete"}}, {"name": "ae2"}]}}}) self.nc.commit() def test_lldp_is_not_affected_by_the_deletion_of_the_interface(self): self.edit({ "protocols": { "lldp": { "interface": [ {"name": "ge-0/0/3"}, {"disable": ""}]}}}) self.nc.commit() self.edit({ "interfaces": { "interface": [ {XML_ATTRIBUTES: {"operation": "delete"}}, {"name": "ge-0/0/3"}]}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"protocols": {"lldp": {"interface": {"name": "ge-0/0/3.0"}}}}} })) assert_that(result.xpath("data/configuration/protocols/lldp/interface"), has_length(1)) self.cleanup(reset_interface("ge-0/0/3")) def test_set_interface_description(self): self.edit({ "interfaces": { "interface": [ {"name": "ge-0/0/2"}, {"description": "Hey there beautiful"}]}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"interfaces": {"interface": {"name": "ge-0/0/2"}}}} })) 
assert_that(result.xpath("data/configuration/interfaces/interface"), has_length(1)) int002 = result.xpath("data/configuration/interfaces/interface")[0] assert_that(int002.xpath("name")[0].text, equal_to("ge-0/0/2")) assert_that(int002.xpath("description")[0].text, equal_to("Hey there beautiful")) self.edit({ "interfaces": { "interface": [ {"name": "ge-0/0/2"}, {"description": {XML_ATTRIBUTES: {"operation": "delete"}}}]}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"interfaces": {"interface": {"name": "ge-0/0/2"}}}} })) assert_that(result.xpath("data/configuration/interfaces/interface"), has_length(0)) def test_set_interface_raises_on_physical_interface_with_bad_trailing_input(self): with self.assertRaises(RPCError) as exc: self.edit({ "interfaces": { "interface": [ {"name": "ge-0/0/43foobar"}, {"ether-options": { "auto-negotiation": {}}} ]}}) assert_that(str(exc.exception), contains_string("invalid trailing input 'foobar' in 'ge-0/0/43foobar'")) def test_set_interface_raises_for_physical_interface_for_out_of_range_port(self): with self.assertRaises(RPCError) as exc: self.edit({ "interfaces": { "interface": [ {"name": "ge-0/0/128"}, {"ether-options": { "auto-negotiation": {}}} ]}}) assert_that(str(exc.exception), contains_string("port value outside range 1..127 for '128' in 'ge-0/0/128'")) def test_set_interface_raises_on_aggregated_invalid_interface_type(self): with self.assertRaises(RPCError) as exc: self.edit({ "interfaces": { "interface": [ {"name": "ae34foobar345"}, {"ether-options": { "auto-negotiation": {}}} ]}}) assert_that(str(exc.exception), contains_string("invalid interface type in 'ae34foobar345'")) def test_set_interface_raises_on_aggregated_out_of_range_port(self): with self.assertRaises(RPCError) as exc: self.edit({ "interfaces": { "interface": [ {"name": "ae34345"}, {"aggregated-ether-options": { "link-speed": "10g"}} ]}}) assert_that(str(exc.exception), contains_string("device value outside range 0..127 for '34345' in 'ae34345'")) def test_set_interface_disabling(self): self.edit({"interfaces": {"interface": [{"name": "ge-0/0/2"}, {"disable": ""}]}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"interfaces": {"interface": {"name": "ge-0/0/2"}}}}})) int002 = result.xpath("data/configuration/interfaces/interface")[0] assert_that(int002.xpath("disable"), has_length(1)) self.edit({"interfaces": { "interface": [{"name": "ge-0/0/2"}, {"disable": {XML_ATTRIBUTES: {"operation": "delete"}}}]}}) self.nc.commit() result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"interfaces": {"interface": {"name": "ge-0/0/2"}}}}})) assert_that(result.xpath("data/configuration/interfaces"), has_length(0)) def test_set_interface_enabling_already_enabled(self): with self.assertRaises(RPCError) as exc: self.edit({"interfaces": { "interface": [{"name": "ge-0/0/2"}, {"disable": {XML_ATTRIBUTES: {"operation": "delete"}}}]}}) self.nc.commit() assert_that(str(exc.exception), is_("statement not found: ")) def test_create_aggregated_port(self): self.edit({ "interfaces": { "interface": [ {"name": "ae1"}, {"description": "This is a Greg hated"}]}}) self.nc.commit() ae1 = self.get_interface("ae1") assert_that(ae1.xpath("*"), has_length(2)) assert_that(ae1.xpath("description")[0].text, is_("This is a Greg hated")) self.edit({ "interfaces": { "interface": [ {"name": "ae1"}, {"description": {XML_ATTRIBUTES: {"operation": 
"delete"}}}, {"aggregated-ether-options": { "link-speed": "10g", "lacp": { "active": {}, "periodic": "slow"}}}]}}) self.nc.commit() ae1 = self.get_interface("ae1") assert_that(ae1.xpath("*"), has_length(2)) assert_that(ae1.xpath("aggregated-ether-options/*"), has_length(2)) assert_that(ae1.xpath("aggregated-ether-options/link-speed")[0].text, is_("10g")) assert_that(ae1.xpath("aggregated-ether-options/lacp/*"), has_length(2)) assert_that(ae1.xpath("aggregated-ether-options/lacp/active"), has_length(1)) assert_that(ae1.xpath("aggregated-ether-options/lacp/periodic")[0].text, is_("slow")) self.edit({ "vlans": [ {"vlan": [ {"name": "VLAN2995"}, {"vlan-id": "2995"}]}, {"vlan": [ {"name": "VLAN2997"}, {"vlan-id": "2997"}]}, ], "interfaces": { "interface": [ {"name": "ae1"}, {"aggregated-ether-options": { "link-speed": {XML_ATTRIBUTES: {"operation": "delete"}}, "lacp": { "active": {XML_ATTRIBUTES: {"operation": "delete"}}, "periodic": "slow"}}}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "trunk", "vlan": [ {"members": "2995"}, {"members": "2997"}]}}}]}]}}) self.nc.commit() ae1 = self.get_interface("ae1") assert_that(ae1.xpath("*"), has_length(3)) assert_that(ae1.xpath("aggregated-ether-options/*"), has_length(1)) assert_that(ae1.xpath("aggregated-ether-options/lacp/periodic")[0].text, is_("slow")) assert_that(ae1.xpath("unit/family/ethernet-switching/vlan/members"), has_length(2)) self.cleanup(vlan("VLAN2995"), vlan("VLAN2997"), reset_interface("ae1")) result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"interfaces": {"interface": {"name": "ae1"}}}}})) assert_that(result.xpath("configuration/interfaces"), has_length(0)) def test_auto_negotiation_and_no_auto_negotiation_are_mutually_exclusive(self): self.edit({ "interfaces": [ {"interface": [ {"name": "ge-0/0/1"}, {"ether-options": { "auto-negotiation": {}}}]}]}) self.nc.commit() ge001 = self.get_interface("ge-0/0/1") assert_that(ge001.xpath("ether-options/auto-negotiation"), has_length(1)) assert_that(ge001.xpath("ether-options/no-auto-negotiation"), has_length(0)) self.edit({ "interfaces": [ {"interface": [ {"name": "ge-0/0/1"}, {"ether-options": { "no-auto-negotiation": {}}}]}]}) self.nc.commit() ge001 = self.get_interface("ge-0/0/1") assert_that(ge001.xpath("ether-options/auto-negotiation"), has_length(0)) assert_that(ge001.xpath("ether-options/no-auto-negotiation"), has_length(1)) self.edit({ "interfaces": [ {"interface": [ {"name": "ge-0/0/1"}, {"ether-options": { "no-auto-negotiation": {XML_ATTRIBUTES: {"operation": "delete"}}}}]}]}) self.nc.commit() assert_that(self.get_interface("ge-0/0/1"), is_(None)) def test_posting_delete_on_both_auto_negotiation_flags_delete_and_raises(self): with self.assertRaises(RPCError) as expect: self.edit({ "interfaces": [ {"interface": [ {"name": "ge-0/0/1"}, {"ether-options": { "auto-negotiation": {XML_ATTRIBUTES: {"operation": "delete"}}, "no-auto-negotiation": {XML_ATTRIBUTES: {"operation": "delete"}}}}]}]}) assert_that(str(expect.exception), contains_string("warning: statement not found: no-auto-negotiation")) assert_that(str(expect.exception), contains_string("warning: statement not found: auto-negotiation")) with self.assertRaises(RPCError) as expect: self.edit({ "interfaces": [ {"interface": [ {"name": "ge-0/0/1"}, {"ether-options": { "auto-negotiation": {}, "no-auto-negotiation": {}}}]}]}) assert_that(str(expect.exception), contains_string("syntax error")) self.edit({ "interfaces": [ {"interface": [ {"name": "ge-0/0/1"}, 
{"ether-options": { "auto-negotiation": {}}}]}]}) self.nc.commit() with self.assertRaises(RPCError) as expect: self.edit({ "interfaces": [ {"interface": [ {"name": "ge-0/0/1"}, {"ether-options": { "auto-negotiation": {XML_ATTRIBUTES: {"operation": "delete"}}, "no-auto-negotiation": {XML_ATTRIBUTES: {"operation": "delete"}}}}]}]}) self.nc.commit() assert_that(str(expect.exception), contains_string("warning: statement not found: no-auto-negotiation")) assert_that(str(expect.exception), is_not(contains_string("warning: statement not found: auto-negotiation"))) assert_that(self.get_interface("ge-0/0/1"), is_(None)) def test_assign_port_to_aggregated_interface(self): self.edit({ "vlans": [ {"vlan": [ {"name": "VLAN2995"}, {"vlan-id": "2995"}]}, ], "interfaces": [ {"interface": [ {"name": "ge-0/0/1"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "access"}}}]}]}, {"interface": [ {"name": "ge-0/0/2"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "access"}}}]}]}, ]}) self.nc.commit() self.edit({ "vlans": [ {"vlan": [ {"name": "VLAN2995"}, {"vlan-id": "2995"}]}, ], "interfaces": [ {"interface": [ {"name": "ae1"}, {"aggregated-ether-options": { "link-speed": "10g", "lacp": { "active": {}, "periodic": "slow"}}}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "trunk", "vlan": [ {"members": "2995"}]}}}]}]}, {"interface": [ {"name": "ge-0/0/1"}, {"ether-options": { "auto-negotiation": {}, "speed": {"ethernet-10g": {}}, "ieee-802.3ad": {"bundle": "ae1"}}}, {"unit": {XML_ATTRIBUTES: {"operation": "delete"}}}]}, {"interface": [{XML_ATTRIBUTES: {"operation": "replace"}}, {"name": "ge-0/0/2"}, {"ether-options": { "speed": {"ethernet-10g": {}}, "ieee-802.3ad": {"bundle": "ae1"}}}]}, ]}) self.nc.commit() ge001 = self.get_interface("ge-0/0/1") assert_that(ge001.xpath("*"), has_length(2)) assert_that(ge001.xpath("unit"), has_length(0)) assert_that(ge001.xpath("ether-options/*"), has_length(3)) assert_that(ge001.xpath("ether-options/auto-negotiation"), has_length(1)) assert_that(ge001.xpath("ether-options/speed/ethernet-10g"), has_length(1)) assert_that(ge001.xpath("ether-options/ieee-802.3ad/bundle")[0].text, is_("ae1")) ge002 = self.get_interface("ge-0/0/2") assert_that(ge002.xpath("*"), has_length(2)) assert_that(ge002.xpath("unit"), has_length(0)) assert_that(ge002.xpath("ether-options/*"), has_length(2)) assert_that(ge002.xpath("ether-options/speed/ethernet-10g"), has_length(1)) assert_that(ge002.xpath("ether-options/ieee-802.3ad/bundle")[0].text, is_("ae1")) self.edit({ "interfaces": [ {"interface": [ {"name": "ge-0/0/1"}, {"ether-options": { "auto-negotiation": {XML_ATTRIBUTES: {"operation": "delete"}}, "speed": "10g", "ieee-802.3ad": {XML_ATTRIBUTES: {"operation": "delete"}}}}]}, {"interface": [ {"name": "ge-0/0/2"}, {"ether-options": {XML_ATTRIBUTES: {"operation": "delete"}}}]}, ]}) self.nc.commit() ge001 = self.get_interface("ge-0/0/1") assert_that(ge001.xpath("unit"), has_length(0)) assert_that(ge001.xpath("ether-options/*"), has_length(1)) assert_that(ge001.xpath("ether-options/speed/ethernet-10g"), has_length(1)) ge002 = self.get_interface("ge-0/0/2", ) assert_that(ge002, is_(None)) self.cleanup(vlan("VLAN2995"), reset_interface("ae1"), reset_interface("ge-0/0/1"), reset_interface("ge-0/0/2")) def test_compare_configuration(self): result = self.nc.compare_configuration() output = result.xpath("configuration-information/configuration-output")[0] assert_that(output.text, is_not("There were some changes")) 
self.edit({ "vlans": [ {"vlan": [ {"name": "VLAN2995"}, {"vlan-id": "2995"}]}, ]}) result = self.nc.compare_configuration() output = result.xpath("configuration-information/configuration-output")[0] assert_that(output.text, is_("There were some changes")) self.nc.commit() result = self.nc.compare_configuration() output = result.xpath("configuration-information/configuration-output")[0] assert_that(output.text, is_not("There were some changes")) self.cleanup(vlan("VLAN2995")) def test_discard_trunk_members_really_works(self): self.edit({ "vlans": [ {"vlan": [ {"name": "VLAN10"}, {"vlan-id": "10"}]}, ], "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "trunk", "vlan": [ {"members": "10"}, ]}}}]}]}}) self.nc.commit() before = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": [{"interfaces": {}}, {"vlans": {}}]} })) self.edit({ "vlans": [ {"vlan": [ {"name": "VLAN11"}, {"vlan-id": "11"}]}, ], "interfaces": { "interface": [ {"name": "ge-0/0/3"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "trunk", "vlan": [ {"members": "11"}, ]}}}]}]}}) self.nc.discard_changes() after = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": [{"interfaces": {}}, {"vlans": {}}]} })) assert_that(to_xml(before.xpath("data/configuration/vlans")[0]), xml_equals_to(after.xpath("data/configuration/vlans")[0])) assert_that(to_xml(before.xpath("data/configuration/interfaces")[0]), xml_equals_to(after.xpath("data/configuration/interfaces")[0])) self.cleanup(vlan("VLAN10"), reset_interface("ge-0/0/3")) def test_replace_port_with_nothing_leaves_configuration_empty(self): self.edit({ "interfaces": [ {"interface": [ {"name": "ge-0/0/1"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "access"}}}]}]}, ]}) self.nc.commit() self.edit({ "interfaces": [ {"interface": [{XML_ATTRIBUTES: {"operation": "replace"}}, {"name": "ge-0/0/1"}, ]} ]}) self.nc.commit() get_interface_reply = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"interfaces": {"interface": {"name": "ge-0/0/1"}}}}})) assert_that(get_interface_reply.xpath("data/configuration"), has_length(0)) self.edit({ "interfaces": [ {"interface": [ {"name": "ge-0/0/1"}, {"disable": ""}]}]}) self.nc.commit() get_interface_reply = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"interfaces": {"interface": {"name": "ge-0/0/1"}}}}})) assert_that(get_interface_reply.xpath("data/configuration"), has_length(1)) self.cleanup(reset_interface("ge-0/0/1")) def test_delete_port_leaves_configuration_empty(self): self.edit({ "interfaces": [ {"interface": [ {"name": "ge-0/0/1"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "access"}}}]}]}, ]}) self.nc.commit() self.edit({ "interfaces": [ {"interface": [{XML_ATTRIBUTES: {"operation": "delete"}}, {"name": "ge-0/0/1"}, ]} ]}) self.nc.commit() get_interface_reply = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"interfaces": {"interface": {"name": "ge-0/0/1"}}}}})) assert_that(get_interface_reply.xpath("data/configuration"), has_length(0)) self.edit({ "interfaces": [ {"interface": [ {"name": "ge-0/0/1"}, {"disable": ""}]}]}) self.nc.commit() get_interface_reply = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"interfaces": {"interface": {"name": 
"ge-0/0/1"}}}}})) assert_that(get_interface_reply.xpath("data/configuration"), has_length(1)) self.cleanup(reset_interface("ge-0/0/1")) def test_operational_request_unknown_fails(self): with self.assertRaises(RPCError): self.nc.rpc(dict_2_etree({ "waaaat": {}})) with self.assertRaises(RPCError): self.nc.rpc(dict_2_etree({ "get-interface-information": { "wrong-param": {}}})) def test_operational_request_get_interface_information_terse(self): self.edit({ "interfaces": [ {"interface": [ {XML_ATTRIBUTES: {"operation": "delete"}}, {"name": "ge-0/0/1"}]}, {"interface": [ {"name": "ge-0/0/2"}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "access"}}}]}]}, {"interface": [ {"name": "ge-0/0/3"}, {"ether-options": { "ieee-802.3ad": {"bundle": "ae1"}}}, {"unit": {XML_ATTRIBUTES: {"operation": "delete"}}}]}, {"interface": [ {"name": "ge-0/0/4"}, {"disable": ""}]}, {"interface": [ {"name": "ae3"}, {"aggregated-ether-options": { "lacp": { "active": {}, "periodic": "slow"}}}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "trunk"}}}]}]}, ]}) self.nc.commit() terse = self.nc.rpc(dict_2_etree({ "get-interface-information": { "terse": {}}})) assert_that(terse.xpath("interface-information/physical-interface"), has_length(8)) # 4 physical 4 bonds deleted_interface = terse.xpath("interface-information/physical-interface/name[contains(text(),'\nge-0/0/1\n')]/..")[0] assert_that(deleted_interface.xpath("*"), has_length(3)) assert_that(deleted_interface.xpath("admin-status")[0].text, is_("\nup\n")) assert_that(deleted_interface.xpath("oper-status")[0].text, is_("\ndown\n")) access_mode_interface = terse.xpath("interface-information/physical-interface/name[contains(text(),'\nge-0/0/2\n')]/..")[0] assert_that(access_mode_interface.xpath("*"), has_length(4)) assert_that(access_mode_interface.xpath("admin-status")[0].text, is_("\nup\n")) assert_that(access_mode_interface.xpath("oper-status")[0].text, is_("\ndown\n")) assert_that(access_mode_interface.xpath("logical-interface/*"), has_length(5)) assert_that(access_mode_interface.xpath("logical-interface/name")[0].text, is_("\nge-0/0/2.0\n")) assert_that(access_mode_interface.xpath("logical-interface/admin-status")[0].text, is_("\nup\n")) assert_that(access_mode_interface.xpath("logical-interface/oper-status")[0].text, is_("\ndown\n")) assert_that(access_mode_interface.xpath("logical-interface/filter-information"), has_length(1)) assert_that(access_mode_interface.xpath("logical-interface/filter-information/*"), has_length(0)) assert_that(access_mode_interface.xpath("logical-interface/address-family/*"), has_length(1)) assert_that(access_mode_interface.xpath("logical-interface/address-family/address-family-name")[0].text, is_("\neth-switch\n")) bond_member_interface = terse.xpath("interface-information/physical-interface/name[contains(text(),'\nge-0/0/3\n')]/..")[0] assert_that(bond_member_interface.xpath("*"), has_length(3)) assert_that(bond_member_interface.xpath("admin-status")[0].text, is_("\nup\n")) assert_that(bond_member_interface.xpath("oper-status")[0].text, is_("\ndown\n")) disabled_interface = terse.xpath("interface-information/physical-interface/name[contains(text(),'\nge-0/0/4\n')]/..")[0] assert_that(disabled_interface.xpath("admin-status")[0].text, is_("\ndown\n")) inactive_bond = terse.xpath("interface-information/physical-interface/name[contains(text(),'\nae1\n')]/..")[0] assert_that(inactive_bond.xpath("*"), has_length(3)) assert_that(inactive_bond.xpath("admin-status")[0].text, is_("\nup\n")) 
assert_that(inactive_bond.xpath("oper-status")[0].text, is_("\ndown\n")) active_bond = terse.xpath("interface-information/physical-interface/name[contains(text(),'\nae3\n')]/..")[0] assert_that(active_bond.xpath("*"), has_length(4)) assert_that(active_bond.xpath("admin-status")[0].text, is_("\nup\n")) assert_that(active_bond.xpath("oper-status")[0].text, is_("\ndown\n")) assert_that(active_bond.xpath("logical-interface/*"), has_length(5)) assert_that(active_bond.xpath("logical-interface/name")[0].text, is_("\nae3.0\n")) assert_that(active_bond.xpath("logical-interface/admin-status")[0].text, is_("\nup\n")) assert_that(active_bond.xpath("logical-interface/oper-status")[0].text, is_("\ndown\n")) assert_that(active_bond.xpath("logical-interface/filter-information"), has_length(1)) assert_that(active_bond.xpath("logical-interface/filter-information/*"), has_length(0)) assert_that(active_bond.xpath("logical-interface/address-family/*"), has_length(1)) assert_that(active_bond.xpath("logical-interface/address-family/address-family-name")[0].text, is_("\neth-switch\n")) self.cleanup(reset_interface("ae3"), reset_interface("ge-0/0/1"), reset_interface("ge-0/0/2"), reset_interface("ge-0/0/3"), reset_interface("ge-0/0/4")) def test_removing_bond_membership_node_should_raise(self): with self.assertRaises(RPCError) as expect: self.edit({ "interfaces": [ {"interface": [ {"name": "ge-0/0/1"}, {"ether-options": { "ieee-802.3ad": {XML_ATTRIBUTES: {"operation": "delete"}}}}]} ]}) assert_that(str(expect.exception), is_("statement not found: 802.3ad")) def test_set_interface_mtu(self): self.edit({ "interfaces": [ {"interface": [ {"name": "ge-0/0/2"}, {"mtu": "1000"}]}, {"interface": [ {"name": "ae2"}, {"mtu": "1500"}]}, ]}) self.nc.commit() assert_that(self._interface("ge-0/0/2"), has_xpath("mtu", equal_to("1000"))) assert_that(self._interface("ae2"), has_xpath("mtu", equal_to("1500"))) self.edit({ "interfaces": [ {"interface": [ {"name": "ge-0/0/2"}, {"mtu": {XML_ATTRIBUTES: {"operation": "delete"}}}]}, {"interface": [ {"name": "ae2"}, {"mtu": {XML_ATTRIBUTES: {"operation": "delete"}}}]} ]}) self.nc.commit() assert_that(self._interface("ge-0/0/2"), is_(None)) assert_that(self._interface("ae2"), is_(None)) def test_set_interface_mtu_error_messages(self): with self.assertRaises(RPCError) as exc: self.edit({ "interfaces": { "interface": [ {"name": "ge-0/0/2"}, {"mtu": "wat"}]}}) assert_that(str(exc.exception), contains_string("Invalid numeric value: 'wat'")) with self.assertRaises(RPCError) as exc: self.edit({ "interfaces": { "interface": [ {"name": "ae2"}, {"mtu": "0"}]}}) assert_that(str(exc.exception), contains_string("Value 0 is not within range (256..9216)")) def test_that_an_aggregated_interface_cannot_be_deleted_while_there_is_still_an_rstp_configuration(self): self.edit({ "protocols": { "rstp": { "interface": [ {"name": "ae3"}, {"edge": ""}, {"no-root-port": ""}]}}, "interfaces": [ _access_interface("ae3")]}) self.nc.commit() self.edit({ "interfaces": { "interface": [ {XML_ATTRIBUTES: {"operation": "delete"}}, {"name": "ae3"}]}}) with self.assertRaises(RPCError): self.nc.commit() self.cleanup(reset_interface("ae3")) def test_that_an_interface_cannot_be_deleted_while_there_is_still_an_rstp_configuration(self): self.edit({ "protocols": { "rstp": { "interface": [ {"name": "ge-0/0/3"}, {"edge": ""}, {"no-root-port": ""}]}}, "interfaces": [ _access_interface("ge-0/0/3")]}) self.nc.commit() self.edit({ "interfaces": { "interface": [ {XML_ATTRIBUTES: {"operation": "delete"}}, {"name": "ge-0/0/3"}]}}) with 
self.assertRaises(RPCError): self.nc.commit() self.cleanup(reset_interface("ge-0/0/3")) def _interface(self, name): result = self.nc.get_config(source="running", filter=dict_2_etree({"filter": { "configuration": {"interfaces": {"interface": {"name": name}}}} })) try: return result.xpath("data/configuration/interfaces/interface")[0] except IndexError: return None def reset_interface(interface_name): def m(edit): edit({ "interfaces": { "interface": [ {XML_ATTRIBUTES: {"operation": "delete"}}, {"name": interface_name}]}, "protocols": { "rstp": { "interface": [ {XML_ATTRIBUTES: {"operation": "delete"}}, {"name": interface_name}]}, "lldp": { "interface": [ {XML_ATTRIBUTES: {"operation": "delete"}}, {"name": interface_name}]}}}) return m def _access_interface(name): return {"interface": [ {"name": name}, {"unit": [ {"name": "0"}, {"family": { "ethernet-switching": { "port-mode": "access"}}}]}]}
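
# Editor's note (illustrative, not part of the original suite): every test
# above drives the fake switch through dict_2_etree, which maps nested
# dicts/lists to an XML etree -- lists express repeated or ordered children,
# and the XML_ATTRIBUTES / XML_TEXT keys attach attributes and text nodes.
# Assuming that mapping, a fragment such as
#
#   dict_2_etree({"vlans": {"vlan": [{"name": "VLAN2995"}, {"vlan-id": "2995"}]}})
#
# would serialize to roughly:
#
#   <vlans><vlan><name>VLAN2995</name><vlan-id>2995</vlan-id></vlan></vlans>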
StarcoderdataPython
3270606
<reponame>papsebestyen/sscutils from dataclasses import dataclass from typing import List import sqlalchemy as sa from ...primitive_types import PrimitiveType, get_np_type, get_sa_type @dataclass class Column: name: str dtype: PrimitiveType nullable: bool = False def to_sql_col(col: Column): return sa.Column(col.name, get_sa_type(col.dtype), nullable=col.nullable) def to_dt_map(cols: List[Column]): return {c.name: get_np_type(c.dtype) for c in cols}
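
# Usage sketch (illustrative; not part of the module). It assumes that
# PrimitiveType covers plain builtins such as int and str -- an assumption
# about sscutils.primitive_types, not documented behaviour:
#
#   cols = [Column("id", int), Column("name", str, nullable=True)]
#   table = sa.Table("users", sa.MetaData(), *[to_sql_col(c) for c in cols])
#   dtypes = to_dt_map(cols)  # e.g. usable as pandas read_csv(..., dtype=dtypes)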
StarcoderdataPython
159439
<gh_stars>0
from django import forms
from django.contrib import admin, messages
from django.urls import path
from .models import VolunteerCategory, Request, CategoryType, Role
from django.shortcuts import render, redirect
import datetime
from django.http import HttpResponse
import csv
from backend.settings import TIME_ZONE
from pytz import timezone
from django.contrib.admin.models import LogEntry, CHANGE
from django.contrib.contenttypes.models import ContentType

DATE_CHOICES = [
    (datetime.date(2022, 5, 25), "Wednesday"),
    (datetime.date(2022, 5, 26), "Thursday"),
    (datetime.date(2022, 5, 27), "Friday"),
    (datetime.date(2022, 5, 28), "Saturday"),
    (datetime.date(2022, 5, 29), "Sunday"),
]


def datetime_range(start, end, delta):
    current = start
    while current < end:
        yield current
        current += delta


class DailyCheckinForm(forms.Form):
    selected_date = forms.ChoiceField(choices=DATE_CHOICES)


# Register your models here.
@admin.register(CategoryType)
class CategoryTypeAdmin(admin.ModelAdmin):
    list_display = ["tag"]


@admin.register(Role)
class RoleAdmin(admin.ModelAdmin):
    list_display = [
        "title",
        "description",
        "number_of_open_positions",
        "number_of_positions",
        "category",
    ]


@admin.register(Request)
class RequestAdmin(admin.ModelAdmin):
    list_display = ["status", "user", "role"]
    list_filter = [
        "status",
    ]

    change_list_template = "admin/request_changelist.html"

    def get_urls(self):
        urls = super().get_urls()
        my_urls = [
            path("export-checkin/", self.export_checkin),
        ]
        return my_urls + urls

    def export_checkin(self, request):
        if request.method == "POST":
            form = DailyCheckinForm(request.POST)
            # check whether it's valid:
            if form.is_valid():
                selected_date = form.cleaned_data["selected_date"]
                accepted_requests = Request.requests.select_related(
                    "user", "role__category"
                ).filter(
                    role__category__start_time__date=selected_date, status="ACCEPTED"
                )
                if accepted_requests:
                    field_names = [
                        "last_name",
                        "first_name",
                        "start_time",
                        "end_time",
                        "category",
                        "role",
                        "signature",
                    ]

                    response = HttpResponse(content_type="text/csv")
                    response[
                        "Content-Disposition"
                    ] = "attachment; filename=daily-checkin-{}.csv".format(
                        selected_date
                    )
                    writer = csv.writer(response)

                    writer.writerow(field_names)
                    for r in accepted_requests.order_by("user__last_name"):
                        user = r.user
                        role = r.role.category
                        writer.writerow(
                            [
                                user.last_name,
                                user.first_name,
                                role.start_time.astimezone(timezone(TIME_ZONE)).time(),
                                role.end_time.astimezone(timezone(TIME_ZONE)).time(),
                                role.title,
                                r.role.title,
                                " ",
                            ]
                        )
                    LogEntry.objects.log_action(
                        user_id=request.user.pk,
                        content_type_id=ContentType.objects.get_for_model(Request).pk,
                        object_id=request.user.pk,
                        object_repr="Daily Checkin for {} exported".format(
                            selected_date
                        ),
                        action_flag=CHANGE,
                        change_message="Daily Checkin for {} exported".format(
                            selected_date
                        ),
                    )
                    return response
                else:
                    messages.error(request, "No shifts for selected date")
                    storage = messages.get_messages(request)
                    storage.used = True
                    return redirect("..")
        form = DailyCheckinForm()
        payload = {"form": form}
        return render(request, "admin/export_daily_checkin_form.html", payload)


class RoleInline(admin.TabularInline):
    model = Role


@admin.register(VolunteerCategory)
class VolunteerCategoryAdmin(admin.ModelAdmin):
    list_display = (
        "title",
        "description",
        "start_time",
        "end_time",
        "category_type",
        "number_of_open_positions",
        "number_of_positions",
    )
    list_filter = ["start_time", "category_type"]
    search_fields = ["title"]

    inlines = [RoleInline]

    change_list_template = "admin/volunteer_category_changelist.html"

    def get_urls(self):
urls = super().get_urls() my_urls = [ path("export-schedule/", self.export_schedule), ] return my_urls + urls def export_schedule(self, request): def filter_roles_for_day(year, month, day): return Role.roles.filter( category__start_time__date=datetime.date(year, month, day), ).order_by("category__category_type", "title") def time_range_for_day( start_year=2022, start_month=5, start_day=25, start_hour=8, start_minute=0, end_year=2022, end_month=5, end_day=26, end_hour=0, end_minute=0, ): return [ dt.time() for dt in datetime_range( datetime.datetime( year=start_year, month=start_month, day=start_day, hour=start_hour, minute=start_minute, ).astimezone(timezone(TIME_ZONE)), datetime.datetime( year=end_year, month=end_month, day=end_day, hour=end_hour, minute=end_minute, ).astimezone(timezone(TIME_ZONE)), datetime.timedelta(minutes=30), ) ] def round_time_to_nearest(t, nearest): return t - datetime.timedelta(minutes=t.minute % nearest, seconds=t.second,) def write_day_schedule_to_csv(csv_writer, day_roles, day_heading): for r in day_roles: number_of_positions = r.number_of_positions accepted_requests = r.requests.filter(status=Request.ACCEPTED) volunteers = [ "{} {}".format(ar.user.first_name, ar.user.last_name) for ar in accepted_requests ] volunteers.extend(["X"] * r.number_of_open_positions) for i in range(number_of_positions): row = [r.category.category_type.tag, r.title] row.extend([""] * len(wednesday_times)) start = r.category.start_time.astimezone(timezone(TIME_ZONE)) end = r.category.end_time.astimezone(timezone(TIME_ZONE)) start = round_time_to_nearest(start, 30) end = round_time_to_nearest(end, 30) dts = [ dt.time() for dt in datetime_range( start, end, datetime.timedelta(minutes=30), ) ] for dt in dts: try: row[day_heading.index(dt)] = volunteers[i] except: print("invalid date") csv_writer.writerow(row) wednesday_roles = filter_roles_for_day(2022, 5, 25) thursday_roles = filter_roles_for_day(2022, 5, 26) friday_roles = filter_roles_for_day(2022, 5, 27) saturday_roles = filter_roles_for_day(2022, 5, 28) sunday_roles = filter_roles_for_day(2022, 5, 29) wednesday_times = time_range_for_day( start_year=2022, start_month=5, start_day=25, start_hour=8, start_minute=0, end_year=2022, end_month=5, end_day=26, end_hour=0, end_minute=0, ) thursday_times = time_range_for_day( start_year=2022, start_month=5, start_day=26, start_hour=8, start_minute=0, end_year=2022, end_month=5, end_day=27, end_hour=0, end_minute=0, ) friday_times = time_range_for_day( start_year=2022, start_month=5, start_day=27, start_hour=8, start_minute=0, end_year=2022, end_month=5, end_day=28, end_hour=0, end_minute=0, ) saturday_times = time_range_for_day( start_year=2022, start_month=5, start_day=28, start_hour=7, start_minute=30, end_year=2022, end_month=5, end_day=29, end_hour=0, end_minute=0, ) sunday_times = time_range_for_day( start_year=2022, start_month=5, start_day=29, start_hour=7, start_minute=30, end_year=2022, end_month=5, end_day=30, end_hour=0, end_minute=0, ) wednesday_heading = ["", "time"] wednesday_heading.extend(wednesday_times) thursday_heading = ["", "time"] thursday_heading.extend(thursday_times) friday_heading = ["", "time"] friday_heading.extend(friday_times) saturday_heading = ["", "time"] saturday_heading.extend(saturday_times) sunday_heading = ["", "time"] sunday_heading.extend(sunday_times) response = HttpResponse(content_type="text/csv") response["Content-Disposition"] = "attachment; filename=master-schedule.csv" writer = csv.writer(response) writer.writerow(["Wednesday"]) 
writer.writerow(wednesday_heading) write_day_schedule_to_csv(writer, wednesday_roles, wednesday_heading) writer.writerow(["Thursday"]) writer.writerow(thursday_heading) write_day_schedule_to_csv(writer, thursday_roles, thursday_heading) writer.writerow(["Friday"]) writer.writerow(friday_heading) write_day_schedule_to_csv(writer, friday_roles, friday_heading) writer.writerow(["Saturday"]) writer.writerow(saturday_heading) write_day_schedule_to_csv(writer, saturday_roles, saturday_heading) writer.writerow(["Sunday"]) writer.writerow(sunday_heading) write_day_schedule_to_csv(writer, sunday_roles, sunday_heading) LogEntry.objects.log_action( user_id=request.user.pk, content_type_id=ContentType.objects.get_for_model(Role).pk, object_id=request.user.pk, object_repr="Master schedule exported", action_flag=CHANGE, change_message="Master schedule exported", ) return response
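
# Editor's note (illustrative, not part of the original admin module): the
# datetime_range generator defined near the top is what produces the half-hour
# scheduling slots used for the CSV column headings, e.g.
#
#   slots = list(datetime_range(
#       datetime.datetime(2022, 5, 25, 8, 0),
#       datetime.datetime(2022, 5, 25, 10, 0),
#       datetime.timedelta(minutes=30)))
#   # -> 08:00, 08:30, 09:00, 09:30 (four datetimes; the end bound is excluded)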
StarcoderdataPython
1747098
<filename>telethon/tl/custom/conversation.py
import asyncio
import functools
import inspect
import itertools
import time

from .chatgetter import ChatGetter
from ... import helpers, utils, errors

# Sometimes the edits arrive very fast (within the same second).
# In that case we add a small delta so that the age is older, for
# comparison purposes. This value is enough for up to 1000 messages.
_EDIT_COLLISION_DELTA = 0.001


def _checks_cancelled(f):
    @functools.wraps(f)
    def wrapper(self, *args, **kwargs):
        if self._cancelled:
            raise asyncio.CancelledError('The conversation was cancelled before')

        return f(self, *args, **kwargs)
    return wrapper


class Conversation(ChatGetter):
    """
    Represents a conversation inside a specific chat.

    A conversation keeps track of new messages since it was
    created until its exit and easily lets you query the
    current state.

    If you need a conversation across two or more chats, you should
    use one conversation per chat and synchronize them as you see fit.
    """
    _id_counter = 0
    _custom_counter = 0

    def __init__(self, client, input_chat,
                 *, timeout, total_timeout, max_messages,
                 exclusive, replies_are_responses):
        # This call resets the client
        ChatGetter.__init__(self, input_chat=input_chat)

        self._id = Conversation._id_counter
        Conversation._id_counter += 1

        self._client = client
        self._timeout = timeout
        self._total_timeout = total_timeout
        self._total_due = None

        self._outgoing = set()
        self._last_outgoing = 0
        self._incoming = []
        self._last_incoming = 0
        self._max_incoming = max_messages
        self._last_read = None
        self._custom = {}

        self._pending_responses = {}
        self._pending_replies = {}
        self._pending_edits = {}
        self._pending_reads = {}

        self._exclusive = exclusive
        self._cancelled = False

        # The user is able to expect two responses for the same message.
        # {desired message ID: next incoming index}
        self._response_indices = {}
        if replies_are_responses:
            self._reply_indices = self._response_indices
        else:
            self._reply_indices = {}

        self._edit_dates = {}

    @_checks_cancelled
    async def send_message(self, *args, **kwargs):
        """
        Sends a message in the context of this conversation. Shorthand
        for `telethon.client.messages.MessageMethods.send_message` with
        ``entity`` already set.
        """
        sent = await self._client.send_message(
            self._input_chat, *args, **kwargs)

        # Albums will be lists, so handle that
        ms = sent if isinstance(sent, list) else (sent,)
        self._outgoing.update(m.id for m in ms)
        self._last_outgoing = ms[-1].id
        return sent

    @_checks_cancelled
    async def send_file(self, *args, **kwargs):
        """
        Sends a file in the context of this conversation. Shorthand
        for `telethon.client.uploads.UploadMethods.send_file` with
        ``entity`` already set.
        """
        sent = await self._client.send_file(
            self._input_chat, *args, **kwargs)

        # Albums will be lists, so handle that
        ms = sent if isinstance(sent, list) else (sent,)
        self._outgoing.update(m.id for m in ms)
        self._last_outgoing = ms[-1].id
        return sent

    @_checks_cancelled
    def mark_read(self, message=None):
        """
        Marks as read the latest received message if ``message is None``.
        Otherwise, marks as read until the given message (or message ID).

        This is equivalent to calling `client.send_read_acknowledge
        <telethon.client.messages.MessageMethods.send_read_acknowledge>`.
""" if message is None: if self._incoming: message = self._incoming[-1].id else: message = 0 elif not isinstance(message, int): message = message.id return self._client.send_read_acknowledge( self._input_chat, max_id=message) def get_response(self, message=None, *, timeout=None): """ Gets the next message that responds to a previous one. Args: message (`Message <telethon.tl.custom.message.Message>` | `int`, optional): The message (or the message ID) for which a response is expected. By default this is the last sent message. timeout (`int` | `float`, optional): If present, this `timeout` (in seconds) will override the per-action timeout defined for the conversation. """ return self._get_message( message, self._response_indices, self._pending_responses, timeout, lambda x, y: True ) def get_reply(self, message=None, *, timeout=None): """ Gets the next message that explicitly replies to a previous one. """ return self._get_message( message, self._reply_indices, self._pending_replies, timeout, lambda x, y: x.reply_to_msg_id == y ) def _get_message( self, target_message, indices, pending, timeout, condition): """ Gets the next desired message under the desired condition. Args: target_message (`object`): The target message for which we want to find another response that applies based on `condition`. indices (`dict`): This dictionary remembers the last ID chosen for the input `target_message`. pending (`dict`): This dictionary remembers {msg_id: Future} to be set once `condition` is met. timeout (`int`): The timeout (in seconds) override to use for this operation. condition (`callable`): The condition callable that checks if an incoming message is a valid response. """ start_time = time.time() target_id = self._get_message_id(target_message) # If there is no last-chosen ID, make sure to pick one *after* # the input message, since we don't want responses back in time if target_id not in indices: for i, incoming in enumerate(self._incoming): if incoming.id > target_id: indices[target_id] = i break else: indices[target_id] = len(self._incoming) # We will always return a future from here, even if the result # can be set immediately. Otherwise, needing to await only # sometimes is an annoying edge case (i.e. we would return # a `Message` but `get_response()` always `await`'s). future = self._client.loop.create_future() # If there are enough responses saved return the next one last_idx = indices[target_id] if last_idx < len(self._incoming): incoming = self._incoming[last_idx] if condition(incoming, target_id): indices[target_id] += 1 future.set_result(incoming) return future # Otherwise the next incoming response will be the one to use # # Note how we fill "pending" before giving control back to the # event loop through "await". We want to register it as soon as # possible, since any other task switch may arrive with the result. pending[target_id] = future return self._get_result(future, start_time, timeout, pending, target_id) def get_edit(self, message=None, *, timeout=None): """ Awaits for an edit after the last message to arrive. The arguments are the same as those for `get_response`. 
""" start_time = time.time() target_id = self._get_message_id(message) target_date = self._edit_dates.get(target_id, 0) earliest_edit = min( (x for x in self._incoming if x.edit_date and x.id > target_id and x.edit_date.timestamp() > target_date ), key=lambda x: x.edit_date.timestamp(), default=None ) future = self._client.loop.create_future() if earliest_edit and earliest_edit.edit_date.timestamp() > target_date: self._edit_dates[target_id] = earliest_edit.edit_date.timestamp() future.set_result(earliest_edit) return future # we should always return something we can await # Otherwise the next incoming response will be the one to use self._pending_edits[target_id] = future return self._get_result(future, start_time, timeout, self._pending_edits, target_id) def wait_read(self, message=None, *, timeout=None): """ Awaits for the sent message to be marked as read. Note that receiving a response doesn't imply the message was read, and this action will also trigger even without a response. """ start_time = time.time() future = self._client.loop.create_future() target_id = self._get_message_id(message) if self._last_read is None: self._last_read = target_id - 1 if self._last_read >= target_id: return self._pending_reads[target_id] = future return self._get_result(future, start_time, timeout, self._pending_reads, target_id) async def wait_event(self, event, *, timeout=None): """ Waits for a custom event to occur. Timeouts still apply. .. note:: Only use this if there isn't another method available! For example, don't use `wait_event` for new messages, since `get_response` already exists, etc. Unless you're certain that your code will run fast enough, generally you should get a "handle" of this special coroutine before acting. Generally, you should do this: >>> from telethon import TelegramClient, events >>> >>> client = TelegramClient(...) >>> >>> async def main(): >>> async with client.conversation(...) as conv: >>> response = conv.wait_event(events.NewMessage(incoming=True)) >>> await conv.send_message('Hi') >>> response = await response This way your event can be registered before acting, since the response may arrive before your event was registered. It depends on your use case since this also means the event can arrive before you send a previous action. """ start_time = time.time() if isinstance(event, type): event = event() await event.resolve(self._client) counter = Conversation._custom_counter Conversation._custom_counter += 1 future = self._client.loop.create_future() self._custom[counter] = (event, future) return await self._get_result(future, start_time, timeout, self._custom, counter) async def _check_custom(self, built): for key, (ev, fut) in list(self._custom.items()): ev_type = type(ev) inst = built[ev_type] if inst: filter = ev.filter(inst) if inspect.isawaitable(filter): filter = await filter if filter: fut.set_result(inst) del self._custom[key] def _on_new_message(self, response): response = response.message if response.chat_id != self.chat_id or response.out: return if len(self._incoming) == self._max_incoming: self._cancel_all(ValueError('Too many incoming messages')) return self._incoming.append(response) # Most of the time, these dictionaries will contain just one item # TODO In fact, why not make it be that way? Force one item only. # How often will people want to wait for two responses at # the same time? It's impossible, first one will arrive # and then another, so they can do that. 
        for msg_id, future in list(self._pending_responses.items()):
            self._response_indices[msg_id] = len(self._incoming)
            future.set_result(response)
            del self._pending_responses[msg_id]

        for msg_id, future in list(self._pending_replies.items()):
            if msg_id == response.reply_to_msg_id:
                self._reply_indices[msg_id] = len(self._incoming)
                future.set_result(response)
                del self._pending_replies[msg_id]

    def _on_edit(self, message):
        message = message.message
        if message.chat_id != self.chat_id or message.out:
            return

        # We have to update our incoming messages with the new edit date
        for i, m in enumerate(self._incoming):
            if m.id == message.id:
                self._incoming[i] = message
                break

        for msg_id, future in list(self._pending_edits.items()):
            if msg_id < message.id:
                edit_ts = message.edit_date.timestamp()

                # We compare <= because edit_ts resolution is always to
                # seconds, but we may have increased _edit_dates before.
                # Since the dates are ever growing this is not a problem.
                if edit_ts <= self._edit_dates.get(msg_id, 0):
                    self._edit_dates[msg_id] += _EDIT_COLLISION_DELTA
                else:
                    self._edit_dates[msg_id] = message.edit_date.timestamp()

                future.set_result(message)
                del self._pending_edits[msg_id]

    def _on_read(self, event):
        if event.chat_id != self.chat_id or event.inbox:
            return

        self._last_read = event.max_id

        # Resolve the futures first, then delete the resolved entries in a
        # second pass; each pending read must be removed exactly once.
        remove_reads = []
        for msg_id, pending in list(self._pending_reads.items()):
            if msg_id >= self._last_read:
                remove_reads.append(msg_id)
                pending.set_result(True)

        for to_remove in remove_reads:
            del self._pending_reads[to_remove]

    def _get_message_id(self, message):
        if message is not None:  # 0 is valid but false-y, check for None
            return message if isinstance(message, int) else message.id
        elif self._last_outgoing:
            return self._last_outgoing
        else:
            raise ValueError('No message was sent previously')

    @_checks_cancelled
    def _get_result(self, future, start_time, timeout, pending, target_id):
        due = self._total_due
        if timeout is None:
            timeout = self._timeout

        if timeout is not None:
            due = min(due, start_time + timeout)

        # NOTE: We can't try/finally to pop from pending here because
        #       the event loop needs to get back to us, but it might
        #       dispatch another update before, and in that case a
        #       response could be set twice. So responses must be
        #       cleared when their futures are set to a result.
return asyncio.wait_for( future, timeout=None if due == float('inf') else due - time.time(), loop=self._client.loop ) def _cancel_all(self, exception=None): self._cancelled = True for pending in itertools.chain( self._pending_responses.values(), self._pending_replies.values(), self._pending_edits.values()): if exception: pending.set_exception(exception) else: pending.cancel() for _, fut in self._custom.values(): if exception: fut.set_exception(exception) else: fut.cancel() async def __aenter__(self): self._input_chat = \ await self._client.get_input_entity(self._input_chat) self._chat_peer = utils.get_peer(self._input_chat) # Make sure we're the only conversation in this chat if it's exclusive chat_id = utils.get_peer_id(self._chat_peer) conv_set = self._client._conversations[chat_id] if self._exclusive and conv_set: raise errors.AlreadyInConversationError() conv_set.add(self) self._cancelled = False self._last_outgoing = 0 self._last_incoming = 0 for d in ( self._outgoing, self._incoming, self._pending_responses, self._pending_replies, self._pending_edits, self._response_indices, self._reply_indices, self._edit_dates, self._custom): d.clear() if self._total_timeout: self._total_due = time.time() + self._total_timeout else: self._total_due = float('inf') return self def cancel(self): """ Cancels the current conversation. Pending responses and subsequent calls to get a response will raise ``asyncio.CancelledError``. This method is synchronous and should not be awaited. """ self._cancel_all() async def cancel_all(self): """ Calls `cancel` on *all* conversations in this chat. Note that you should ``await`` this method, since it's meant to be used outside of a context manager, and it needs to resolve the chat. """ chat_id = await self._client.get_peer_id(self._input_chat) for conv in self._client._conversations[chat_id]: conv.cancel() async def __aexit__(self, exc_type, exc_val, exc_tb): chat_id = utils.get_peer_id(self._chat_peer) conv_set = self._client._conversations[chat_id] conv_set.discard(self) if not conv_set: del self._client._conversations[chat_id] self._cancel_all() __enter__ = helpers._sync_enter __exit__ = helpers._sync_exit
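
# --- Usage sketch (editor's addition, not part of the original module). A
# minimal request/response exchange with Conversation as obtained from
# `client.conversation(...)`; the chat identifier and texts are placeholders.
async def _example_conversation_usage(client):
    async with client.conversation('@example_chat') as conv:
        await conv.send_message('Hello')
        response = await conv.get_response()  # next incoming message
        await conv.mark_read()                # acknowledge everything received
        return response.text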
StarcoderdataPython
1600960
<reponame>jonzxz/project-piscator ## Application Objects from app import db, encryption_engine ## Utilities from datetime import datetime # Defines model for EmailAddress class class EmailAddress(db.Model): __tablename__ = 'email_address' email_id = db.Column(db.Integer, primary_key=True) email_address = db.Column(db.String(30), index=True, unique=True\ , nullable=False) email_password = db.Column(db.String(255), nullable=False) phishing_mail_detected = db.Column(db.Integer, nullable=True, default=0) total_mails_checked = db.Column(db.Integer, nullable=True, default=0) active = db.Column(db.Boolean, nullable=False, default=True) last_updated = db.Column(db.DateTime, nullable=True, default=None) created_at = db.Column(db.DateTime, index=True,default=datetime.now) notification_preference = db.Column(db.Boolean, nullable=False, default=True) # FK owner_id = db.Column(db.Integer, db.ForeignKey('user.user_id')) owner = db.relationship('User', backref='addresses') phishing_mails = db.relationship('PhishingEmail', backref='owner'\ , lazy='dynamic') def __repr__(self): return "Email Address: {} -- Owned by User ID: {}"\ .format(self.email_address, self.owner_id) def get_email_id(self) -> int: return self.email_id def get_email_address(self) -> str: return self.email_address def set_email_address(self, email_addr: str) -> None: self.email_address = email_addr def get_email_password(self) -> str: return self.email_password def set_email_password(self, pw: str) -> None: self.email_password = encryption_engine.encrypt(pw) def get_decrypted_email_password(self) ->str: return encryption_engine.decrypt(self.email_password) def set_owner_id(self, user_id: int): self.owner_id = user_id def get_owner_id(self) -> int: return self.owner_id def get_phishing_mail_detected(self) -> int: return self.phishing_mail_detected def set_phishing_mail_detected(self, num_phish_detected: int) -> None: self.phishing_mail_detected += num_phish_detected def get_total_mails_checked(self) -> int: return self.total_mails_checked def set_total_mails_checked(self, num_mails_checked: int) -> None: self.total_mails_checked += num_mails_checked def get_active_status(self) -> bool: return self.active def set_active_status(self, boolean: bool) -> None: self.active = boolean def set_created_at(self, created_at: datetime) -> None: self.created_at = created_at def get_created_at(self) -> datetime: return self.created_at def get_notification_pref(self) -> bool: return self.notification_preference def set_notification_pref(self, pref: bool) -> None: self.notification_preference = pref def set_last_updated(self, last_updated: datetime) -> None: self.last_updated = last_updated def get_last_updated(self) -> datetime: return self.last_updated def get_prettified_date(self) -> str: return self.get_last_updated().strftime('%d-%m-%Y %H:%M')
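
# --- Usage sketch (editor's addition): the encrypt/decrypt round-trip that the
# setters above implement. Assumes an application context where `db` and
# `encryption_engine` are initialised; all values are placeholders.
def _example_email_address_roundtrip():
    addr = EmailAddress()
    addr.set_email_address('user@example.com')
    addr.set_email_password('plain-text-password')  # stored encrypted
    addr.set_owner_id(1)
    db.session.add(addr)
    db.session.commit()
    # The column holds ciphertext; the getter decrypts on demand.
    assert addr.get_decrypted_email_password() == 'plain-text-password'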
StarcoderdataPython
1712450
<gh_stars>0 from pyapp.conf import settings # Ensure settings are configured settings.configure(["pyapp_ext.pyspark.default_settings"])
StarcoderdataPython
1717947
<filename>app/hold/urls.py from django.conf.urls import patterns, url from app.hold import views urlpatterns = patterns('', url(r'index/$', views.index, name='index'), url(r'detail/(?P<fund_id>\d+)/$', views.detail, name='detail'), url(r'get_hold/(?P<fund_id>\d+)/$', views.get_hold, name='get_hold'), )
StarcoderdataPython
113129
import argparse import glob import os import random import logging import numpy as np import math from tqdm import tqdm import time import torch from transformers import AutoTokenizer, AutoModelForMaskedLM from transformers import DataCollatorForLanguageModeling from transformers.optimization import AdamW, get_linear_schedule_with_warmup from torch.utils.data import Dataset, DataLoader import pytorch_lightning as ptl from pytorch_lightning.logging.test_tube import TestTubeLogger from pytorch_lightning.callbacks import ModelCheckpoint, LearningRateLogger logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) # DONE: reproduce RoBERTa numbers on the Longformer corpus # DONE: testing ddp single machine # DONE: testing ddp multiple machines # DONE: testing resume from checkpoint # TODO: try on a TPU-pod # TODO: run on beaker on ai2-server1/2 try: import torch_xla.core.xla_model as xm except ImportError: XLA_AVAILABLE = False else: XLA_AVAILABLE = True class MMapTextDataset(Dataset): def __init__(self, mmap_filename, chunk_size, bos_token_id, eos_token_id): # `chunk_size - 2` to reserve space for <s> and </s> self.num_instances = np.memmap(mmap_filename, mode='r', dtype=np.uint16).shape[0] // (chunk_size - 2) # defer loading the token_ids memmap until after the first __getitem__ call. # when spawning new processes for ddp, there is a hard limit in python < 3.8 that # pickle files need to be < 4GB. By waiting until after the first __getitem__ we # don't have to pickle the memmap self.token_ids = None self._mmap_filename = mmap_filename self._chunk_size = chunk_size self._bos_token_id = bos_token_id self._eos_token_id = eos_token_id def __len__(self): return self.num_instances def __getitem__(self, i): if self.token_ids is None: self.token_ids = np.memmap(self._mmap_filename, mode='r', dtype=np.uint16) from_index = i * (self._chunk_size - 2) to_index = (i + 1) * (self._chunk_size - 2) data = np.concatenate(([self._bos_token_id], self.token_ids[from_index:to_index], [self._eos_token_id])) return torch.tensor(data, dtype=torch.long) # ========================= preprocessing code ========================= # @staticmethod def _process_file(full_fname): "Step 1: tokenize an input text file then save token ids into `np.memmap` shards of size `args.shard_size`" fname = full_fname.split('/')[-1] log_filename = f'{args.input_dir}/logs-{args.shard_size}/{fname}.log' if os.path.isfile(log_filename): logging.info(f'Skipping {full_fname} ...') return # log file already exists. Skip current file. 
        logging.info(f'Processing {full_fname} ...')
        with open(full_fname, 'r') as fin:
            token_list = []
            shard_count = 0
            tokens_count = 0

            def _write_shard():
                if len(token_list) == 0:
                    return
                if token_list[-1] != MMapTextDataset.tokenizer.sep_token_id:  # handle a rare case
                    token_list.append(MMapTextDataset.tokenizer.sep_token_id)
                shard_filename = f'{args.input_dir}/shards-{args.shard_size}/{fname}-{shard_count}.bin'
                logging.info(f'Writing {len(token_list)} tokens to shard {shard_filename}')
                fp = np.memmap(shard_filename, dtype=np.uint16, mode='w+', shape=len(token_list))
                fp[:] = token_list[:]
                del fp  # flush and close file

            for line in tqdm(fin):
                line = line.strip()
                if line == '':  # drop empty lines
                    continue
                tokens = MMapTextDataset.tokenizer.encode(line, add_special_tokens=False)  # `__getitem__` adds special tokens
                token_list.extend(tokens)
                if len(token_list) > args.shard_size:
                    _write_shard()
                    tokens_count += len(token_list)
                    token_list = []
                    shard_count += 1
                else:
                    token_list.append(MMapTextDataset.tokenizer.sep_token_id)
            _write_shard()
            tokens_count += len(token_list)

        with open(log_filename, 'w') as f:
            f.write(f'Generated {tokens_count} tokens in {shard_count + 1} shards')

    @staticmethod
    def _combine_shards(output_fname, shards_list):
        "Step 2: combining memmap shards into one `train.bin` or `val.bin` file"
        total_size = 0
        for filename in shards_list:
            total_size += np.memmap(filename, mode='r', dtype=np.uint16).shape[0]
        logging.info(f'Writing {total_size} tokens to {output_fname}')
        all_token_ids = np.empty(total_size, dtype=np.uint16)
        last_token_index = 0
        for filename in tqdm(shards_list):
            shard = np.memmap(filename, mode='r', dtype=np.uint16)
            all_token_ids[last_token_index:last_token_index+len(shard)] = shard[:]
            last_token_index += len(shard)
        fp = np.memmap(output_fname, dtype=np.uint16, mode='w+', shape=total_size)
        fp[:] = all_token_ids[:]
        del fp

    @staticmethod
    def raw_text_to_mmap(args):
        """This is the main preprocessing function. It processes all the text
        files in `args.input_dir` and outputs two np.memmap files, one for
        training and one for validation with ratio `args.train_dev_split`.

        Processing each input file involves tokenizing it, sharding it into
        shards of size `args.shard_size`, then writing each shard as an
        np.memmap file. The stream of tokens in the memmap file represents
        documents separated with `tokenizer.sep_token`. In `__getitem__`, the
        `tokenizer.bos_token` and `tokenizer.eos_token` are added. The reason
        for not adding them at preprocessing time is to allow different
        sequence lengths later on. Notice that this is the "FULL-SENTENCES"
        setting in the RoBERTa paper, Table2.
        """
        MMapTextDataset.tokenizer = AutoTokenizer.from_pretrained(args.tokenizer, use_fast=True)
        assert len(MMapTextDataset.tokenizer) < 65535  # will use uint16 to store token ids
        all_files = glob.glob(f'{args.input_dir}/*.txt')

        if os.path.exists(f'{args.input_dir}/cache/train.bin') and os.path.exists(f'{args.input_dir}/cache/val.bin'):
            logger.info("Cache already exists. Remove the cache directory to regenerate")
            return
        try:
            os.mkdir(f'{args.input_dir}/cache/')
        except FileExistsError:
            pass
        try:
            os.mkdir(f'{args.input_dir}/shards-{args.shard_size}/')
        except FileExistsError:
            pass
        try:
            os.mkdir(f'{args.input_dir}/logs-{args.shard_size}/')  # log progress to be able to resume
        except FileExistsError:
            pass

        # STEP1: tokenizing and saving to shards
        if args.num_preprocessing_workers > 1:
            from multiprocessing.pool import Pool
            with Pool(args.num_preprocessing_workers) as p:
                list(tqdm(p.imap(MMapTextDataset._process_file, all_files), total=len(all_files)))
        else:
            [MMapTextDataset._process_file(f) for f in tqdm(all_files)]

        # STEP2: shuffling shards and combining them into train.bin and val.bin files
        all_shards = glob.glob(f'{args.input_dir}/shards-{args.shard_size}/*.bin')
        random.shuffle(all_shards)  # shuffling based on shards not individual lines
        val_shards_count = int(args.train_dev_split * len(all_shards))
        val_shards = all_shards[:val_shards_count]
        train_shards = all_shards[val_shards_count:]
        # TODO: if MMapTextDataset._combine_shards is very slow for large files, it can be skipped but we need to
        # update the dataset to read from multiple shards directly
        MMapTextDataset._combine_shards(f'{args.input_dir}/cache/val.bin', val_shards)
        MMapTextDataset._combine_shards(f'{args.input_dir}/cache/train.bin', train_shards)

        del MMapTextDataset.tokenizer
    # ========================= end preprocessing code ========================= #


class Pretrainer(ptl.LightningModule):

    def __init__(self, hparams):
        super().__init__()

        self.args = hparams
        self.hparams = self.args

        self.model = AutoModelForMaskedLM.from_pretrained(args.model)
        self.config = self.model.config
        tokenizer = AutoTokenizer.from_pretrained(args.tokenizer)
        self.pad_token_id = tokenizer.pad_token_id
        self.eos_token_id = tokenizer.eos_token_id
        self.bos_token_id = tokenizer.bos_token_id

        logger.info(f'Creating dataset cache from dir {self.args.input_dir}. This could be slow the first time.')
        MMapTextDataset.raw_text_to_mmap(args)

        # TODO: add support for other objective functions (whole word masking, BART objectives)
        self.data_collator = DataCollatorForLanguageModeling(
            tokenizer=tokenizer, mlm=True, mlm_probability=self.args.mlm_prob
        )
        self.start_time = 0

    def to(self, *args, **kwargs):
        param_count_before_to = len(list(self.parameters()))
        super().to(*args, **kwargs)
        if self.trainer.use_tpu:
            # need to re-tie the weights after moving to XLA!
self.model.tie_weights() if 'roberta' in self.args.model: self.model.lm_head.bias = self.model.lm_head.decoder.bias param_count_after_to = len(list(self.parameters())) assert param_count_before_to == param_count_after_to def forward(self, input_ids=None, labels=None): # get the padding mask - 1 for NOT masked, 0 for MASKED/PAD attention_mask = (input_ids != self.pad_token_id).int() # output is loss, prediction_scores, hidden_states output = self.model(input_ids=input_ids, attention_mask=attention_mask, labels=labels) return output[0] # loss def training_step(self, batch, batch_nb): loss = self(**batch) input_ids = batch['input_ids'] tensorboard_logs = { 'input_size': input_ids.numel(), 'mlm_loss': loss, 'mlm_bpc': loss/math.log(2), 'mlm_perplexity': torch.exp(loss), 'token_per_step': input_ids.numel() * self.args.grad_accum * self.trainer.world_size, } if self.start_time != 0: elapsed_time = time.time() - self.start_time tensorboard_logs['second_per_batch'] = elapsed_time self.start_time = time.time() if self.on_gpu: tensorboard_logs['memory'] = torch.cuda.memory_allocated(loss.device) / 1024 ** 3 return {'loss': loss, 'log': tensorboard_logs} def validation_step(self, batch, batch_nb): # TODO: log how long evaluation takes self.start_time = 0 # reset training_step timer loss = self(**batch) tensorboard_logs = { 'val_mlm_loss': loss.detach(), } return {'val_loss': tensorboard_logs["val_mlm_loss"], 'log': tensorboard_logs} def validation_epoch_end(self, outputs): avg_loss = torch.stack([x['log']['val_mlm_loss'] for x in outputs if 'val_mlm_loss' in x['log']]).mean() if self.use_ddp: # TODO: PTL is already doing this. Is it still needed here? # https://github.com/PyTorchLightning/pytorch-lightning/blob/0.8.5/pytorch_lightning/metrics/converters.py#L251 torch.distributed.all_reduce(avg_loss, op=torch.distributed.ReduceOp.SUM) avg_loss /= torch.distributed.get_world_size() elif self.use_tpu: avg_loss = xm.all_reduce(xm.REDUCE_SUM, avg_loss) / xm.xrt_world_size() logs = {'val_mlm_loss': avg_loss} return {'log': logs, 'progress_bar': logs, "val_loss": avg_loss} def configure_optimizers(self): no_decay = ["bias", "LayerNorm.weight"] optimizer_grouped_parameters = [ { "params": [p for n, p in self.named_parameters() if not any(nd in n for nd in no_decay) and p.requires_grad], "weight_decay": self.args.weight_decay, }, { "params": [p for n, p in self.named_parameters() if any(nd in n for nd in no_decay) and p.requires_grad], "weight_decay": 0.0, }, ] optimizer = AdamW(optimizer_grouped_parameters, lr=self.args.lr, eps=self.args.adam_epsilon) scheduler = get_linear_schedule_with_warmup( optimizer, num_warmup_steps=self.args.warmup_steps, num_training_steps=self.args.train_steps ) return [optimizer], [{"scheduler": scheduler, "interval": "step"}] def _get_loader(self, fname, is_train): dataset = MMapTextDataset(fname, chunk_size=self.args.seqlen, bos_token_id=self.bos_token_id, eos_token_id=self.eos_token_id) # TODO: consider `replace_sampler_ddp=True` and removing the following if statement if self.trainer.use_ddp: sampler = torch.utils.data.distributed.DistributedSampler(dataset, shuffle=is_train) shuffle = False elif self.trainer.use_tpu: sampler = torch.utils.data.distributed.DistributedSampler( dataset, num_replicas=xm.xrt_world_size(), rank=xm.get_ordinal(), shuffle=is_train, ) shuffle = False else: sampler = None shuffle = is_train loader = DataLoader( dataset, batch_size=self.args.batch_size, shuffle=shuffle, sampler=sampler, num_workers=self.args.num_workers, collate_fn=self.data_collator, 
drop_last=is_train, ) return loader def train_dataloader(self): return self._get_loader(f'{self.args.input_dir}/cache/train.bin', True) def val_dataloader(self): return self._get_loader(f'{self.args.input_dir}/cache/val.bin', False) def grad_norm(self, norm_type): # Override PTL `grad_norm` function to only return `total_grad_norm` instead norms of individual params # TODO: grad_norm reporting needs to take fp16 loss scale into account parameters = [p for p in self.parameters() if p.grad is not None] device = parameters[0].device total_norm = torch.zeros([], device=device if parameters else None) norm_type = float(norm_type) for p in parameters: param_norm = p.grad.data.pow(norm_type).sum() total_norm.add_(param_norm) total_norm = (total_norm ** (1.0 / norm_type)) return {'total_grad_norm': total_norm} @staticmethod def add_args(parser): parser.add_argument("--seed", type=int, default=3) # Dataset. Some of these params are only useful when generating the dataset cache parser.add_argument("--input_dir", type=str, default='/net/nfs.corp/s2-research/beltagy/longformer/data/') # Used only at the preprocessing phase parser.add_argument("--train_dev_split", type=float, default=0.05) parser.add_argument("--shard_size", type=int, default=1024 ** 3 // 4) # 250MB parser.add_argument("--num_preprocessing_workers", type=int, default=1) # Used only at the training phase parser.add_argument("--seqlen", type=int, default=512) parser.add_argument("--mlm_prob", type=float, default=0.15) # HF model loading parser.add_argument("--tokenizer", type=str, default='roberta-base') parser.add_argument("--model", type=str, default='roberta-base') # Checkpointing and logging parser.add_argument("--save_dir", type=str, default='/runs/') parser.add_argument("--save_prefix", type=str, default='test', help="path of output directory is --save_dir/--save_prefix") parser.add_argument("--resume", type=str, default=None, # It is better to use a different output dir. help="Path to a checkpoint to load model weights and training state. It overwrites args") parser.add_argument("--resume_model_only", type=str, default=None, help="Path to a checkpoint to load model weights but not training state") parser.add_argument("--log_rate", type=int, default=10) parser.add_argument("--disable_checkpointing", type=bool, default=False) # Training hyperparams parser.add_argument("--lr", type=float, default=1e-5) parser.add_argument("--train_steps", type=int, default=3000, help='# training grad. updates') parser.add_argument("--warmup_steps", type=int, default=1000, help='# warmup grad. updates') parser.add_argument("--val_every", type=int, default=1000, help='# training grad. updates between evaluations') parser.add_argument("--val_batches", type=int, default=1000, help='# evaluation **batches**') parser.add_argument("--weight_decay", type=float, default=0.01) parser.add_argument("--adam_epsilon", type=float, default=1e-6) parser.add_argument("--grad_clip", type=float, default=0) # TODO: test this with fp16. Likely not working # RoBERTa's tokens_per_step = 2^18 = 512(seqlen) x 1(gpu_count) x 32(batch_size) x 16(grad_accum) parser.add_argument("--batch_size", type=int, default=32) parser.add_argument("--grad_accum", type=int, default=1) # Compute resources parser.add_argument("--fp16", type=bool, default=False) parser.add_argument("--num_workers", type=int, default=0) parser.add_argument("--gpu_count", type=int, default=1, # `--gpus` is reserved for internal use by PTL help="Number of gpus. 
This respects `CUDA_VISIBLE_DEVICES`") # For multi-node training, use the PyTorch launch script. The script and instructions can be found here: # https://github.com/pytorch/pytorch/blob/master/torch/distributed/launch.py. # To run PTL in a mode compatible with the launch script, two things are needed: # - pass the argument `--use_env` to `torch.distributed.launch` # - make sure `--nproc_per_node` matches `--gpu_count` and `--nnodes` matches `--node_count`. # For example, to run on 2 nodes, 3 gpus each, the command line on node rank 1 would be like: # >>>> python -m torch.distributed.launch \ # --use_env --nnodes 2 --nproc_per_node 3 \ # --node_rank 1 --master_addr s2-server4 --master_port 12343 \ # scripts/pretrain.py \ # --gpu_count 2 --node_count 2 \ # --input_dir my_data_dir --save_prefix test_multinode parser.add_argument("--node_count", type=int, default=1, help="Number of nodes. It needs to match --nnodes of torch.distributed.launch") parser.add_argument("--tpu_core_count", type=int, default=None) return parser def main(args): random.seed(args.seed * 10) np.random.seed(args.seed * 100) torch.manual_seed(args.seed * 1000) if torch.cuda.is_available(): torch.cuda.manual_seed_all(args.seed * 10000) if args.resume_model_only is not None: pretrainer = Pretrainer.load_from_checkpoint(args.resume_model_only, args) else: pretrainer = Pretrainer(args) # logger here is a SummaryWritter for tensorboard # it is used by the trainer, and certain return variables # from the model are automatically logged logger = TestTubeLogger( save_dir=args.save_dir, name=args.save_prefix, version=0 # always use version=0 ) checkpoint_callback = ModelCheckpoint( # model saved to filepath/prefix_.... filepath=os.path.join(args.save_dir, args.save_prefix, 'checkpoint'), prefix='', save_top_k=1, save_last=True, verbose=True, monitor='val_loss', mode='min', period=-1, # to allow multiple checkpoints per epoch ) args.val_every *= args.grad_accum # PTL is expecting number of batches_per_gpu trainer = ptl.Trainer( gpus=args.gpu_count, num_nodes=args.node_count, num_tpu_cores=args.tpu_core_count, distributed_backend='ddp' if (args.gpu_count > 1 or args.node_count > 1) else None, replace_sampler_ddp=False, track_grad_norm=2, max_epochs=10000, min_epochs=0, max_steps=args.train_steps, # run for many epochs, but stop after max_steps val_check_interval=args.val_every, limit_val_batches=args.val_batches, early_stop_callback=None, row_log_interval=args.log_rate, progress_bar_refresh_rate=args.log_rate, logger=logger, checkpoint_callback=checkpoint_callback if not args.disable_checkpointing else None, accumulate_grad_batches=args.grad_accum, resume_from_checkpoint=args.resume, gradient_clip_val=args.grad_clip, precision=16 if args.fp16 else 32, amp_level='O2', num_sanity_val_steps=2, callbacks=[LearningRateLogger()], ) trainer.fit(pretrainer) if __name__ == "__main__": parser = Pretrainer.add_args(argparse.ArgumentParser(description="pretrain")) args = parser.parse_args() main(args)
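# --- Example invocation (editor's addition; paths and sizes are placeholders).
# Single GPU, approximating RoBERTa's 2^18 tokens/step via gradient accumulation:
#   python scripts/pretrain.py --input_dir /data/corpus --save_dir /runs \
#       --save_prefix base_run --gpu_count 1 --batch_size 32 --grad_accum 16
# Multi-node runs go through torch.distributed.launch exactly as described in
# the comment block above the --node_count argument.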
StarcoderdataPython
4800021
<gh_stars>1-10 # encoding: utf-8 """ @author: liyao @contact: <EMAIL> @software: pycharm @time: 2020/6/12 1:39 下午 @desc: """ import setuptools with open("README.md", "r") as fh: long_description = fh.read() setuptools.setup( name="feishu-sdk", version="1.0.2", author="liyao", author_email="<EMAIL>", description="Feishu Third-party Libraries", long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/liyao2598330/feishu-sdk", packages=setuptools.find_packages(), classifiers=[ "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "License :: OSI Approved :: MIT License", ], install_requires=[ 'urllib3', 'requests' ], python_requires='>=2.7', )
StarcoderdataPython
51908
<gh_stars>0 def no_boring_zeros(n): return int(str(n).strip("0")) if n != 0 else n
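# Examples (editor's addition): only trailing zeros are stripped, and zero
# itself is returned unchanged. str.strip() trims both ends, but the decimal
# form of a non-zero int never starts with '0', so only the tail is affected.
#   no_boring_zeros(1450)   -> 145
#   no_boring_zeros(960000) -> 96
#   no_boring_zeros(1050)   -> 105
#   no_boring_zeros(0)      -> 0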
StarcoderdataPython
3205886
<reponame>KHanghoj/epiPALEOMIX<gh_stars>1-10 #!/usr/bin/python # # Copyright (c) 2012 <NAME> <<EMAIL>> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # import os import sys import argparse import subprocess from pypeline.common.fileutils import missing_executables def _collect_positions(handle): positions = set() for line in handle: if not line.startswith("#"): fields = line.split("\t", 6) if "," in fields[4]: positions.add((fields[0], int(fields[1]))) positions = list(positions) positions.sort() return positions def print_status(index, contig, position, npositions, end="\r"): tmpl = " - Collected pileup %i of %i (%02.2f%% done): %s:%s ... %s" sys.stderr.write(tmpl % (index, npositions, (100.0 * index) / npositions, contig, position, end)) def main(argv): parser = argparse.ArgumentParser(prog="paleomix create_pileup") parser.add_argument("output", help="BGZipped pileup file.") parser.add_argument("mpileup_args", nargs=argparse.REMAINDER) args = parser.parse_args(argv) missing = missing_executables(("samtools", "tabix", "bgzip")) if missing: sys.stderr.write("ERROR: Required executables are missing:\n") sys.stderr.write(" - %s\n" % "\n\t- ".join(missing)) return 1 with open(args.output, "w") as handle: bgzip = subprocess.Popen("bgzip", stdin = subprocess.PIPE, stdout = handle) # While samtools mpileup has an option for specifying a list of # positions (-l), this requires traversing the entire file, and may # not calculate the BAQ. Given the low number of expected sites, # individual calls for each position are significantly faster. 
sys.stderr.write("Reading VCF from STDIN ...\n") positions = _collect_positions(sys.stdin) npositions = len(positions) sys.stderr.write(" - Read %i candidate positions ...\n" % (npositions,)) positions_file = args.output + ".positions" with open(positions_file, "w") as handle: for (contig, position) in positions: handle.write("%s\t%s\n" % (contig, position)) sys.stderr.write(" - Wrote positions to '%s' ...\n" % (positions_file,)) sys.stderr.write("Collecting pileups:\n") call = ["samtools", "mpileup", "-R", "-l", positions_file] call.extend(args.mpileup_args) proc = subprocess.Popen(call, stdout=subprocess.PIPE, close_fds=True) line = "NA\tNA\t" index = npositions = 1 for (index, line) in enumerate(proc.stdout, start=1): if (index - 1) % 100 == 0: contig, position, _ = line.split("\t", 2) print_status(index, contig, position, npositions) bgzip.stdin.write(line) contig, position, _ = line.split("\t", 2) print_status(index, contig, position, npositions, end="\n") if proc.wait(): sys.stderr.write("ERROR: Error running samtools, return-code=%i:\n" % proc.wait()) sys.stderr.write("\t- Command: %s\n" % " ".join(call)) return 1 bgzip.stdin.close() if bgzip.wait(): sys.stderr.write("ERROR: Error running bgzip, return-code %i\n" % bgzip.wait()) sys.stderr.write(" - Cleaning up ...") os.remove(positions_file) subprocess.check_call(["tabix", "-b", "2", "-e", "2", args.output]) return 0 if __name__ == '__main__': sys.exit(main(sys.argv[1:]))
StarcoderdataPython
3300785
from typing import Any, Callable, TypeVar, Union

from ..includes import includes
from ..map import map
from ..reduce import reduce

T = TypeVar('T', bound='List')


class List(list):
    def map(self: T, cb) -> T:
        return List(map(self, cb))

    def reduce(self: T, cb, initializer=None) -> Union[str, int, float, bool, None]:
        return reduce(self, cb, initializer)

    @property
    def length(self: T) -> int:
        return len(self)

    def includes(self: T, val: Union[None, bool, str, int, float]) -> bool:
        return includes(self, val)

    def pop(self: T) -> Any:
        # Note: unlike list.pop(), this pops from a *copy*, so the original
        # List is left unmodified and only the last element is returned.
        return list(self).pop()

    def join(self: T, join_with: str) -> str:
        return join_with.join(self)

    def find(self, cb: Callable[[Any, int], bool]) -> Union[Any, None]:
        for index, item in enumerate(self):
            if cb(item, index):
                return item
        return None
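
# --- Usage sketch (editor's addition). The callback signatures for map/reduce
# come from the package-local helpers imported above; (value, index) is
# assumed here by analogy with find().
#   items = List([1, 2, 3])
#   items.length                    # -> 3
#   items.includes(2)               # -> True
#   items.find(lambda v, i: v > 1)  # -> 2
#   List(['a', 'b']).join('-')      # -> 'a-b'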
StarcoderdataPython
62547
import sys import os import numpy as np from pprint import pprint from datetime import datetime from datetime import timedelta import mysql.connector import math import matplotlib.pyplot as plt import matplotlib.colors from matplotlib import dates from mpl_toolkits.basemap import Basemap import calendar from scipy.optimize import curve_fit from coating_info_from_raw_signal import CoatingData flight_times = { 'science 1' : [datetime(2015,4,5,9,0),datetime(2015,4,5,14,0),15.6500, 78.2200] , ##'ferry 1' : [datetime(2015,4,6,9,0),datetime(2015,4,6,11,0),15.6500, 78.2200] , ##'ferry 2' : [datetime(2015,4,6,15,0),datetime(2015,4,6,18,0),-16.6667, 81.6000] , ##'science 2' : [datetime(2015,4,7,16,0),datetime(2015,4,7,21,0),-62.338, 82.5014] , #'science 3' : [datetime(2015,4,8,13,0),datetime(2015,4,8,17,0),-62.338, 82.5014] , #'science 4' : [datetime(2015,4,8,17,30),datetime(2015,4,8,22,0),-70.338, 82.5014] , #'science 5' : [datetime(2015,4,9,13,30),datetime(2015,4,9,18,0),-62.338, 82.0] , ##'ferry 3' : [datetime(2015,4,10,14,0),datetime(2015,4,10,17,0),-75.338, 81] , #'science 6' : [datetime(2015,4,11,15,0),datetime(2015,4,11,22,0),-90.9408, 80.5] , #'science 7' : [datetime(2015,4,13,15,0),datetime(2015,4,13,21,0),-95, 80.1] , #'science 8' : [datetime(2015,4,20,15,0),datetime(2015,4,20,20,0),-133.7306, 67.1], #'science 9' : [datetime(2015,4,20,21,0),datetime(2015,4,21,2,0),-133.7306, 69.3617] , #'science 10' : [datetime(2015,4,21,16,0),datetime(2015,4,21,22,0),-131, 69.55], #'tscience 10' : [datetime(2015,4,20,21,0),datetime(2015,4,21,2,0),-133.7306, 69.3617] , #'hscience 10' : [datetime(2015,4,21,16,0),datetime(2015,4,21,18,0),-131, 69.55], ##'gscience 10' : [datetime(2015,4,21,16,0),datetime(2015,4,21,18,0),-131, 69.55], } #database connection cnx = mysql.connector.connect(user='root', password='<PASSWORD>', host='localhost', database='black_carbon') cursor = cnx.cursor() binned_data = [] i = 70 while i < 220: binned_data.append(i) i+=10 plot_data = [] for flight in flight_times: print flight start_time = flight_times[flight][0] end_time = flight_times[flight][1] UNIX_start_time = calendar.timegm(start_time.utctimetuple()) UNIX_end_time = calendar.timegm(end_time.utctimetuple()) for bin in binned_data: bin_LL = bin bin_UL = bin +10 print bin_LL, ' to ', bin_UL cursor.execute(('SELECT (POW(rBC_mass_fg,(1/3.0))*101.994391398), coat_thickness_nm FROM polar6_coating_2015 where (POW(rBC_mass_fg,(1/3.0))*101.994391398) >= %s and (POW(rBC_mass_fg,(1/3.0))*101.994391398) < %s and particle_type = %s and instrument = %s and UNIX_UTC_ts >= %s and UNIX_UTC_ts < %s and coat_thickness_nm IS NOT NULL'),(bin_LL,bin_UL,'incand','UBCSP2',UNIX_start_time,UNIX_end_time)) coating_data = cursor.fetchall() #for row in coating_data: # if coat_thickness_nm == None: # coat_thickness_nm = 0 median_core_VED = np.median([row[0] for row in coating_data]) median_coat_th = np.median([row[1] for row in coating_data]) err25_coat_th = median_coat_th-np.percentile([row[1] for row in coating_data],25) err75_coat_th = np.percentile([row[1] for row in coating_data],75)-median_coat_th core_vol = (4/3)*math.pi*((median_core_VED/2)**3) particle_vol = (4/3)*math.pi*(((median_core_VED/2)+2*median_coat_th)**3) med_coat_volfrac = (particle_vol-core_vol)/particle_vol*1.0 err25_coat_volfrac = 0#((4/3)*math.pi*(((median_core_VED/2)+2*np.percentile([row[1] for row in coating_data],25))**3)-core_vol)/(4/3)*math.pi*(((median_core_VED/2)+2*np.percentile([row[1] for row in coating_data],25))**3) err75_coat_volfrac = 
0#((4/3)*math.pi*(((median_core_VED/2)+2*np.percentile([row[1] for row in coating_data],75))**3)-core_vol)/(4/3)*math.pi*(((median_core_VED/2)+2*np.percentile([row[1] for row in coating_data],75))**3) plot_data.append([median_core_VED,median_coat_th,err25_coat_th,err75_coat_th,med_coat_volfrac,err25_coat_volfrac,err75_coat_volfrac]) core = [row[0] for row in plot_data] coat_th = [row[1] for row in plot_data] coat_th_minerr = [row[2] for row in plot_data] coat_th_maxerr = [row[3] for row in plot_data] med_coat_volfrac = [row[4] for row in plot_data] med_coat_minerr = [row[5] for row in plot_data] med_coat_maxerr = [row[6] for row in plot_data] fig = plt.figure() ax1 = plt.subplot2grid((2,1), (0,0), colspan=1) ax2 = plt.subplot2grid((2,1), (1,0), colspan=1) ax1.errorbar(core,coat_th,yerr = [coat_th_minerr,coat_th_maxerr],fmt='o',linestyle='-') ax1.set_ylabel('coat thickness (nm)') ax1.set_xlabel('core VED (nm)') ax1.set_xlim(70,220) ax1.set_ylim(0,80) ax2.errorbar(core,med_coat_volfrac,yerr = [med_coat_minerr,med_coat_maxerr],fmt='o',linestyle='-', color = 'grey') ax2.set_xlabel('core VED (nm)') ax2.set_ylabel('coating volume fraction') ax2.set_xlim(70,220) ax2.set_ylim(0,1) #ax3.errorbar(mass_med,altitudes,xerr = [mass_25,mass_75],fmt='o',linestyle='-', color = 'green') #ax3.set_xlabel('total mass conc (ng/m3 - STP)') #ax3.set_ylabel('altitude (m)') #ax3.set_xlim(0,180) #ax3.set_ylim(0,6000) fig.suptitle(flight, fontsize=20) dir = 'C:/Users/<NAME>/Documents/Data/Netcare/Spring 2015/' os.chdir(dir) #plt.savefig('.png', bbox_inches='tight') plt.show()
StarcoderdataPython
3332813
<reponame>fostroll/toxine<gh_stars>1-10 # -*- coding: utf-8 -*- # Toxine project # # Copyright (C) 2019-present by <NAME> # License: BSD, see LICENSE for details """ Toxine is a part of the RuMor project. It is a pipeline of the text preprocessing, preliminary entity tagging, and tokenization. """ from toxine._version import __version__ from toxine.text_preprocessor import TextPreprocessor
StarcoderdataPython
3240368
<reponame>uniphil/feedwerk import io from setuptools import find_packages, setup with io.open('README.md', 'rt', encoding='utf8') as f: readme = f.read() setup( name="feedwerk", version="1.0.0", url="https://github.com/uniphil/feedwerk", project_urls={ "Documentation": "https://github.com/uniphil/feedwerk", "Code": "https://github.com/uniphil/feedwerk", "Issue tracker": "https://github.com/uniphil/feedwerk/issues", }, license='BSD-3-Clause', author="phil", author_email="<EMAIL>", description="The atom feed generator from werkzeug.", long_description=readme, long_description_content_type="text/markdown", classifiers=[ "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Topic :: Internet :: WWW/HTTP :: Dynamic Content", "Topic :: Software Development :: Libraries :: Python Modules", ], packages=find_packages(exclude=('tests*',)), include_package_data=True, install_requires=["werkzeug >= 1.0.0"], python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", extras_require={"dev": ["pytest"]}, )
StarcoderdataPython
1707692
<gh_stars>10-100 """ NXOS-specific utilities. """ import re from unicon.plugins.generic import GenericUtils from unicon.utils import AttributeDict class NxosUtils(GenericUtils): def get_redundancy_details(self, connection, timeout=None, who='my'): """ :arg connection: device connection object :return: device role and redundancy mode of the device """ timeout = timeout or connection.settings.EXEC_TIMEOUT redundancy_details = AttributeDict() show_red_out = connection.execute("show redundancy status", timeout=timeout) if who == "peer": block = 'Other supervisor' else: block = "This supervisor" output = self.output_block_extract(data=show_red_out, block=block) redundancy_details['role'] = "" output = output.split("\n") for line in output: if re.search("Redundancy state", line): redundancy_details['role'] =\ line[line.find(":") + 1:].strip().lower() if redundancy_details['role'] == "not present": redundancy_details['state'] = 'DISABLED' if re.search("Internal state", line): mode = line[line.find(":") + 1:].strip() if mode == "HA standby": redundancy_details['mode'] = 'sso' redundancy_details['state'] = 'STANDBY HOT' elif mode == "Active with HA standby": redundancy_details['mode'] = 'sso' redundancy_details['state'] = 'STANDBY HOT' elif mode == "Active with no standby": redundancy_details['mode'] = 'rpr' redundancy_details['state'] = 'STANDBY COLD' else: redundancy_details['mode'] = 'unknown' redundancy_details['state'] = 'unknown' return redundancy_details
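
# --- Usage sketch (editor's addition): `connection` is a Unicon device
# connection; the returned AttributeDict is populated with 'role', 'mode' and
# 'state' keys as shown above.
#   utils = NxosUtils()
#   details = utils.get_redundancy_details(connection)
#   if details.get('state') == 'STANDBY HOT':
#       ...  # standby is fully synchronised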
StarcoderdataPython
1779723
<gh_stars>1-10 #!/usr/bin/env/python3 # Copyright (c) Facebook, Inc. and its affiliates. # All rights reserved. # # This source code is licensed under the BSD-style license found in the # LICENSE file in the root directory of this source tree. """Events API **Overview**: The event API is used to publish and process events. The module can be used to capture state transitions to debug or give insight into the execution flow. The :py:class:`torchelastic.events.Event` is an object containing information about an occurrence during the execution of the program. The destination handler for events can be configured by the event type parameter. .. note:: The event type ``torchelastic`` is reserved by torchelastic for platform level events that can be produced by the agent or worker. **Record Events**: The event module resembles python's logging framework in terms of usage and consists of two parts: handler configuration and event publishing. The example below shows the simple event publishing mechanism via :py:meth:`torchelastic.events.record_event` method. :: from torchelastic.events import Event, record_event event.configure(event.NullEventHandler()) # uses event_type = "torchelastic" event.configure(event.ConsoleEventHandler(), event_type = "foo") def execute(): event = Event(name="test_event", event_type="foo") # The code below will be processed by the ConsoleEventHandler record_event(event) Another way of using the module is via :py:meth:`torchelastic.events.record`. :: from torchelastic.events import record def execute(): metadata = {'key':'value'} record(event_name="test", event_type="console", metadata=metadata) **Writing a Custom Event Handler**: The custom event handler can be implemented by extending :py:class:`torchelastic.events.EventHandler` class. Example :: from torchelastic.events import EventHandler class StdoutEventHandler(EventHandler): def record(self, event): print(f"[{event.event_type}]: event_name: {event.name}") event.configure(StdoutEventHandler(), event_type="stdout_events") Now all events with event_type 'stdout_events' will be printed to stdout. """ from .api import ( # noqa F401 ConsoleEventHandler, Event, EventHandler, configure, record, record_event, )
StarcoderdataPython
3352467
<reponame>xksteven/ethics_amti<filename>examples/expert_val_justice/create_tasks.py # simple task generator double check the max Assignments import argparse import numpy as np import pandas as pd import json from sklearn.utils import shuffle parser = argparse.ArgumentParser(description='Create tasks to validate.') parser.add_argument('--hits', type=int, default=100, help='how many hits to submit') parser.add_argument('-s', '--skip', type=int, default=0, help='from where in the dataset to submit.') parser.add_argument('--num_samples', type=int, default=10, help='number of samples used per hit') args = parser.parse_args() df = pd.read_csv("data_to_curate/test_justice_arxiv.tsv", sep="\t", header=None) df = df[args.skip:args.skip + args.num_samples*args.hits] # df = df[args.skip:args.skip + args.hits] savefile = "data.jsonl" class mydict(dict): def __str__(self): return json.dumps(self) # df = shuffle(df) # to avoid pairs in the same batch list_of_dicts = [] # will save these dicts print(df) for hit_num in range(args.hits): start_idx = args.skip + args.num_samples*hit_num d = {} for j in range(args.num_samples): idx = start_idx + j sc = df.iloc[idx, 1] d["sent{}".format(j)] = sc d["good_{}".format(j)] = "good_{}".format(idx) d["bad_{}".format(j)] = "bad_{}".format(idx) d["low_quality_{}".format(j)] = "low_quality_{}".format(idx) list_of_dicts.append(d) with open("data.jsonl", "w") as f: for d in list_of_dicts: f.write("{}\n".format(mydict(d)))
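# --- Example invocation (editor's addition): submit 5 HITs of 10 samples
# each, starting from row 100 of the source TSV.
#   python create_tasks.py --hits 5 --skip 100 --num_samples 10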
StarcoderdataPython
124582
################################################################
# compareTools.py
#
# Defines how nodes and edges are compared.
# Usable by other packages such as smallGraph
#
# Author: <NAME>, Oct. 2013
# Copyright (c) 2013-2014 <NAME> and <NAME>
################################################################

def generateListErr(ab,ba):
    listErr = []
    if len(ab) == 0:
        ab = ['_']
    if len(ba) == 0:
        ba = ['_']
    for c1 in ab:
        for c2 in ba:
            listErr.append((c1,c2))
    return listErr

def defaultMetric(labelList1, labelList2):
    #new way but with 1 label per node
    diff = set(labelList1) ^ (set(labelList2)) # symmetric diff
    if len(diff) == 0:
        return (0,[])
    else:
        ab = diff&set(labelList1)
        ba = diff&set(labelList2)
        cost = max(len(ab),len(ba) )
        return (cost,generateListErr(ab,ba))

synonym = {'X':'x','\\times':'x', 'P':'p', 'O':'o','C':'c', '\\prime':'COMMA'}
def synonymMetric(labelList1, labelList2):
    def replace(x):
        if x in synonym.keys():
            return synonym[x]
        else:
            return x
    # list comprehensions instead of map(), so the results can be consumed
    # twice below regardless of Python version
    a = [replace(x) for x in labelList1]
    b = [replace(x) for x in labelList2]
    diff = set(a) ^ (set(b)) # symmetric diff
    if len(diff) == 0:
        return (0,[])
    else:
        ab = diff&set(a)
        ba = diff&set(b)
        cost = max(len(ab),len(ba) )
        return (cost,generateListErr(ab,ba))

ignoredLabelSet = set([])
selectedLabelSet = set([])
def filteredMetric(labelList1, labelList2):
    labelS1 = set(labelList1) - ignoredLabelSet # removing the ignored labels
    labelS2 = set(labelList2) - ignoredLabelSet # removing the ignored labels
    if len(selectedLabelSet) > 0:
        labelS1 &= selectedLabelSet # keep only the selected labels
        labelS2 &= selectedLabelSet # keep only the selected labels
    return defaultMetric(labelS1,labelS2)

# no error if at least one symbol is OK
def intersectMetric(labelList1, labelList2):
    inter = set(labelList1) & (set(labelList2)) # intersection of the two label sets
    if len(inter) > 0:
        return (0,[])
    else:
        ab = set(labelList1)-inter
        ba = set(labelList2)-inter
        return (1,generateListErr(ab,ba))

cmpNodes = defaultMetric
cmpEdges = defaultMetric
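
# Examples (editor's addition) for defaultMetric, the function behind both
# cmpNodes and cmpEdges; '_' marks the empty side of a mismatch.
#   cmpNodes(['a'], ['a'])       -> (0, [])
#   cmpNodes(['a'], ['b'])       -> (1, [('a', 'b')])
#   cmpNodes(['a', 'b'], ['a'])  -> (1, [('b', '_')])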
StarcoderdataPython
1731261
from kokoropy import request, draw_matplotlib_figure, Autoroute_Controller, \ load_view class My_Controller(Autoroute_Controller): ''' Plotting example ''' def action_plot(self): max_range = 6.28 if 'range' in request.GET: max_range = float(request.GET['range']) # import things import numpy as np import matplotlib.pyplot as plt # determine x, sin(x) and cos(x) x = np.arange(0, max_range, 0.1) y1 = np.sin(x) y2 = np.cos(x) # make figure fig = plt.figure() fig.subplots_adjust(hspace = 0.5, wspace = 0.5) fig.suptitle('The legendary sine and cosine curves') # first subplot ax = fig.add_subplot(2, 1, 1) ax.plot(x, y1, 'b') ax.plot(x, y1, 'ro') ax.set_title ('y = sin(x)') ax.set_xlabel('x') ax.set_ylabel('y') # second subplot ax = fig.add_subplot(2, 1, 2) ax.plot(x, y2, 'b') ax.plot(x, y2, 'ro') ax.set_title ('y = cos(x)') ax.set_xlabel('x') ax.set_ylabel('y') # make canvas return draw_matplotlib_figure(fig) def action_index(self): return load_view('example','plotting')
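
# --- Usage note (editor's addition): with Autoroute_Controller, action_plot is
# exposed as a route whose exact URL prefix depends on where the controller is
# mounted; the optional `range` query parameter widens the x interval, e.g.
#   GET .../plot?range=12.56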
StarcoderdataPython
3253164
import pandas as pd
from sklearn.impute import KNNImputer

path = "/Users/joshuaelms/Desktop/github_repos/CSCI-B365/Meteorology_Modeling_Project/data/pretty_data.csv"
df = pd.read_csv(path)

# fit_transform returns a bare ndarray, so re-wrap it and restore the headers
df_knn = KNNImputer().fit_transform(df)
df_knn_actual = pd.DataFrame(df_knn)
df_knn_actual.columns = df.columns

# Note: write_path points at the same file as `path`, so the imputed frame
# replaces the source CSV in place; index=False keeps the round-trip
# column-for-column.
write_path = "/Users/joshuaelms/Desktop/github_repos/CSCI-B365/Meteorology_Modeling_Project/data/pretty_data.csv"
df_knn_actual.to_csv(write_path, index=False)
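
# Quick sanity check (editor's addition): the imputer should leave no gaps.
assert not df_knn_actual.isna().any().any()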
StarcoderdataPython
1711219
<reponame>ShacharWeis/Pi-Thermal-Printer-Camera
import atexit
import cPickle as pickle
import errno
import fnmatch
import io
import os
import os.path
import picamera
import pygame
import stat
import threading
import time
import yuv2rgb
from pygame.locals import *
from subprocess import call
import traceback
import sys
from PIL import Image as PILImage
from PIL import ImageFilter
from PIL import ImageOps
from PIL import ImageDraw
from PIL import ImageEnhance
from Adafruit_Thermal import *

# UI classes ---------------------------------------------------------------

# Small resistive touchscreen is best suited to simple tap interactions.
# Importing a big widget library seemed a bit overkill. Instead, a couple
# of rudimentary classes are sufficient for the UI elements:

# Icon is a very simple bitmap class, just associates a name and a pygame
# image (PNG loaded from icons directory) for each.
# There isn't a globally-declared fixed list of Icons. Instead, the list
# is populated at runtime from the contents of the 'icons' directory.

class Icon:

    def __init__(self, name):
        self.name = name
        try:
            self.bitmap = pygame.image.load(iconPath + '/' + name + '.png')
        except:
            pass

# Button is a simple tappable screen region. Each has:
#  - bounding rect ((X,Y,W,H) in pixels)
#  - optional background color and/or Icon (or None), always centered
#  - optional foreground Icon, always centered
#  - optional single callback function
#  - optional single value passed to callback
# Occasionally Buttons are used as a convenience for positioning Icons
# but the taps are ignored. Stacking order is important; when Buttons
# overlap, lowest/first Button in list takes precedence when processing
# input, and highest/last Button is drawn atop prior Button(s). This is
# used, for example, to center an Icon by creating a passive Button the
# width of the full screen, but with other buttons left or right that
# may take input precedence (e.g. the Effect labels & buttons).
# After Icons are loaded at runtime, a pass is made through the global
# buttons[] list to assign the Icon objects (from names) to each Button.

class Button:

    def __init__(self, rect, **kwargs):
        self.rect = rect          # Bounds
        self.color = None         # Background fill color, if any
        self.iconBg = None        # Background Icon (atop color fill)
        self.iconFg = None        # Foreground Icon (atop background)
        self.bg = None            # Background Icon name
        self.fg = None            # Foreground Icon name
        self.callback = None      # Callback function
        self.value = None         # Value passed to callback
        for key, value in kwargs.iteritems():
            if key == 'color':
                self.color = value
            elif key == 'bg':
                self.bg = value
            elif key == 'fg':
                self.fg = value
            elif key == 'cb':
                self.callback = value
            elif key == 'value':
                self.value = value

    def selected(self, pos):
        x1 = self.rect[0]
        y1 = self.rect[1]
        x2 = x1 + self.rect[2] - 1
        y2 = y1 + self.rect[3] - 1
        if ((pos[0] >= x1) and (pos[0] <= x2) and
                (pos[1] >= y1) and (pos[1] <= y2)):
            if self.callback:
                if self.value is None:
                    self.callback()
                else:
                    self.callback(self.value)
            return True
        return False

    def draw(self, screen):
        if self.color:
            screen.fill(self.color, self.rect)
        if self.iconBg:
            screen.blit(self.iconBg.bitmap,
                        (self.rect[0] + (self.rect[2] - self.iconBg.bitmap.get_width()) / 2,
                         self.rect[1] + (self.rect[3] - self.iconBg.bitmap.get_height()) / 2))
        if self.iconFg:
            screen.blit(self.iconFg.bitmap,
                        (self.rect[0] + (self.rect[2] - self.iconFg.bitmap.get_width()) / 2,
                         self.rect[1] + (self.rect[3] - self.iconFg.bitmap.get_height()) / 2))

    def setBg(self, name):
        if name is None:
            self.iconBg = None
        else:
            for i in icons:
                if name == i.name:
                    self.iconBg = i
                    break

# UI callbacks -------------------------------------------------------------
# These are defined before globals because they're referenced by items in
# the global buttons[] list.

def settingCallback(n):  # Pass 1 (next setting) or -1 (prev setting)
    global screenMode
    screenMode += n
    if screenMode < 4:
        screenMode = len(buttons) - 1
    elif screenMode >= len(buttons):
        screenMode = 4

def fxCallback(n):  # Pass 1 (next effect) or -1 (prev effect)
    global fxMode
    setFxMode((fxMode + n) % len(fxData))

def quitCallback():  # Quit confirmation button
    saveSettings()
    raise SystemExit

def viewCallback(n):  # Viewfinder buttons
    global loadIdx, scaled, screenMode, screenModePrior, settingMode
    if n is 0:    # Gear icon (settings)
        screenMode = settingMode  # Switch to last settings mode
    elif n is 1:  # Play icon (image playback)
        if scaled:  # Last photo is already memory-resident
            loadIdx = saveIdx
            screenMode = 0        # Image playback
            screenModePrior = -1  # Force screen refresh
        else:  # Load image
            r = imgRange(pathData)
            if r:
                showImage(r[1])  # Show last image in directory
            else:
                screenMode = 2   # No images
    else:  # Rest of screen = shutter
        takePicture()

def doneCallback():  # Exit settings
    global screenMode, settingMode
    if screenMode > 3:
        settingMode = screenMode
        saveSettings()
    screenMode = 3  # Switch back to viewfinder mode

def imageCallback(n):  # Pass 1 (next image), -1 (prev image) or 0 (delete)
    global screenMode
    if n is 0:
        screenMode = 1  # Delete confirmation
    else:
        showNextImage(n)

def deleteCallback(n):  # Delete confirmation
    global loadIdx, scaled, screenMode
    screenMode = 0
    screenModePrior = -1
    if n is True:
        os.remove(pathData + '/IMG_' + '%04d' % loadIdx + '.JPG')
        if (imgRange(pathData)):
            screen.fill(0)
            pygame.display.update()
            showNextImage(-1)
        else:  # Last image deleted; go to 'no images' mode
            screenMode = 2
            scaled = None
            loadIdx = -1

# Global stuff -------------------------------------------------------------

screenMode = 3        # Current screen mode; default = viewfinder
screenModePrior = -1  # Prior screen mode (for detecting changes)
settingMode = 4       # Last-used settings mode (default = storage)
fxMode = 0            # Image effect; default = Normal
iconPath = '/home/pi/PiCam/icons'  # Subdirectory containing UI bitmaps (PNG format)
saveIdx = -1          # Image index for saving (-1 = none set yet)
loadIdx = -1          # Image index for loading
scaled = None         # pygame Surface w/last-loaded image

sizeData = [(1296, 972), (320, 240), (0.0, 0.0, 1.0, 1.0)]

# A fixed list of image effects is used (rather than polling
# camera.IMAGE_EFFECTS) because the latter contains a few elements
# that aren't valid (at least in video_port mode) -- e.g. blackboard,
# whiteboard, posterize (but posterise, British spelling, is OK).
# Others have no visible effect (or might require setting add'l
# camera parameters for which there's no GUI yet) -- e.g. saturation,
# colorbalance, colorpoint.
fxData = [
    'none', 'sketch', 'gpen', 'pastel', 'watercolor', 'oilpaint', 'hatch',
    'negative', 'colorswap', 'posterise', 'denoise', 'blur', 'film',
    'washedout', 'emboss', 'cartoon', 'solarize']

pathData = '/home/pi/Photos'

icons = []  # This list gets populated at startup

# buttons[] is a list of lists; each top-level list element corresponds
# to one screen mode (e.g. viewfinder, image playback, storage settings),
# and each element within those lists corresponds to one UI button.
# There's a little bit of repetition (e.g. prev/next buttons are
# declared for each settings screen, rather than a single reusable
# set); trying to reuse those few elements just made for an ugly
# tangle of code elsewhere.

buttons = [
    # Screen mode 0 is photo playback
    [Button((0, 188, 320, 52), bg='done', cb=doneCallback),
     Button((0, 0, 80, 52), bg='prev', cb=imageCallback, value=-1),
     Button((240, 0, 80, 52), bg='next', cb=imageCallback, value=1),
     Button((88, 70, 157, 102)),  # 'Working' label (when enabled)
     Button((148, 129, 22, 22)),  # Spinner (when enabled)
     Button((121, 0, 78, 52), bg='trash', cb=imageCallback, value=0)],

    # Screen mode 1 is delete confirmation
    [Button((0, 35, 320, 33), bg='delete'),
     Button((32, 86, 120, 100), bg='yn', fg='yes', cb=deleteCallback, value=True),
     Button((168, 86, 120, 100), bg='yn', fg='no', cb=deleteCallback, value=False)],

    # Screen mode 2 is 'No Images'
    [Button((0, 0, 320, 240), cb=doneCallback),  # Full screen = button
     Button((0, 188, 320, 52), bg='done'),       # Fake 'Done' button
     Button((0, 53, 320, 80), bg='empty')],      # 'Empty' message

    # Screen mode 3 is viewfinder / snapshot
    [Button((0, 188, 156, 52), bg='gear', cb=viewCallback, value=0),
     Button((0, 0, 320, 240), cb=viewCallback, value=2),
     Button((88, 51, 157, 102)),   # 'Working' label (when enabled)
     Button((148, 110, 22, 22))],  # Spinner (when enabled)

    # Remaining screens are settings modes

    # Screen mode 4 is graphic effect
    [Button((0, 188, 320, 52), bg='done', cb=doneCallback),
     Button((0, 0, 80, 52), bg='prev', cb=settingCallback, value=-1),
     Button((240, 0, 80, 52), bg='next', cb=settingCallback, value=1),
     Button((0, 70, 80, 52), bg='prev', cb=fxCallback, value=-1),
     Button((240, 70, 80, 52), bg='next', cb=fxCallback, value=1),
     Button((0, 67, 320, 91), bg='fx-none'),
     Button((0, 11, 320, 29), bg='fx')],

    # Screen mode 5 is quit confirmation
    [Button((0, 188, 320, 52), bg='done', cb=doneCallback),
     Button((0, 0, 80, 52), bg='prev', cb=settingCallback, value=-1),
     Button((240, 0, 80, 52), bg='next', cb=settingCallback, value=1),
     Button((110, 60, 100, 120), bg='quit-ok', cb=quitCallback),
     Button((0, 10, 320, 35), bg='quit')]
]

# Assorted utility functions -----------------------------------------------

def setFxMode(n):
    global fxMode
    fxMode = n
    camera.image_effect = fxData[fxMode]
    buttons[4][5].setBg('fx-' + fxData[fxMode])

def saveSettings():
    try:
        outfile = open('cam.pkl', 'wb')
        # Use a dictionary (rather than pickling 'raw' values) so
        # the number & order of things can change without breaking.
        d = {'fx': fxMode}
        pickle.dump(d, outfile)
        outfile.close()
    except:
        pass

def loadSettings():
    try:
        infile = open('cam.pkl', 'rb')
        d = pickle.load(infile)
        infile.close()
        if 'fx' in d:
            setFxMode(d['fx'])
    except:
        pass

# Scan files in a directory, locating JPEGs with names matching the
# software's convention (IMG_XXXX.JPG), returning a tuple with the
# lowest and highest indices (or None if no matching files).
def imgRange(path):
    min = 9999
    max = 0
    try:
        for file in os.listdir(path):
            if fnmatch.fnmatch(file, 'IMG_[0-9][0-9][0-9][0-9].JPG'):
                i = int(file[4:8])
                if (i < min):
                    min = i
                if (i > max):
                    max = i
    finally:
        return None if min > max else (min, max)

# Busy indicator. To use, run in separate thread, set global 'busy'
# to False when done.
def spinner():
    global busy, screenMode, screenModePrior

    buttons[screenMode][2].setBg('working')
    buttons[screenMode][2].draw(screen)
    pygame.display.update()

    busy = True
    n = 0
    while busy is True:
        buttons[screenMode][3].setBg('work-' + str(n))
        buttons[screenMode][3].draw(screen)
        pygame.display.update()
        n = (n + 1) % 5
        time.sleep(0.15)

    buttons[screenMode][2].setBg(None)
    buttons[screenMode][3].setBg(None)
    screenModePrior = -1  # Force refresh

def dodge(a, b, alpha):
    return min(int(a * 255 / (256 - b * alpha)), 255)

def drawing(im1, blur=25, alpha=1.0):
    im3 = im1.convert("L")
    im2 = im3.copy()
    im2 = ImageOps.invert(im2)
    for i in range(blur):
        im2 = im2.filter(ImageFilter.BLUR)
    width, height = im1.size
    for x in range(width):
        for y in range(height):
            a = im3.getpixel((x, y))
            b = im2.getpixel((x, y))
            im3.putpixel((x, y), dodge(a, b, alpha))
    return im3

def takePicture():
    global busy, gid, loadIdx, saveIdx, scaled, uid, printer

    if not os.path.isdir(pathData):
        try:
            os.makedirs(pathData)
            # Set new directory ownership to pi user, mode to 755
            os.chown(pathData, uid, gid)
            os.chmod(pathData,
                     stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR |
                     stat.S_IRGRP | stat.S_IXGRP |
                     stat.S_IROTH | stat.S_IXOTH)
        except OSError as e:
            # errno = 2 if can't create folder
            # print errno.errorcode[e.errno]
            return

    saveIdx = 1
    # Scan for next available image slot
    while True:
        filename = pathData + '/IMG_' + '%04d' % saveIdx + '.JPG'
        if not os.path.isfile(filename):
            break
        saveIdx += 1
        if saveIdx > 9999:
            saveIdx = 0

    t = threading.Thread(target=spinner)

    scaled = None
    camera.resolution = sizeData[0]
    camera.crop = sizeData[2]
    try:
        camera.capture(filename, use_video_port=False, format='jpeg',
                       thumbnail=None)
        t.start()
        # Set image file ownership to pi user, mode to 644
        os.chmod(filename,
                 stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
        img = pygame.image.load(filename)
        pilImg = PILImage.open(filename)
        wpercent = (700.0 / float(pilImg.size[0]))
        hsize = int((float(pilImg.size[1]) * float(wpercent)))
        pilImg = pilImg.resize((700, hsize), PILImage.ANTIALIAS).crop(
            (0, 0, 700, 384)).transpose(PILImage.ROTATE_90).convert("L")
        # pilImg.save(filename, "JPEG")
        sharpness = ImageEnhance.Sharpness(pilImg)
        pilImg = sharpness.enhance(3)
        brightness = ImageEnhance.Brightness(pilImg)
        pilImg = brightness.enhance(1.2)
        contrast = ImageEnhance.Contrast(pilImg)
        pilImg = contrast.enhance(2)
        pilImg.save(filename, "JPEG")
        scaled = pygame.transform.scale(img, sizeData[1])
    except Exception, e:
        traceback.print_exc(file=sys.stdout)
    finally:
        # Add error handling/indicator (disk full, etc.)
        camera.resolution = sizeData[1]
        camera.crop = (0.0, 0.0, 1.0, 1.0)

    busy = False
    t.join()

    try:
        if scaled:
            if scaled.get_height() < 240:  # Letterbox
                screen.fill(0)
            screen.blit(scaled,
                        ((320 - scaled.get_width()) / 2,
                         (240 - scaled.get_height()) / 2))
            pygame.display.update()
            printer.printImage(pilImg, True)
            time.sleep(0.3)
            printer.feed(2)
            time.sleep(2.5)
            loadIdx = saveIdx
    except Exception, e:
        traceback.print_exc(file=sys.stdout)

def showNextImage(direction):
    global busy, loadIdx

    t = threading.Thread(target=spinner)
    t.start()

    n = loadIdx
    while True:
        n += direction
        if (n > 9999):
            n = 0
        elif (n < 0):
            n = 9999
        if os.path.exists(pathData + '/IMG_' + '%04d' % n + '.JPG'):
            showImage(n)
            break

    busy = False
    t.join()

def showImage(n):
    global busy, loadIdx, scaled, screenMode, screenModePrior, sizeMode

    t = threading.Thread(target=spinner)
    t.start()

    img = pygame.image.load(pathData + '/IMG_' + '%04d' % n + '.JPG')
    scaled = pygame.transform.scale(img, sizeData[1])
    loadIdx = n

    busy = False
    t.join()

    screenMode = 0        # Photo playback
    screenModePrior = -1  # Force screen refresh

# Initialization -----------------------------------------------------------

printer = Adafruit_Thermal("/dev/ttyAMA0", 19200, timeout=5)

# Init framebuffer/touchscreen environment variables
os.putenv('SDL_VIDEODRIVER', 'fbcon')
os.putenv('SDL_FBDEV', '/dev/fb1')
os.putenv('SDL_MOUSEDRV', 'TSLIB')
os.putenv('SDL_MOUSEDEV', '/dev/input/touchscreen')

# Get user & group IDs for file & folder creation
# (Want these to be 'pi' or other user, not root)
s = os.getenv("SUDO_UID")
uid = int(s) if s else os.getuid()
s = os.getenv("SUDO_GID")
gid = int(s) if s else os.getgid()

# Buffers for viewfinder data
rgb = bytearray(320 * 240 * 3)
yuv = bytearray(320 * 240 * 3 / 2)

# Init pygame and screen
pygame.init()
pygame.mouse.set_visible(False)
screen = pygame.display.set_mode((0, 0), pygame.FULLSCREEN)

# Init camera and set up default values
camera = picamera.PiCamera()
atexit.register(camera.close)
camera.resolution = sizeData[1]
# camera.crop = sizeData[2]
camera.crop = (0.0, 0.0, 1.0, 1.0)
# Leave raw format at default YUV, don't touch, don't set to RGB!

# Load all icons at startup.
for file in os.listdir(iconPath):
    if fnmatch.fnmatch(file, '*.png'):
        icons.append(Icon(file.split('.')[0]))

# Assign Icons to Buttons, now that they're loaded
for s in buttons:        # For each screenful of buttons...
    for b in s:          # For each button on screen...
        for i in icons:  # For each icon...
            if b.bg == i.name:   # Compare names; match?
                b.iconBg = i     # Assign Icon to Button
                b.bg = None      # Name no longer used; allow garbage collection
            if b.fg == i.name:
                b.iconFg = i
                b.fg = None

loadSettings()  # Must come last; fiddles with Button/Icon states

# Main loop ----------------------------------------------------------------

while(True):

    # Process touchscreen input
    while True:
        for event in pygame.event.get():
            if (event.type is MOUSEBUTTONDOWN):
                pos = pygame.mouse.get_pos()
                for b in buttons[screenMode]:
                    if b.selected(pos):
                        break
        # If in viewfinder or settings modes, stop processing touchscreen
        # and refresh the display to show the live preview. In other modes
        # (image playback, etc.), stop and refresh the screen only when
        # screenMode changes.
        if screenMode >= 3 or screenMode != screenModePrior:
            break

    # Refresh display
    if screenMode >= 3:  # Viewfinder or settings modes
        stream = io.BytesIO()  # Capture into in-memory stream
        camera.capture(stream, use_video_port=True, format='raw')
        stream.seek(0)
        stream.readinto(yuv)  # stream -> YUV buffer
        stream.close()
        yuv2rgb.convert(yuv, rgb, sizeData[1][0], sizeData[1][1])
        img = pygame.image.frombuffer(
            rgb[0:(sizeData[1][0] * sizeData[1][1] * 3)],
            sizeData[1], 'RGB')
    elif screenMode < 2:  # Playback mode or delete confirmation
        img = scaled      # Show last-loaded image
    else:                 # 'No Photos' mode
        img = None        # You get nothing, good day sir

    if img is None or img.get_height() < 240:  # Letterbox, clear background
        screen.fill(0)
    if img:
        screen.blit(img,
                    ((320 - img.get_width()) / 2,
                     (240 - img.get_height()) / 2))

    pygame.draw.rect(screen, (0, 0, 20), (0, 176, 320, 64), 0)

    # Overlay buttons on display and update
    for i, b in enumerate(buttons[screenMode]):
        b.draw(screen)
    pygame.display.update()

    screenModePrior = screenMode
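# --- Editor's note: hedged worked example (not in the original source) ---
# dodge() above is the classic "color dodge" blend used for pencil-sketch
# effects: result = base * 255 / (256 - blurred_inverted * alpha), clamped
# to 255. For a mid-gray pixel a=128 blended with b=128 at alpha=1.0:
#
#   dodge(128, 128, 1.0) = min(int(128 * 255 / (256 - 128)), 255)
#                        = min(255, 255) = 255
#
# so flat regions wash out to white while edges, where a stays dark relative
# to b, keep their darkness -- which is what gives drawing() its sketch look.
# Note that drawing() is defined but never called in this script; the image
# actually printed goes through the ImageEnhance pipeline in takePicture().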
StarcoderdataPython
1642830
#!/tools/lm-venv/py3.6-tf-1.3.0-svail/bin/python import click import math import os import sys import config import shutil import itertools import numpy as np from parallelism import Parallelism from topology import Topology from simulate import Graph import util from hw_component import Core, MemoryHierarchy, Network from model import Model algByte=False #algorithmic ops false proj=False #consider projection layer, turn off for end-2-end validation, as baeline model does not have projection layer validating_v100=True class TimeCalculation: def __init__(self, exp_config): #Model Parameters self.model = Model(exp_config) self.B = self.model.batch_size self.V = self.model.vocab_size self.L = self.model.num_layers self.D = self.model.hidden_dim self.projection = self.model.projection self.S = self.model.seq_len self.G = self.model.num_gates self.NL = self.model.num_non_linear self.A = self.model.num_add self.P = self.model.num_pointwise #Software Parameters self.O = exp_config.sw_config.kernel_launch_overhead self.precision = exp_config.sw_config.precision self.attached = True #Hardware Parameters self.core = Core(exp_config) self.th = self.core.getThroughput() self.FMA_width = self.core.FMA_width self.dataflow = self.core.dataflow self.memoryHierarchy = MemoryHierarchy(exp_config) self.num_levels = self.memoryHierarchy.num_levels self.memLayer = self.memoryHierarchy.memLayer self.tileSpace = self.generateTileSpace() #TODO: move this to config file self.H2Dbw = 12.4*1024*1024*1024 #System Parameters self.num_wafer = exp_config.system_config.num_wafers self.num_workers = exp_config.system_config.num_workers self.network = Network(exp_config) intra_throughput, inter_throughput = self.network.calcThroughput() intra_latency, inter_latency = self.network.calcLatency() inter_derate = exp_config.system_config.inter_derate intra_derate = exp_config.system_config.intra_derate par2cross = exp_config.system_config.par2cross derated_inter_throughput = -1 derated_intra_throughput = -1 #cross-wafercommunications will pass through intra links too if self.num_wafer > 1 and self.num_workers > 1: if intra_derate != 0: derated_inter_throughput = min(intra_throughput/intra_derate, inter_throughput/inter_derate) else: derated_inter_throughput = inter_throughput/inter_derate else: derated_inter_throughput = 0 if self.num_workers > 1 and intra_derate != 0: derated_intra_throughput = intra_throughput/intra_derate else: derated_intra_throughput = 0 self.IBK1, self.LLK1 = ((derated_inter_throughput, inter_latency) if par2cross["kp1"] else (derated_intra_throughput, intra_latency)) self.IBK2, self.LLK2 = ((derated_inter_throughput, inter_latency) if par2cross["kp2"] else (derated_intra_throughput, intra_latency)) self.IBD, self.LLD = ((derated_inter_throughput, inter_latency) if par2cross["dp"] else (derated_intra_throughput, intra_latency)) self.IBL, self.LLL = ((derated_inter_throughput, inter_latency) if par2cross["lp"] else (derated_intra_throughput, intra_latency)) #Scheduling Parameters par = Parallelism(exp_config) par.findParallelStrategy() self.autoPar = par.autoPar self.lp = par.lp self.kp_hidden_dim1 = par.kp_hidden_dim1 self.kp_softmax_dim1 = par.kp_softmax_dim1 self.kp_embedding_dim1 = par.kp_embedding_dim1 self.kp_projection_dim1 = par.kp_projection_dim1 self.kp_hidden_dim2 = par.kp_hidden_dim2 self.kp_softmax_dim2 = par.kp_softmax_dim2 self.kp_embedding_dim2 = par.kp_embedding_dim2 self.kp_projection_dim2 = par.kp_projection_dim2 self.dp = par.dp self.kp_hidden_type = par.kp_hidden_type #1: CR, 2: RC 
self.kp_softmax_type = par.kp_softmax_type #1: CR, 2: RC self.kp_embedding_type = par.kp_embedding_type #1: CR, 2: RC self.kp_projection_type = par.kp_projection_type #1: CR, 2: RC #Define miniBatch size self.miniB = math.ceil(self.B / self.dp) #Statistics Param self.tot_flop = 0 self.tot_mem = 0 self.tot_time = 0 self.debug = False self.validating_GEMM = False def updateParams(self, debug, m, n, k, t, kp1, kp2, dp, lp, gemm, batch_size, hidden_dim, seq_len, vocab_size, num_layer): self.B = batch_size self.D = hidden_dim self.S = seq_len self.V = vocab_size self.L = num_layer #Define miniBatch size self.dp = dp if dp != None else self.dp self.miniB = math.ceil(self.B / self.dp) self.debug = debug self.validating_GEMM = gemm self.lp = lp if lp != None else self.lp self.kp_hidden_dim1 = kp1 if kp1 != None else self.kp_hidden_dim1 self.kp_hidden_dim1 = kp1 if kp1 != None else self.kp_hidden_dim1 self.kp_hidden_dim2 = kp2 if kp2 != None else self.kp_hidden_dim2 self.kp_hidden_type = (2 if t == 'RC' else (1 if t == 'CR' else self.kp_hidden_type)) #TODO: decide if we want kp1, kp2 to control other layers besides hidden layer self.kp_softmax_dim1 = kp1 if kp1 != None else self.kp_softmax_dim1 self.kp_softmax_dim2 = kp2 if kp2 != None else self.kp_softmax_dim2 self.kp_softmax_type = (2 if t == 'RC' else (1 if t == 'CR' else self.kp_softmax_type)) self.kp_embedding_dim1 = kp1 if kp1 != None else self.kp_embedding_dim1 self.kp_embedding_dim2 = kp2 if kp2 != None else self.kp_embedding_dim2 self.kp_embedding_type = (2 if t == 'RC' else (1 if t == 'CR' else self.kp_embedding_type)) self.kp_projection_dim1 = kp1 if kp1 != None else self.kp_projection_dim1 self.kp_projection_dim2 = kp2 if kp2 != None else self.kp_projection_dim2 self.kp_projection_type = (2 if t == 'RC' else (1 if t == 'CR' else self.kp_projection_type)) #TODO: need to change all equations to be a function of m,n and k #self.D = n//4 print("kp1: {}".format(self.kp_hidden_dim1)) print("kp2: {}".format(self.kp_hidden_dim2)) #TODO: It is a hacky way of capturing assymetry across links within V100 #move this to network topology and distinguish between inter and intra network if validating_v100: self.IBK1 = util.scale_down(self.IBK1, self.kp_hidden_dim1, "kp1") self.IBK2 = util.scale_down(self.IBK2, self.kp_hidden_dim2, "kp2") self.IBD = util.scale_down(self.IBD, self.dp, "dp") self.IBL = util.scale_down(self.IBL, self.lp, "lp") #Number of parameters def tot_param(self): embedding = self.V * self.D hidden = (2 * self.D + 1) * (self.G * self.D) * self.L projection = self.D * self.projection softmax = ((self.projection if proj else self.D) + 1) * self.V tot_param = embedding + hidden + projection + softmax return tot_param def printSysConfig(self, exp_config, output_file): kiloByte = 1024 megaByte = kiloByte * 1024 gigaByte = megaByte * 1024 teraByte = gigaByte * 1024 with open(output_file, "w") as f: f.write("==========================\n") f.write("Hardware Configuration\n") f.write("==========================\n") f.write("Throughput: {:.5f} Tflops\n".format(self.core.operating_throughput/1e12)) for i in range(self.num_levels-1, -1, -1): mem_bw = self.memLayer[i].dynamic_throughput mem_size = self.memLayer[i].size if mem_bw < 1e3 * gigaByte: f.write("L{:} Bandwidth: {:.1f} GB/s\n".format(i, mem_bw/(gigaByte))) else: f.write("L{:} Bandwidth: {:.1f} TB/s\n".format(i, mem_bw/(teraByte))) if mem_size < 1e3 * megaByte: f.write("L{:} Size: {:.10f} MB\n".format(i, mem_size/(megaByte))) elif mem_size < 1e3 * gigaByte: f.write("L{:} Size: {:.1f} 
GB\n".format(i, mem_size/(gigaByte))) else: f.write("L{:} Size: {:.1f} TB\n".format(i, mem_size/(teraByte))) f.write("Intra-node Bandwidth: {:.1f} GB/s\n".format(self.network.intra_network.throughput/(gigaByte))) f.write("Inter-node Bandwidth: {:.1f} GB/s\n".format(self.network.inter_network.throughput/(gigaByte))) M = self.memLayer[self.num_levels - 1].size tot_mem, embedding_mem, hidden_mem, softmax_mem, projection_mem, wt_mem, act_mem, point_mem = util.getTotMemReq(exp_config, batch_size = self.B, hidden_dim = self.D, vocab_size = self.V, seq_len = self.S, num_layer = self.L, dp = self.dp, lp = self.lp, kp1 = self.kp_hidden_dim1, kp2 = self.kp_hidden_dim2, kp_type = self.kp_hidden_type) f.write("\n\n===========================================\n") f.write("Memory Requirement Breakdown per Data Shard\n") f.write("===========================================\n") f.write("Total Memory: {:.1f} GB\n" "Embedding Memory: {:.1f} GB\n" "Hidden Memory: {:.1f} GB\n" "Softmax Memory: {:.1f} GB\n" "Projection Memory: {:.1f} GB\n" .format(tot_mem/gigaByte, embedding_mem/gigaByte, hidden_mem/gigaByte, softmax_mem/gigaByte, projection_mem/gigaByte)) f.write("\nTotal Memory: {:.1f} GB\n" "Weight Memory: {:.1f} GB\n" "Activation Memory: {:.1f} GB\n" "Pointwise Memory: {:.1f} GB\n" .format(tot_mem/gigaByte, wt_mem/gigaByte, act_mem/gigaByte, point_mem/gigaByte)) f.write("\nMemory Overflow Rate (Total Memory Required per Data Shard / Memory capacity per node): {:.1f}\n".format(float("inf") if M==0 else tot_mem/M)) tot_mem, embedding_mem, hidden_mem, softmax_mem, projection_mem, wt_mem, act_mem, point_mem = util.getMemUsagePerCore(exp_config, batch_size = self.B, hidden_dim = self.D, vocab_size = self.V, seq_len = self.S, num_layer = self.L, dp = self.dp, lp = self.lp, kp1 = self.kp_hidden_dim1, kp2 = self.kp_hidden_dim2, kp_type = self.kp_hidden_type) f.write("\n\n===========================================================\n") f.write("Memory Requirement Breakdown per Data Shard Per Model Shard\n") f.write("===========================================================\n") f.write("Total Memory: {:.1f} GB\n" "Embedding Memory: {:.1f} GB\n" "Hidden Memory: {:.1f} GB\n" "Softmax Memory: {:.1f} GB\n" "Projection Memory: {:.1f} GB" .format(tot_mem/gigaByte, embedding_mem/gigaByte, hidden_mem/gigaByte, softmax_mem/gigaByte, projection_mem/gigaByte)) f.write("\nMemory Overflow Rate (Total Memory Required per Data Shard Per Model Shard/ Memory capacity per node): {:.5f}\n" .format(float("inf") if M == 0 else tot_mem/M)) f.write("\nTotal Memory: {:.1f} GB\n" "Weight Memory: {:.1f} GB\n" "Activation Memory: {:.1f} GB\n" "Pointwise Memory: {:.1f} GB\n" .format(tot_mem/gigaByte, wt_mem/gigaByte, act_mem/gigaByte, point_mem/gigaByte)) f.write("\n\n====================\n") f.write("Parallelism Strategy\n") f.write("====================\n") f.write("dp: {}, lp: {}, kp_hidden_dim1: {}, kp_hidden_dim2: {}," "kp_softmax_dim1: {}, kp_softmax_dim2: {}, kp_embedding1: {}, kp_embedding2: {}," "kp_projection_dim1: {}, kp_proejction_dim2: {}\n" .format(self.dp, self.lp, self.kp_hidden_dim1, self.kp_hidden_dim2, self.kp_softmax_dim1, self.kp_softmax_dim2, self.kp_embedding_dim1, self.kp_embedding_dim2, self.kp_projection_dim1, self.kp_projection_dim2)) f.write("\n\n==============================================================================\n") f.write("Hardware Component Stats\n") f.write("==============================================================================\n") self.core.printStats(f) for i in range(0, self.num_levels): 
self.memLayer[i].printStats(f) self.network.printStats(f) def roofline(self, flop, mem_access_, name=''): #print("Roofline: entered {}".format(name)) mem_access = [] if isinstance(mem_access_, int): mem_access.append(mem_access_) elif isinstance(mem_access_, float): mem_access.append(int(mem_access_)) elif isinstance(mem_access_, list): mem_access = mem_access_ else: print(mem_access_) print("mem_access_ should be inetger or list, wrong input", flush=True) sys.exit(0) num_level = len(mem_access) time = [0] * num_level comp_int = [0] * num_level inflection_point = [0] * num_level try: assert(mem_access[num_level - 1] > 0) , "last_level_mem = 0" except Exception as e: print("{}: Number of accesses to the last level of memory hierarchy cannot be zero:\n {}".format(name, e), flush=True) sys.exit(0) for i in range(0, num_level): time[i] = 0 mem_bw = self.memLayer[i].getThroughput() mem_latency = self.memLayer[i].getLatency() num_mem = mem_access[i] inflection_point[i] = float("inf") if mem_bw == 0 else self.th / mem_bw comp_int[i] = 0 if num_mem == 0 else flop / num_mem if comp_int[i] < inflection_point[i]: #mem-bound time[i] = (float("inf") if (mem_bw == 0 or num_mem == 0) else (num_mem / mem_bw)) + mem_latency else: #compute-bound time[i] = float("inf") if (self.th == 0) else (flop / self.th) max_time = max(time) if self.debug: print('{}: {}'.format(name, max_time)) print('GEMM flops: {:,}'.format(flop)) for i in range(0, num_level): print("L{}".format(i)) print("inflection_point: {:.2f}".format(inflection_point[i])) print("comp_int: {:.2f}".format(comp_int[i])) print("time: {}".format(time[i])) print() #print("Roofline: exited {}".format(name)) return max_time #Convert GEMM into sqaure tiles # def getGEMMTime(self, A_, B_, C_, name): # # #A = util.power2RoundUp(A_) # #B = util.power2RoundUp(B_) # #C = util.power2RoundUp(C_) # A = A_ # B = B_ # C = C_ # #return False, self.GEMM_wrapper(A, B, C, name) # dim = min(min(A, B), C) # Af = math.ceil(A / dim) # Bf = math.ceil(B / dim) # Cf = math.ceil(C / dim) # time = (Af * Bf * Cf) * self.GEMM_Strassen(dim, name) + (Af * Cf * (Bf-1)) * self.getAddTime(dim, dim, name) # return False, time # def GEMM_Strassen(self, dim, name): # if dim <= 512: # time = self.GEMM_wrapper(dim, dim, dim, name) # return time # else: # time = 7 * self.GEMM_Strassen(dim // 2, name) #+ 18 * self.getAddTime(dim // 2, dim // 2, name) # return time # # def getAddTime(self, A, B, name): # ADD_flop = A * B # ADD_gmem = 3 * A * B * self.precision # ADD_time = self.roofline(ADD_flop, ADD_gmem, name='FMA addition') + self.O # return ADD_time def getGEMMTime(self, dim1, dim2, dim3, name): tile2time = {} orderSpace = self.generateOrder(dim1, dim2, dim3, name) for order_dims in orderSpace: if self.debug: print("===============================================================") print("order: {}".format(order_dims)) print("===============================================================") for tile_dims in self.tileSpace: if self.debug: print("++++++++++++++++++++++++++++++++++++++++++++++++++++++++++") print("tile: {}".format(tile_dims)) print("++++++++++++++++++++++++++++++++++++++++++++++++++++++++++") GEMM_flop, mem_access = self.GEMM(order_dims, tile_dims, name) GEMM_time = self.roofline(GEMM_flop,mem_access, name) + self.O tile2time[(order_dims, tile_dims)] = GEMM_time best_tile = min(tile2time, key=tile2time.get) best_time = tile2time[best_tile] if self.debug: print("{}: Best Time: {:,}, Best Order: {}, Best Tile: {}\n".format(name, best_time, best_tile[0], best_tile[1])) return 
best_time, best_tile[0], best_tile[1] def generateOrder(self, dim1, dim2, dim3, name): if self.dataflow =="best": # best stationary if dim1 >= max(dim2, dim3): self.dataflow = "wst" elif dim2 >= max(dim1, dim3): self.dataflow = "ost" elif dim3 >= max(dim1, dim2): self.dataflow = "ast" order=[] if self.dataflow == "wst": #weight stationary order.append((dim2, dim3, dim1)) if dim2 != dim3: order.append((dim3, dim2, dim1)) elif self.dataflow == "ast": #activation stationary order.append((dim1, dim2, dim3)) if dim2 != dim1: order.append((dim2, dim1, dim3)) elif self.dataflow == "ost": #output stationary order.append((dim1, dim3, dim2)) if dim1 != dim3: order.append((dim3, dim1, dim2)) elif self.dataflow == "none": # not stationary if dim1 != dim2 and dim2 != dim3 and dim1 != dim3: order=list(itertools.permutations([dim1, dim2, dim3])) elif dim1 == dim2 and dim2 != dim3: order = [(dim1, dim2, dim3), (dim1, dim3, dim2), (dim3, dim1, dim2)] elif dim1 == dim3 and dim2 != dim1: order = [(dim1, dim2, dim3), (dim1, dim3, dim2), (dim2, dim1, dim3)] elif dim2 == dim3 and dim1 != dim2: order = [(dim1, dim2, dim3), (dim2, dim1, dim3), (dim2, dim3, dim1)] return order def generateTileSpace(self): tile_space = [] tiles = [None] * self.num_levels for level in range(0, self.num_levels-1): memory = self.memLayer[level] #tiles[level] = self.getTileDims(memory) tiles[level] = memory.getTileDims() if self.num_levels == 1: tile_space = [] elif self.num_levels == 2: tile_space = tiles[0] elif self.num_levels == 3: tile_space = [(x,y) for x in tiles[0] for y in tiles[1]] elif self.num_levels == 4: tile_space = [(x,y,z) for x in tiles[0] for y in tiles[1] for z in tiles[2]] else: raise NotImplementedError() return tile_space def getTileSize(self, lid): memory = self.memLayer[lid] memory.calcTileDim() tile_dim = memory.getTileDim() return tile_dim, tile_dim, tile_dim #Count the number of accesses from level-1 to level # input matrix A(dim1, dim2) and B(dim2, dim3) # output matrix C(dim1, dim3) def getNumAccesses(self, level, dim1, dim2, dim3, tile_dim, num_repeat, name): #tile1,tile2,tile3 = self.getTileSize(level-1) tile1, tile2, tile3 = tile_dim orig_size = tile1*tile2 + tile1*tile3 + tile2*tile3 short_tile_cond = [0,0,0] if tile1 > dim1: tile1 = dim1 short_tile_cond[0] = 1 if tile2 > dim2: tile2 = dim2 short_tile_cond[1] = 1 if tile3 > dim3: tile3 = dim3 short_tile_cond[2] = 1 if short_tile_cond[2] == 0 and (short_tile_cond[0] | short_tile_cond[1]) == 1: if level <= 1: tile3 = math.floor((orig_size - tile1 * tile2) / (tile1 + tile2)) else: #store bypasses cache, directly goes to memory tile3 = math.floor((orig_size - tile1 * tile2) / tile2) if tile3 > dim3: tile3 = dim3 #Uncomment if tile3 needs to be pow of 2 #tile3 = int(math.pow(2, math.floor(math.log2(tile3)))) elif short_tile_cond[0] == 0 and (short_tile_cond[1] | short_tile_cond[2]) == 1: if level <= 1: tile1 = math.floor((orig_size - tile3 * tile2) / (tile3 + tile2)) else: #store bypasses cache, directly goes to memory tile1 = math.floor((orig_size - tile3 * tile2) / tile2) if tile1 > dim1: tile1 = dim1 elif short_tile_cond[1] == 0 and (short_tile_cond[0] & short_tile_cond[2]) == 1: if level <= 1: tile2 = math.floor((orig_size - tile3 * tile1) / (tile3 + tile1)) else: tile2 = math.floor((orig_size) / (tile1 + tile3)) if tile2 > dim2: tile2 = dim2 reload_A = 1 reload_B = 1 reload_C = 1 if tile1 > 0 and tile2 > 0 and tile3 > 0: reload_A = math.ceil(dim3 / tile3) reload_B = math.ceil(dim1 / tile1) #do not access the slow memory on every write,acculmuate in 
fast memory reload_C = (1 if level > 1 else math.ceil(dim2 / tile2)) num_mem = num_repeat * (dim1 * dim2 * reload_A + dim2 * dim3 * reload_B + dim1 * dim3 * reload_C) * self.precision if self.debug: print(name) print("Matrix dimension at Level {}: {:,} x {:,} x {:,}".format(level, dim1, dim2, dim3)) print("Tile dimension at Level {}: {:,} x {:,} x {:,}".format(level-1, tile1, tile2, tile3)) print("reload_A: {}, reload_B: {}, reload_C: {}".format(reload_A, reload_B, reload_C)) print("num_repeat: {}".format(num_repeat)) print("Bytes Accessed: {:,}".format(num_mem)) print("") return num_mem, tile1, tile2, tile3 #This is the main function that captures the memory hierarchy impact #on the number of accesses to global memory considering not everything fits in #L2 cache and also captures the effect of shared memory def GEMM(self, order_dims, tile_dims, name): dim1_ = order_dims[0] dim2_ = order_dims[1] dim3_ = order_dims[2] #dim1 = util.power2RoundUp(dim1_) #dim2 = util.power2RoundUp(dim2_) #dim3 = util.power2RoundUp(dim3_) dim1 = dim1_ dim2 = dim2_ dim3 = dim3_ GEMM_flop = dim1 * dim3 * (dim2 + dim2 - 1) #dim2 multiply #dim2-1 add #X1 = self.L2_tile_dim #X2 = self.shared_mem_tile_dim #X3 = self.reg_tile_dim num_accesses = [0] * self.num_levels if (algByte): num_accesses[self.num_levels - 1] = (dim1 * dim2 + dim2 * dim3 + dim1 * dim3) * self.precision else: num_repeat = 1 for level in range(self.num_levels - 1, 0, -1): num_accesses[level], tile1, tile2, tile3 = self.getNumAccesses(level, dim1, dim2, dim3, tile_dims[level-1], num_repeat, name) try: num_repeat *= math.ceil(dim1/tile1) * math.ceil(dim2/tile2) * math.ceil(dim3/tile3) except: num_repeat *= 1 dim1 = tile1 if tile1 != 0 else dim1 dim2 = tile2 if tile2 != 0 else dim2 dim3 = tile3 if tile3 != 0 else dim3 #Number of accesses to level0 (for every 2N^3 computation, 3N^2 memory accesses happen, where N is the width of the systolic engine) reuse = 1 dim1 = dim1_ dim2 = dim2_ dim3 = dim3_ if self.dataflow == "none": reuse = 1 elif self.dataflow == "best": reuse = max(math.ceil(dim1/self.FMA_width), math.ceil(dim3/self.FMA_width), math.ceil(dim2/self.FMA_width)) elif self.dataflow == "wst": #wt stationary reuse = math.ceil(dim1/self.FMA_width) elif self.dataflow == "ast": #act statinary reuse = math.ceil(dim3/self.FMA_width) elif self.dataflow == "ost": #output stationary reuse = math.ceil(dim2/self.FMA_width) else: raise NotImplementedError() #TODO: make sure to model underutilized systolic array #TODO: support FMA_width_x and FMA_width_y num_accesses[0] = GEMM_flop * ((2 * reuse + 1) / (2 * reuse)) * 1/self.FMA_width * self.precision #num_accesses[0] = GEMM_flop * ((2 * reuse + self.FMA_width) / (2 * reuse)) * 1/self.FMA_width * self.precision #TODO: do we still need these in new hierarchical version? 
        # if X3 == 0:
        #     GEMM_smem = GEMM_rmem
        #     GEMM_rmem = 0
        # if X2 == 0:
        #     GEMM_l2mem = GEMM_smem
        #     GEMM_smem = 0
        # if X1 == 0:
        #     GEMM_gmem = GEMM_l2mem
        #     GEMM_l2mem = 0

        return GEMM_flop, num_accesses

    # Column-Row MM
    def getCf_kp1(self):
        # Multiply
        assert(self.kp_hidden_type == 1)
        assert(self.kp_hidden_dim1 > 1)
        assert(self.kp_hidden_dim1 % 4 == 0 or self.kp_hidden_dim1 == 2)  # 4 bc it is LSTM cell
        assert((2 * self.D) % self.kp_hidden_dim1 == 0)

        GEMM_time, reduction_time = self.getDistGEMM_f_kp1(
            self.miniB, 2 * self.D, self.G * self.D,
            self.kp_hidden_dim1, "Cf_kp1")

        # Pointwise ops: all the linear/non-linear ops after MM
        point_flop = self.miniB * (self.G * self.D / self.kp_hidden_dim1) * 5
        # 5 refers to the number of pointwise ops (mul + add + tanh + mul + tanh) on
        # the critical path
        point_mem = (self.precision * self.miniB *
                     (self.G * self.D / self.kp_hidden_dim1) *
                     (3 * 3 + 2 * 2))
        # 3(3 memory access per operation with two input and one output)
        # 3(mul + add + mul) on critical path
        # 2(2 memory access per operation with one input and one output)
        # 1(tanh) on critical path

        data_size = 4 * self.miniB * (self.G * self.D / self.kp_hidden_dim1) * self.precision
        # 4 refers to the number of pointwise ops (mul + add + mul + tanh) on the
        # critical path whose inputs are located across different GPUs
        # NOTE: Assuming all communications can happen in parallel
        mem_transfer = self.roofline(
            0, 2 * data_size,
            name="Cf_kp1: memory accesses before and after data transfer over network")
        # 2: one read from the source and one write to the destination memory
        data_transfer = data_size / self.IBK1
        point_comm = mem_transfer + data_transfer

        point_time = self.roofline(point_flop, point_mem,
                                   name='pointwise_cf_kp1') + 5 * self.O + point_comm

        return GEMM_time + reduction_time + point_time

    def getCb_kp1(self):
        # TODO: Add local accumulation of weights at every time step
        # Pointwise
        point_flop = ((self.miniB) * (self.G * self.D / self.kp_hidden_dim1) * 5 +
                      (2 * self.D * self.G * self.D / self.kp_hidden_dim1))  # local accumulation of wts
        # 4 refers to the number of pointwise ops (mul + add + tanh + mul) on
        # the critical path
        point_mem = (self.precision * self.miniB *
                     (self.G * self.D / self.kp_hidden_dim1) * (3 * 3 + 2 * 2) +
                     (2 * self.precision * self.D * self.G * self.D / self.kp_hidden_dim1) * 3)  # local accumulation of wts
        # 3(3 memory access per operation with two input and one output)
        # 3(mul + add + mul) on critical path
        # 2(2 memory access per operation with one input and one output)
        # 1(tanh) on critical path

        data_size = 4 * self.miniB * (self.G * self.D / self.kp_hidden_dim1) * self.precision
        mem_transfer = self.roofline(
            0, 2 * data_size,
            name='Cb_kp1: memory accesses before and after data transfer over network')
        data_transfer = data_size / self.IBK1
        point_comm = mem_transfer + data_transfer
        # 3 refers to the number of pointwise ops (mul + tanh + mul) on
        # critical path whose inputs are located across different GPUs
        # NOTE: Assuming all communications can happen in parallel

        point_time = self.roofline(point_flop, point_mem,
                                   name='pointwise_Cb_kp1') + 5 * self.O + point_comm

        # GEMM_wrt_act and wt is calculated under getDistGEMM_b_kp1
        GEMM_time, reduction_time = self.getDistGEMM_b_kp1(
            self.miniB, 2 * self.D, self.G * self.D,
            self.kp_hidden_dim1, "Cb_kp1")

        if self.debug:
            print("(gr) Hidden point_flop: {:,}, point_mem: {:,}\n".format(
                int(point_flop / 1e9), int(point_mem / 1e9)))

        return GEMM_time + reduction_time + point_time

    # Row-Column MM
    def getCf_kp2(self):
        # Multiply
        assert(self.kp_hidden_type == 2)
        assert(self.kp_hidden_dim1 > 1 or self.kp_hidden_dim2 > 1)
        assert(self.kp_hidden_dim2 % self.G == 0 or
               self.kp_hidden_dim2 == 2 or self.kp_hidden_dim2 == 1)
        assert(self.miniB % self.kp_hidden_dim1 == 0)
        assert(self.G * self.D % self.kp_hidden_dim2 == 0)

        GEMM_time, reduction_time = self.getDistGEMM_f_kp2(
            self.miniB, 2 * self.D, self.G * self.D,
            self.kp_hidden_dim1, self.kp_hidden_dim2, "Cf_kp2")

        # Pointwise ops
        point_flop = (self.miniB / self.kp_hidden_dim1) * (self.G * self.D / self.kp_hidden_dim2) * 5
        # 4 refers to the number of pointwise ops (mul + add + tanh + mul) on
        # the critical path
        point_mem = int(self.precision * (self.miniB / self.kp_hidden_dim1) *
                        (self.G * self.D / self.kp_hidden_dim2) *
                        (3 * 3 + 2 * 2))
        # 3(3 memory access per operation with two input and one output)
        # 3(mul + add + mul) on critical path
        # 2(2 memory access per operation with one input and one output)
        # 1(tanh) on critical path

        data_size = ((self.miniB / self.kp_hidden_dim1) *
                     (self.G * self.D / self.kp_hidden_dim2) * 4 * self.precision)
        # 4 refers to the number of pointwise ops (mul + add + tanh + mul) whose inputs
        # across different GPU

        point_comm = 0
        if (self.kp_softmax_dim2 > 1):
            mem_transfer = self.roofline(
                0, 2 * data_size,
                name='Cf_kp2: memory accesses before and after data transfer over network')
            data_transfer = data_size / self.IBK2
            point_comm = mem_transfer + data_transfer

        point_time = self.roofline(point_flop, point_mem,
                                   name='pointwise_Cf_kp2') + 5 * self.O + point_comm

        return GEMM_time + reduction_time + point_time

    def getCb_kp2(self):
        # Pointwise ops
        point_flop = ((self.miniB / self.kp_hidden_dim1) *
                      (self.G * self.D / self.kp_hidden_dim2) * 5 +
                      (2 * self.D * self.G * self.D / self.kp_hidden_dim2))  # local accumulation of wts
        # 4 refers to the number of pointwise ops (mul + add + tanh + mul) on
        # the critical path
        # kp_hidden_dim2 is for the reduction sum operation after doing outer product
        # for (B,4D)x(4D,2D). This is an outer product due to the data distribution.
point_mem = int((self.precision * (self.miniB / self.kp_hidden_dim1) * (self.G * self.D / self.kp_hidden_dim2) * (3 * 3 + 2 * 2)) + (2 * self.precision * self.D * self.G * self.D / self.kp_hidden_dim2) * 3) # local accumulation of wts # 3(3 memory access per operation with two input and one output) # 3(mul + add + mul) on critical path # 2(2 memory access per operation with one input and one output) # 1(tanh) on critical path data_size = int(self.miniB * (self.G * self.D / self.kp_hidden_dim2) * 4 * self.precision) #3 refers to the number of pointwise ops (mul + add +tanh + mul) on #3 refers to the number of hops to gather i,f, o and c in each GPU #in order to perform (B,4D)x(4D,2D) point_comm = 0 if (self.kp_softmax_dim2 > 1): mem_transfer = self.roofline(0, 2 * data_size, name='Cb_kp2:memory accesses before and after data transfer over network') data_transfer = data_size / self.IBK2 point_comm = mem_transfer + data_transfer point_time = self.roofline(point_flop, point_mem, name='pointwise_Cb_kp2') + 5 * self.O + point_comm GEMM_time, reduction_time = self.getDistGEMM_b_kp2(self.miniB, 2 * self.D, self.G * self.D, self.kp_hidden_dim1,self.kp_hidden_dim2, "Cb_kp2") if self.debug: print("(gr) Hidden point_flop: {:,}, point_mem: {:,}\n".format(int(point_flop/1e9), int(point_mem/1e9))) return GEMM_time + reduction_time + point_time def getCf(self, m, n, k): #Add Biad adds """Get LSTM Cell Time on Forward Path""" GEMM_time = self.getGEMMTime(m, k, n, "Cf") point_flop = m * n * 5 #1: add bias #5: add nonlinearities, there is one more than the number of gates (self.G) #1: pointwise muliply and add point_mem = (self.precision * m * n * (3 * 3 + 2 * 2 )) #3: 3 memory accesses for operands with two inputs and one output #2: 1 for bias add + 1 for pointwise mul #2: 2 memory accesses for operands with one input and one output #1: 5/4 non-linearities per gate point_time = self.roofline(point_flop, point_mem, name='pointwise_Cf') + 5 * self.O if self.debug: gigaByte = 1024 * 1024 * 1024 print("Hidden point_flop: {:,}, point_mem: {:,}\n".format(int(point_flop/1e9), int(point_mem/gigaByte))) print("Hidden point_time: {:,}\n".format(point_time)) if self.validating_GEMM: return GEMM_time else: return GEMM_time[0] + point_time def getCb(self): """Get LSTM Cell Time on Backward Path""" grad_act_time,_,_ = self.getGEMMTime(self. 
miniB, self.G * self.D, 2 * self.D, "Cb_act") grad_wt_time,_,_ = self.getGEMMTime(2 * self.D, self.miniB, self.G * self.D, "Cb_wt") GEMM_time = grad_act_time + grad_wt_time point_flop = ((self.miniB * self.D * 5) + (2 * self.D * self.G * self.D)) # local accumulation of wts point_mem = ((self.precision * self.miniB * self.D * (3 * 3 + 2 * 2)) + (2 * self.precision * self.D * self.G * self.D) * 3) #local accumulation of wts point_time = self.roofline(point_flop, point_mem, name='pointwise_Cb') + 5 * self.O if self.debug: print("(gr) Hidden/ point_flop: {:,}, point_mem: {:,} ".format(int(point_flop/1e9), int(point_mem/1e9))) print("Hidden point_time: {:,}\n".format(point_time)) return GEMM_time + point_time #Reduction and all-gather time estimation def getR(self, Dim0 = None, Dim1 = None, p = None, ib = None, ll = None, partial = None, allReduce = None, name = None): """Get partail or full reduction or allGather latency""" """Partial reduction means each gpu is only collecting a shard of reduced data""" """allReduce= False measures allGather latency otherwise allReduce""" """Partial: True, All-reduce:True, half All-reduce""" """Partial: True, All-reduce:False, All-gather""" """Partial: False, All-reduce:True, All-reduce""" """Partial: False, All-reduce:False, All-gather""" if (Dim0 == None): #for data parallel reduction, Dim0 = (2 * self.D // self.kp_hidden_dim) if (self.kp_hidden_type == 1) else (2 * self.D // self.kp_hidden_dim2 if (self.kp_hidden_type == 2) else (2 * self.D)) if (Dim1 == None): Dim1 = self.G * self.D if (p == None): p = self.dp if (ib == None): ib = self.IBD if (ll == None): ll = self.LLD if (partial == None): partial = False if (allReduce == None): allReduce = True if (p == 1): return 0 #If small data transfers, just broadcast #NOTE: Keep threshold zero to avoid if loop threshold = 0 data_tranfer = 0 data_prep = 0 #FIXME: Here I assumed point-2-point links exist across all nodes #Implement brodcast timing under ring topology if (self.precision * Dim0 * Dim1 < threshold): factor = (1/p if partial else 1) data_transfer = (((self.precision * Dim0 * Dim1)/ib + ll) * factor if p > 1 else 0) data_prep_comp = Dim0 * Dim1 * (p-1) * factor data_prep_mem = int((3 * self.precision * Dim0 * Dim1) * (p - 1) * factor) data_prep = self.roofline(data_prep_comp, data_prep_mem, name='R-prepTime') else: #Assuming point-2-point link between consecutive data partitions #In other words, the network topology assumed is Ring, #therefore all (dp-1) transfers can happen in parallel, #To assume different toplogy data_transfer formulation should change #e.g. 
assuming bus, data_transfer formulation would change as follows: #data_transfer = ((self.precision * self.D * self.D) * (self.dp /self.dp)) * # (self.G * 2) * (2 * (self.dp - 1))) / self.IBD factor = (1 if partial or not allReduce else 2) mem_access = self.roofline(0, int(2 * self.precision * Dim0 * Dim1 / p), name='Reduction: memory accesses before and after data transfer over network') data_transfer = float("inf") if (ib == 0) else ((((self.precision * Dim0 * Dim1) / p) / ib) + mem_access + ll) * factor * (p - 1) #dt = ((self.precision * Dim0 * Dim1) / p) * factor * (p - 1) #First round accumlates the updates as going around the ring data_prep_comp = (Dim0 * Dim1) / p data_prep_mem = int(3 * self.precision * Dim0 * Dim1 / p) data_prep = ((self.roofline(data_prep_comp, data_prep_mem, name='R-prepTime') + self.O) * (p - 1)) #all-gather-concat data_concat_mem = 3 * Dim0 * Dim1 * self.precision concat_time = ((self.roofline(0, data_concat_mem, name='all-gather-concat') + self.O)) #print("R1: {}, factor: {}\n".format(dt,factor)) if self.debug: print("Bandwidth: {:,} GB/s".format(ib/(1024*1024*1024))) print("data_transfer_time: {:,}, data_prep_time: {:,}, concat_time: {:,}".format(data_transfer, (data_prep if allReduce else 0), (concat_time if not allReduce else 0))) print("(data_prep) allReduce_flop: {:,}, allReduce_mem: {:,}".format(int(data_prep_comp), int(data_prep_mem))) print("(data_transfer) {:,}".format(int(self.precision * Dim0 * Dim1 / (p)))) return data_transfer + (data_prep if allReduce else 0) + (concat_time if not allReduce else 0) def gradClipping(self, Dim0 = None, Dim1 = None, name = None): if (Dim0 == None): Dim0 = 2 * self.D if (Dim1 == None): Dim1 = self.G * self.D if (name == None): name = "Hidden" #t_list[i] * clip_norm / max(global_norm, clip_norm) #where: #global_norm = sqrt(sum([l2norm(t)**2 for t in t_list])) norm_comp = Dim0 * Dim1 * 2 #1: power 2 #1: summ norm_mem = (Dim0 * Dim1 * 1) * self.precision #1: one read per element and power it by 2 in local registers anfd #summing to local acc clip_comp = Dim0 * Dim1 * 2 #1: pointwise mul #1: pointwise div clip_mem = (Dim0 * Dim1 * 2) * self.precision #1: one read for pointwise mul #1: one write for pointwise div gradclip_mem = norm_mem + clip_mem gradclip_comp = norm_comp + clip_comp gradclip_time = self.roofline(gradclip_comp, gradclip_mem, name='pointwise-grad-clipping') if self.debug: print("({}) gradclip_flop: {:,}, gradclip_mem: {:,}".format(name, gradclip_comp, gradclip_mem)) print("({}) gradclip_time: {:,}\n".format(name, gradclip_time)) return gradclip_time def applyGrad(self, Dim0 = None, Dim1 = None, name = None): if (Dim0 == None): Dim0 = 2 * self.D if (Dim1 == None): Dim1 = self.G * self.D if (name == None): name = "Hidden" applyGrad_comp = Dim0 * Dim1 * 3 #3: one pointwise division by scalar after reducing all the gradients, # one final addition of gradients to the weights # one multiply by learning rate applyGrad_mem = ((1 * Dim0 * Dim1 * self.precision) + (2 * Dim0 * Dim1 * self.precision) + (1 * Dim0 * Dim1 * self.precision)) #1: read for pointiwse div #2: 1 reads and one write for pointwise add #1: one write for multiplication by lr applyGrad_time = self.roofline(applyGrad_comp, applyGrad_mem, name='pointwise-applyGrad') clip_time = self.gradClipping(Dim0, Dim1, name) grad_time = applyGrad_time + clip_time if self.debug: print("({}) applyGrad_flop: {:,}, applyGrad_mem: {:,}".format(name, applyGrad_comp, applyGrad_mem)) print("({}) applyGrad_time: {:,}\n".format(name, applyGrad_time)) return grad_time def 
getDistGEMM_f_kp1(self, m, k, n, dim1, name): GEMM_time = self.getGEMMTime(m , k//dim1, n , name) #Sum-Reduce within each row for use in the next time step reduction_time = self.getR(Dim0 = m, Dim1 = n, p = dim1, ib = self.IBK1, ll = self.LLK1, partial = True, allReduce = True, name = name) if self.validating_GEMM: print("GEMM_time: {}, Reduction_time:{}".format(GEMM_time[0], reduction_time)) return GEMM_time[0] + reduction_time, GEMM_time[1], GEMM_time[2] else: return GEMM_time[0], reduction_time def getDistGEMM_b_kp1(self, m, k, n, dim1, name): #calculate grad wrt. act (A'. W^T) #gather whole(A') before MM #A' is distibuted as columns across different nodes reduction_time = self.getR(Dim0 = m, Dim1 = n, p = dim1, ib = self.IBK1, ll = self.LLK1, partial = False, allReduce = False, name = name) #Multiply full grad_activation with shards of weights grad_wt_time,_,_ = self.getGEMMTime(k, (m // dim1), n, name + "wt") #Multiply full grad-activation with shards of activations grad_act_time,_,_ = self.getGEMMTime(m, (n // dim1), k, name + "act") GEMM_time = grad_wt_time + grad_act_time return GEMM_time, reduction_time def getDistGEMM_f_kp2(self, m, k, n, dim1, dim2, name): GEMM_time = self.getGEMMTime(m // dim1, k, n // dim2, name) reduction_time = self.getR(Dim0 = m // dim1, Dim1 = n, p = dim2, ib = self.IBK2, ll = self.LLK2, partial = False, allReduce = False, name = name) if self.validating_GEMM: print("GEMM_time: {}, Reduction_time:{}".format(GEMM_time[0], reduction_time)) return GEMM_time[0] + reduction_time, GEMM_time[1], GEMM_time[2] else: return GEMM_time[0], reduction_time def getDistGEMM_b_kp2(self, m, k, n, dim1, dim2, name): ###################################################################################### #calculate grad wrt. weights (A^T. grad(A')) #gather row(A^T) reduction_time_wt1 = self.getR(Dim0 = k, Dim1 = m, p = dim1, ib = self.IBK1, ll = self.LLK1, partial = False, allReduce = False, name = name)/2 #To calculate grad wrt weights (A^T, grad(A')), #gather column grad(A') reduction_time_wt2 = self.getR(Dim0 = m, Dim1 = n / dim2, p = dim1, ib = self.IBK1, ll = self.LLK1, partial = False, allReduce = False, name = name) ######################################################################################## #calculate grad wrt. act (grad(A'). w^T) #gather row grad(A') reduction_time_act1 = self.getR(Dim0 = m / dim1, Dim1 = n, p = dim2, ib = self.IBK2, ll = self.LLK2, partial = False, allReduce = False, name = name) #calculate grad wrt. act (grad(A'). 
w^T)
        #gather col(w^T)
        reduction_time_act2 = self.getR(Dim0 = k, Dim1 = n, p = dim2, ib = self.IBK2, ll = self.LLK2, partial = False, allReduce = False, name = name)/2

        reduction_time = reduction_time_wt1 + reduction_time_wt2 + reduction_time_act1 + reduction_time_act2

        #Multiply full grad_activation with shards of weights
        grad_wt_time, _, _ = self.getGEMMTime(k / dim1, m, n / dim2, name + "wt")
        #Multiply full grad_activation with shards of activations
        grad_act_time, _, _ = self.getGEMMTime(m / dim1, n, k / dim2, name + "act")

        GEMM_time = grad_wt_time + grad_act_time

        return GEMM_time, reduction_time

    def getDataParallelReduction(self, k, n, dim1, dim2, name):
        #k = 2 * self.D
        #n = 4 * self.D
        #dim1 = self.kp_hidden_dim1
        #dim2 = self.kp_hidden_dim2
        reduction_time_wt_kp = 0
        reduction_time_wt_dp = 0
        apply_grad_time = 0

        if self.kp_hidden_type == 1: #CR
            reduction_time_wt_kp = 0
            reduction_time_wt_dp = self.getR(Dim0 = k/dim1, Dim1 = n, p = self.dp, ib = self.IBD, ll = self.LLD, partial = False, allReduce = True, name = name)
            apply_grad_time = self.applyGrad(Dim0 = k/dim1, Dim1 = n, name = name)
        elif self.kp_hidden_type == 2: #RC
            reduction_time_wt_dp = self.getR(Dim0 = k/dim1, Dim1 = n/dim2, p = self.dp, ib = self.IBD, ll = self.LLD, partial = False, allReduce = True, name = name)
            #gather col(w)
            reduction_time_wt_kp = self.getR(Dim0 = k, Dim1 = n/dim2, p = dim1, ib = self.IBK1, ll = self.LLK1, partial = False, allReduce = False, name = name)
            apply_grad_time = self.applyGrad(Dim0 = k, Dim1 = n/dim2, name = name)
        else:
            reduction_time_wt_kp = 0
            reduction_time_wt_dp = self.getR(Dim0 = k, Dim1 = n, p = self.dp, ib = self.IBD, ll = self.LLD, partial = False, allReduce = True, name = name)
            apply_grad_time = self.applyGrad(Dim0 = k, Dim1 = n, name = name)

        reduction_time = reduction_time_wt_kp + reduction_time_wt_dp + apply_grad_time

        return reduction_time

    def getProjection_f(self):
        GEMM_time, _, _ = self.getGEMMTime(self.miniB, self.D, self.projection, "projection")
        return GEMM_time

    def getProjection_b(self):
        grad_wt_time, _, _ = self.getGEMMTime(self.projection, self.miniB, self.D, "projection_b_wt")
        grad_act_time, _, _ = self.getGEMMTime(self.miniB, self.projection, self.D, "projection_b_act")
        GEMM_time = grad_wt_time + grad_act_time
        return GEMM_time

    def getProjection_f_kp1(self):
        assert(self.kp_projection_type == 1)
        assert(self.kp_projection_dim1 > 1)
        assert(self.D % self.kp_projection_dim1 == 0)
        GEMM_time, reduction_time = self.getDistGEMM_f_kp1(self.miniB, self.D, self.projection, self.kp_projection_dim1, "projection_f")
        return GEMM_time + reduction_time

    def getProjection_b_kp1(self):
        assert(self.kp_projection_type == 1)
        assert(self.kp_projection_dim1 > 1)
        assert(self.D % self.kp_projection_dim1 == 0)
        GEMM_time, reduction_time = self.getDistGEMM_b_kp1(self.miniB, self.D, self.projection, self.kp_projection_dim1, "projection_b")
        return GEMM_time + reduction_time

    def getProjection_f_kp2(self):
        assert(self.kp_projection_type == 2)
        assert(self.kp_projection_dim1 > 1 or self.kp_projection_dim2 > 1)
        assert((self.miniB) % self.kp_projection_dim1 == 0)
        assert(self.projection % self.kp_projection_dim2 == 0)
        GEMM_time, reduction_time = self.getDistGEMM_f_kp2(self.miniB, self.D, self.projection, self.kp_projection_dim1, self.kp_projection_dim2, "projection_f")
        return GEMM_time + reduction_time

    def getProjection_b_kp2(self):
        assert(self.kp_projection_type == 2)
        assert(self.kp_projection_dim1 > 1 or self.kp_projection_dim2 > 1)
        assert((self.miniB) % self.kp_projection_dim1 == 0)
        assert(self.projection % self.kp_projection_dim2 == 0)
        GEMM_time, reduction_time = self.getDistGEMM_b_kp2(self.miniB, self.D, self.projection, self.kp_projection_dim1, self.kp_projection_dim2, "projection_b")
        return GEMM_time + reduction_time

    def getSoftmax_f(self):
        GEMM_time, _, _ = self.getGEMMTime(self.miniB, (self.projection if proj else self.D), self.V, "softmax_f")

        #Final matrix after GEMM has (B, V) dimensionality
        #We get exponential on each of the elements in a row
        #and then normalize them across the row
        #Therefore for each row we do V sigmoids and V-1 additions and V divisions
        #For each row, we read V sigmoids and write one
        #Up to here is 3 operations
        point_flop = (self.miniB * (3 * self.V - 1))
        point_mem = self.precision * self.miniB * (7 * self.V)
        #2: one read and one write for sigmoid
        #1: one read for reduction
        #1: one write for extension
        #2: for pointwise division
        point_time = self.roofline(point_flop, point_mem, name='pointwise-softmax-f') + 4 * self.O

        if self.debug:
            print("Softmax point_flop: {:,}, point_mem: {:,}".format(int(point_flop/1e9), int(point_mem/1e9)))
            print("point_time: {:,}\n".format(point_time))

        return GEMM_time + point_time

    #FIXME: where is the reduction time?
    def getSoftmax_b(self):
        grad_wt_time, _, _ = self.getGEMMTime((self.projection if proj else self.D), self.miniB, self.V, "softmax_b_wt")
        grad_act_time, _, _ = self.getGEMMTime(self.miniB, self.V, (self.projection if proj else self.D), "softmax_b_act")
        GEMM_time = grad_wt_time + grad_act_time

        point_flop = self.miniB * self.V * 5
        #1: one for one of the divisions, grad(A) (y=A/B)
        #2: one for division and multiplication, grad(B)
        #1: one for addition, copies turn into add
        #1: one for sigmoid
        point_mem = self.precision * self.miniB * self.V * 11
        #3: grad(A) in pointwise division
        #3: grad(B) in pointwise division
        #3: addition in copy backprop
        #2: sigmoid
        point_time = self.roofline(point_flop, point_mem, name='pointwise-softmax-b') + 4 * self.O

        if self.debug:
            print("(gr) Softmax point_flop: {:,}, point_mem: {:,}".format(int(point_flop/1e9), int(point_mem/1e9)))
            print("(gr) Softmax point_time: {:,}\n".format(point_time))

        return GEMM_time + point_time

    #Column-Row MM
    def getSoftmax_f_kp1(self):
        #Multiply
        assert(self.kp_softmax_type == 1)
        assert(self.kp_softmax_dim1 > 1)
        assert((self.projection if proj else self.D) % self.kp_softmax_dim1 == 0)

        GEMM_time, reduction_time = self.getDistGEMM_f_kp1(self.miniB, self.projection if proj else self.D, self.V, self.kp_softmax_dim1, "softmax_f_kp1")

        #Final matrix after GEMM has (B, V) dimensionality
        #We get exponential on each of the elements in a row
        #and then reduce all elements in the row to one
        #Therefore for each row we do V sigmoids and V-1 additions and V divisions
        #For each row, we read V sigmoids and write one
        #After GEMM reduction, each matrix has the full (B,V)
        #but each needs to only operate on 1/dim1 rows to get the reduction
        point_flop = (self.miniB / self.kp_softmax_dim1) * self.V * 3
        point_mem = self.precision * (self.miniB / self.kp_softmax_dim1) * self.V * 7
        #2: sigmoid
        #1: one read for reduction, the accumulate is a register
        #1: one for write/extend the reduction result into all cells
        #3: division needs one for read and one for write.

        point_comm = self.getR(Dim0 = self.miniB, Dim1 = 1, p = self.kp_softmax_dim1, ib = self.IBK1, ll = self.LLK1, partial = False, allReduce = False, name="getSoftmax_f_kp1")
        #communicating partial sum per row from one GPU to all others to perform sum reduce
        point_time = self.roofline(point_flop, point_mem, name='pointwise-softmax-f-kp1') + self.O + point_comm

        if self.debug:
            print("Softmax point_flop: {:,}, point_mem: {:,}".format(int(point_flop/1e9), int(point_mem/1e9)))
            print("Softmax GEMM_time: {:,}, point_time: {:,}\n".format(GEMM_time, point_time))

        return GEMM_time + reduction_time + point_time

    def getSoftmax_b_kp1(self):
        point_flop = (self.miniB) * (self.V / self.kp_softmax_dim1) * 5
        #1: one for one of the divisions, grad(A) (y=A/B)
        #2: one for division and multiplication, grad(B)
        #1: one for addition, copies turn into add
        #1: one for sigmoid
        point_mem = self.precision * (self.miniB) * ((11 * self.V) / self.kp_softmax_dim1)
        #3: grad(A) in pointwise division
        #3: grad(B) in pointwise division
        #3: addition in copy backprop
        #2: sigmoid
        point_comm = 0

        point_time = self.roofline(point_flop, point_mem, name='pointwise-softmax-b-kp1') + self.O + point_comm

        GEMM_time, reduction_time = self.getDistGEMM_b_kp1(self.miniB, self.projection if proj else self.D, self.V, self.kp_softmax_dim1, "softmax_b_kp1")

        if self.debug:
            print("(gr) Softmax point_flop: {:,}, point_mem: {:,}\n".format(int(point_flop/1e9), int(point_mem/1e9)))

        return reduction_time + GEMM_time + point_time

    #Row-Column MM
    def getSoftmax_f_kp2(self):
        #Multiply
        assert(self.kp_softmax_type == 2)
        assert(self.kp_softmax_dim1 > 1 or self.kp_softmax_dim2 > 1)
        assert((self.miniB) % self.kp_softmax_dim1 == 0)
        assert((self.projection if proj else self.D) % self.kp_softmax_dim2 == 0)

        GEMM_time, reduction_time = self.getDistGEMM_f_kp2(self.miniB, self.projection if proj else self.D, self.V, self.kp_softmax_dim1, self.kp_softmax_dim2, "softmax_f_kp2")

        #Final matrix after GEMM has (B X S, V) dimensionality
        #We get exponential on each of the elements in a row
        #and then reduce all elements in the row to one
        #Therefore for each row we do V sigmoids and V-1 additions and V divisions
        #For each row, we read V sigmoids and write one
        point_flop = (self.miniB / self.kp_softmax_dim1) * (self.V / self.kp_softmax_dim2) * 3
        point_mem = self.precision * (self.miniB / self.kp_softmax_dim1) * (self.V / self.kp_softmax_dim2) * 7
        #2: sigmoid
        #1: one read for reduction, the accumulate is a register
        #1: one for write/broadcast the reduction result into all cells
        #3: division needs one for read and one for write.

        data_size = self.precision * (self.miniB / self.kp_softmax_dim1) * (self.kp_softmax_dim2)
        point_comm = 0
        if (self.kp_softmax_dim2 > 1):
            mem_transfer = self.roofline(0, 2 * data_size, name='memory accesses before and after data transfer over network')
            data_transfer = data_size / self.IBK2
            point_comm = mem_transfer + data_transfer

        point_time = self.roofline(point_flop, point_mem, name='pointwise-Softmax_f_kp2') + self.O + point_comm

        if self.debug:
            print("Softmax point_flop: {:,}, point_mem: {:,}".format(int(point_flop/1e9), int(point_mem/1e9)))
            print("Softmax GEMM_time: {:,}, point_time: {:,}\n".format(GEMM_time, point_time))

        return GEMM_time + point_time + reduction_time

    def getSoftmax_b_kp2(self):
        point_flop = (self.miniB / self.kp_softmax_dim1) * (self.V / self.kp_softmax_dim2) * 5
        #1: one for one of the divisions, grad(A) (y=A/B)
        #2: one for division and multiplication, grad(B)
        #1: one for addition, copies turn into add
        #1: one for sigmoid
        point_mem = self.precision * (self.miniB / self.kp_softmax_dim1) * ((11 * self.V) / self.kp_softmax_dim2)
        #3: grad(A) in pointwise division
        #3: grad(B) in pointwise division
        #3: addition in copy backprop
        #2: sigmoid
        point_comm = 0

        point_time = self.roofline(point_flop, point_mem, name='pointwise-Softmax_b_kp2') + self.O + point_comm

        GEMM_time, reduction_time = self.getDistGEMM_b_kp2(self.miniB, self.projection if proj else self.D, self.V, self.kp_softmax_dim1, self.kp_softmax_dim2, "softmax_b_kp2")

        if self.debug:
            print("(gr) Softmax point_flop: {:,}, point_mem: {:,}\n".format(int(point_flop/1e9), int(point_mem/1e9)))

        return reduction_time + GEMM_time + point_time

    def getEmbedding_f(self):
        embedding_mem = 2 * (self.miniB * self.D * self.precision)
        #embedding_time = (embedding_mem)/ (self.mem_bw) + self.mem_latency + self.O
        embedding_time = self.roofline(0, embedding_mem, name='embedding_f') + self.O
        embedding_transfer_time = 2 * self.miniB * self.D * self.precision / self.H2Dbw

        if self.debug:
            print("Embedding_mem: {:,}".format(int(embedding_mem/1e9)))

        return embedding_time + embedding_transfer_time

    def getEmbedding_b(self):
        #p2p_data_transfer = (self.precision * self.miniB * self.D)
        #data_transfer_time = 0 if (self.dp == 1) else (float("inf") if (self.IBD == 0) else (((p2p_data_transfer) / self.IBD + self.LLD) * 2 * (self.dp - 1)))

        embedding_mem = 2 * self.miniB * self.D * self.precision
        #embedding_mem_time = (embedding_mem / self.mem_bw) + self.mem_latency + self.O
        embedding_mem_time = self.roofline(0, embedding_mem, name='embedding_b') + self.O

        if self.debug:
            print("(gr) Embedding_mem: {:,}".format(int(embedding_mem/1e9)))

        #return data_transfer_time + embedding_mem_time
        return embedding_mem_time

    def getEmbedding_f_kp1(self):
        #Each GPU has only a portion of the activations since each GPU had only a row of the weights
        reduction_time_act = self.getR(Dim0 = self.miniB, Dim1 = self.D, p = self.kp_embedding_dim1, ib = self.IBK1, ll = self.LLK1, partial = False, allReduce = False, name="getEmbedding_f_kp1")

        embedding_mem = 2 * (self.miniB * self.D * self.precision)
        #embedding_time = (embedding_mem)/ (self.mem_bw) + self.mem_latency + self.O
        embedding_time = self.roofline(0, embedding_mem, name='embedding_f') + self.O

        if self.debug:
            print("Embedding_mem: {:,}".format(int(embedding_mem/1e9)))

        return embedding_time + reduction_time_act

    def getEmbedding_b_kp1(self):
        #Activations from the previous row arrive in column fashion; they need to be gathered
        #before applying them to the local portion of the embeddings
        reduction_time_act = self.getR(Dim0 = self.miniB, Dim1 = self.D, p = self.kp_embedding_dim1, ib = self.IBK1, ll = self.LLK1, partial = False, allReduce = False, name="getEmbedding_b_kp1")

        #Each GPU reads through the entire activation and writes at most B rows
        embedding_mem = 2 * self.miniB * self.D * self.precision
        embedding_mem_time = self.roofline(0, embedding_mem, name='embedding_b') + self.O

        if self.debug:
            print("(gr) Embedding_mem: {:,}".format(int(embedding_mem/1e9)))

        return embedding_mem_time + reduction_time_act

    def getEmbedding_f_kp2(self):
        embedding_mem = 2 * ((self.miniB / self.kp_embedding_dim1) * (self.D / self.kp_embedding_dim2) * self.precision)
        embedding_time = self.roofline(0, embedding_mem, name='embedding_f') + self.O

        if self.debug:
            print("Embedding_mem: {:,}".format(int(embedding_mem/1e9)))

        return embedding_time

    def getEmbedding_b_kp2(self):
        #Every GPU will update a little tile of the embedding,
        #which needs to be gathered after the update across the rows of each column
        reduction_time_act = self.getR(Dim0 = self.miniB, Dim1 = self.D / self.kp_embedding_dim2, p = self.kp_embedding_dim1, ib = self.IBK1, ll = self.LLK1, partial = False, allReduce = False, name="getEmbedding_b_kp2")

        embedding_mem = 2 * (self.miniB / self.kp_embedding_dim1) * (self.D / self.kp_embedding_dim2) * self.precision
        embedding_mem_time = self.roofline(0, embedding_mem, name='embedding_b') + self.O

        if self.debug:
            print("(gr) Embedding_mem: {:,}".format(int(embedding_mem/1e9)))

        return embedding_mem_time + reduction_time_act

    def getInterLayerCommLatency(self, dim1, dim2):
        w = 0
        if self.lp > 1:
            w_size = self.precision * dim1 * dim2
            transfer_time = w_size / self.IBL + self.LLL
            mem_time = self.roofline(0, 2 * w_size, name='inter_layer')
            #2: read from memory of previous layer and write to the memory of the next layer
            w = mem_time + transfer_time
        return w

    def dprint(self, string):
        if self.debug:
            print(string)

    def readjust_type(self):
        if self.kp_hidden_dim1 == 1 and self.kp_hidden_dim2 == 1:
            self.kp_hidden_type = -1
        if self.kp_softmax_dim1 == 1 and self.kp_softmax_dim2 == 1:
            self.kp_softmax_type = -1
        if self.kp_embedding_dim1 == 1 and self.kp_embedding_dim2 == 1:
            self.kp_embedding_type = -1

    def calcTime(self):
        B = self.miniB
        D = self.D
        G = self.G
        L = self.L
        S = self.S
        V = self.V
        lp = self.lp
        dp = self.dp

        self.readjust_type()

        if self.kp_hidden_type == -1:
            Cf = self.getCf(m = B, k = 2*D, n = G*D)
            Cb = self.getCb()
            Tf = self.getInterLayerCommLatency(B, D)
        elif self.kp_hidden_type == 1: #CR
            Cf = self.getCf_kp1()
            Cb = self.getCb_kp1()
            Tf = self.getInterLayerCommLatency(B, D / self.kp_hidden_dim1)
        elif self.kp_hidden_type == 2: #RC
            Cf = self.getCf_kp2()
            Cb = self.getCb_kp2()
            Tf = self.getInterLayerCommLatency(B / self.kp_hidden_dim1, D / self.kp_hidden_dim2)
        else:
            print("Incorrect distributed GEMM type, 1: Column-Row, 2: Row-Column")
            sys.exit()

        if self.lp == 1:
            Tf = 0
        Tb = Tf

        if self.kp_softmax_type == -1:
            Sf = self.getSoftmax_f()
            Sb = self.getSoftmax_b()
        elif self.kp_softmax_type == 1: #CR
            Sf = self.getSoftmax_f_kp1()
            Sb = self.getSoftmax_b_kp1()
        elif self.kp_softmax_type == 2: #RC
            Sf = self.getSoftmax_f_kp2()
            Sb = self.getSoftmax_b_kp2()
        else:
            print("Incorrect distributed GEMM type, 1: Column-Row, 2: Row-Column")
            sys.exit()

        if self.kp_embedding_type == -1:
            Ef = self.getEmbedding_f()
            Eb = self.getEmbedding_b()
        elif self.kp_embedding_type == 1: #CR
            Ef = self.getEmbedding_f_kp1()
            Eb = self.getEmbedding_b_kp1()
        elif self.kp_embedding_type == 2: #RC
            Ef = self.getEmbedding_f_kp2()
            Eb = self.getEmbedding_b_kp2()
        else:
            print("Incorrect distributed GEMM type, 1: Column-Row, 2: Row-Column")
            sys.exit()

        Rc = self.getDataParallelReduction(k = 2 * D, n = G * D, dim1 = self.kp_hidden_dim1, dim2 = self.kp_hidden_dim2, name = "Hidden Reduction")
        Rs = self.getDataParallelReduction(k = D, n = V, dim1 = self.kp_softmax_dim1, dim2 = self.kp_softmax_dim2, name = "Softmax Reduction")
        Re = self.getDataParallelReduction(k = V, n = D, dim1 = self.kp_embedding_dim1, dim2 = self.kp_embedding_dim2, name = "Embedding Reduction")

        if self.debug:
            print("dp: {}, lp: {}, kp_hidden_dim1: {}, kp_hidden_dim2: {}, kp_softmax_dim1: {}, kp_softmax_dim2: {}, kp_embedding_dim1: {}, kp_embedding_dim2: {}, kp_hidden_type: {}, kp_softmax_type: {}, kp_embedding_type: {}\n".
                  format(dp, lp, self.kp_hidden_dim1, self.kp_hidden_dim2, self.kp_softmax_dim1, self.kp_softmax_dim2, self.kp_embedding_dim1, self.kp_embedding_dim2, self.kp_hidden_type, self.kp_softmax_type, self.kp_embedding_type))
            print("Cf: {} Cb: {} softmax_f: {} softmax_b: {} embedding_f: {} embedding_b: {} "
                  "Rs: {} Rc: {} Re: {}\n".format(Cf, Cb, Sf, Sb, Ef, Eb, Rs, Rc, Re))

        g = Graph(num_seq = S, num_layer = L, lp = lp, Ef = Ef, Cf = Cf, Sf = Sf, Tf = Tf, Eb = Eb, Cb = Cb, Sb = Sb, Tb = Tb, Re = Re, Rc = Rc, Rs = Rs)
        fw_roots = g.construct_fwd_graph()
        bw_roots = g.construct_bwd_graph()

        time_fw = g.simulate(fw_roots[0], 0)
        time_bw = g.simulate(bw_roots[g.num_seq - 1], g.lp - 1)

        self.tot_time = time_fw + time_bw
        tot_param = self.tot_param()

        return self.tot_time, tot_param

    def getTime(self):
        return self.tot_time


def callPerf(exp_config, exp_dir, debug):
    exp_path = os.path.expandvars(os.path.expanduser(exp_config))
    exp_config = config.parse_config(exp_path)

    #try:
    #    #print("Removing directory:" + exp_dir)
    #    shutil.rmtree(exp_dir)
    #except:
    #    pass
    #os.makedirs(exp_dir)

    TC = TimeCalculation(exp_config)
    TC.debug = debug
    tot_time, tot_param = TC.calcTime()

    output_file = exp_dir + "/summary.txt"
    TC.printSysConfig(exp_config, output_file)

    with open(output_file, "a+") as f:
        f.write("Time: {0:.8f}\n".format(tot_time))
        f.write("Params (Billion): {0:.8f}\n".format(tot_param/1e9))


@click.command("standalone")
@click.option("--args_input", help="Shall it read the args from the input command (True) or from exp_config (False)", default=False, type=bool, required=False)
@click.option("--exp_config", help="Path to experiment config", required=True)
@click.option("--exp_dir", help="Checkpoint/log directory", required=True)
@click.option("--debug", help="debug", default=False, type=bool)
@click.option("--m", help="input dimension", default=32768, type=int, required=False) #only use for GEMM validation. This allows arbitrary choice of dimension. For LSTM, dimensions are fixed at m=mini_batch, k=2*D and n=4*D.
@click.option("--n", help="output dimension", default=32768, type=int, required=False) #only use for GEMM validation
@click.option("--k", help="inner dimension", default=32768, type=int, required=False) #only use for GEMM validation
@click.option("--t", help="parallelism strategy (RC or CR)", default='None', type=str, required=False) #only use for GEMM validation
@click.option("--kp1", help="RC: parallelism along input dimension, CR: parallelism along inner dimension", default=None, type=int, required=False) #only use for GEMM validation
@click.option("--kp2", help="RC: parallelism along output dimension", default=None, type=int, required=False) #only use for GEMM validation
@click.option("--gemm", help="report ONLY GEMM time", default=False, type=bool, required=False) #only use for GEMM validation
@click.option("--batch_size", help="Total Batch Size", default=2048, type=int, required=False)
@click.option("--hidden_dim", help="Hidden Dimension per LSTM layer", default=19968, type=int, required=False)
@click.option("--seq_len", help="Number of times to unroll LSTM", default=20, type=int, required=False)
@click.option("--vocab_size", help="Vocabulary Size", default=800000, type=int, required=False)
@click.option("--num_layer", help="number of lstm layers", default=2, type=int, required=False)
@click.option("--dp", help="data parallelism", default=None, type=int, required=False) #only use for GEMM validation
@click.option("--lp", help="layer parallelism", default=None, type=int, required=False) #only use for GEMM validation
def main(exp_config, exp_dir, debug, m, n, k, t, kp1, kp2, gemm, batch_size, hidden_dim, seq_len, vocab_size, num_layer, dp, lp, args_input=False):
    exp_path = os.path.expandvars(os.path.expanduser(exp_config))
    exp_config = config.parse_config(exp_path)

    output_file = exp_dir + "/summary.txt"
    ##Output dir should be created manually

    TC = TimeCalculation(exp_config)
    if args_input:
        TC.updateParams(debug, m, n, k, t, kp1, kp2, dp, lp, gemm, batch_size, hidden_dim, seq_len, vocab_size, num_layer)

    #Report GEMM time on fw path
    if TC.validating_GEMM:
        if kp1 == 1 and kp2 == 1:
            #no parallelism
            gemm_time = TC.getCf(m, k, n)
        elif t == 'CR':
            gemm_time = TC.getDistGEMM_f_kp1(m, k, n, kp1, "Cf_CR")
        elif t == 'RC':
            gemm_time = TC.getDistGEMM_f_kp2(m, k, n, kp1, kp2, "Cf_RC")
        else:
            print("Incorrect parallelism type, CR: Column-Row, RC: Row-Column")
            sys.exit()

        with open(output_file, "w") as f:
            f.write("Best Order: {}\n".format(gemm_time[1]))
            f.write("Best Tile: {}\n".format(gemm_time[2]))
            f.write("Time: {}\n".format(gemm_time[0]))
        return

    tot_time, tot_param = TC.calcTime()
    TC.printSysConfig(exp_config, output_file)

    with open(output_file, "a+") as f:
        f.write("\n\n==============================================\n")
        f.write("Performance Results\n")
        f.write("==============================================\n")
        f.write("Time: {0:.8f}\n".format(tot_time))
        f.write("Params (Billion): {0:.8f}\n".format(tot_param/1e9))


if __name__ == "__main__":
    main()
StarcoderdataPython
1714150
<filename>make.py #!python # coding: utf-8 import os os.system('tools\\makeExeFile.bat')
StarcoderdataPython
3241743
from dataclasses import dataclass

from dataclasses_json import dataclass_json


@dataclass_json
@dataclass
class ReplyTTSBroadCastDTO:
    name: str
    voice_data: str
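A usage sketch: the dataclass_json decorator from the dataclasses-json package adds to_json()/from_json() to the decorated class; the field values below are made-up examples.

dto = ReplyTTSBroadCastDTO(name='narrator', voice_data='<base64-encoded audio>')
payload = dto.to_json()                             # serialize to a JSON string
restored = ReplyTTSBroadCastDTO.from_json(payload)  # parse it back
assert restored == dto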
StarcoderdataPython
1609327
AFFINE = True  # use affine transformation when using batch or layer normalization

HIDDEN_LARGE = 1024  # nodes per hidden layer
HIDDEN_SMALL = 512  # nodes per hidden layer when using fewer features
HIDDEN_TINY = 128  # nodes per hidden layer for the value-estimation model

LAYERS_FULL = 4  # number of layers in the fully connected network
LAYERS_EMBEDDING = 2  # number of layers in the embedding network
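An illustrative sketch of how such width/depth constants are typically consumed when building a layer stack; build_mlp_sizes is a hypothetical helper, not part of this file.

def build_mlp_sizes(in_dim, hidden, layers, out_dim):
    # e.g. build_mlp_sizes(64, HIDDEN_LARGE, LAYERS_FULL, 1)
    #      -> [64, 1024, 1024, 1024, 1024, 1]
    return [in_dim] + [hidden] * layers + [out_dim]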
StarcoderdataPython
23511
<reponame>ouyang-w-19/decogo # MINLP written by GAMS Convert at 04/21/18 13:55:18 # # Equation counts # Total E G L N X C B # 617 367 103 147 0 0 0 0 # # Variable counts # x b i s1s s2s sc si # Total cont binary integer sos1 sos2 scont sint # 499 472 27 0 0 0 0 0 # FX 6 6 0 0 0 0 0 0 # # Nonzero counts # Total const NL DLL # 1636 1333 303 0 # # Reformulation has removed 1 variable and 1 equation from pyomo.environ import * model = m = ConcreteModel() m.b2 = Var(within=Binary,bounds=(0,1),initialize=0) m.b3 = Var(within=Binary,bounds=(0,1),initialize=0) m.b4 = Var(within=Binary,bounds=(0,1),initialize=0) m.b5 = Var(within=Binary,bounds=(0,1),initialize=0) m.b6 = Var(within=Binary,bounds=(0,1),initialize=0) m.b7 = Var(within=Binary,bounds=(0,1),initialize=0) m.b8 = Var(within=Binary,bounds=(0,1),initialize=0) m.b9 = Var(within=Binary,bounds=(0,1),initialize=0) m.b10 = Var(within=Binary,bounds=(0,1),initialize=0) m.b11 = Var(within=Binary,bounds=(0,1),initialize=0) m.b12 = Var(within=Binary,bounds=(0,1),initialize=0) m.b13 = Var(within=Binary,bounds=(0,1),initialize=0) m.b14 = Var(within=Binary,bounds=(0,1),initialize=0) m.b15 = Var(within=Binary,bounds=(0,1),initialize=0) m.b16 = Var(within=Binary,bounds=(0,1),initialize=0) m.b17 = Var(within=Binary,bounds=(0,1),initialize=0) m.b18 = Var(within=Binary,bounds=(0,1),initialize=0) m.b19 = Var(within=Binary,bounds=(0,1),initialize=0) m.b20 = Var(within=Binary,bounds=(0,1),initialize=0) m.b21 = Var(within=Binary,bounds=(0,1),initialize=0) m.b22 = Var(within=Binary,bounds=(0,1),initialize=0) m.b23 = Var(within=Binary,bounds=(0,1),initialize=0) m.b24 = Var(within=Binary,bounds=(0,1),initialize=0) m.b25 = Var(within=Binary,bounds=(0,1),initialize=0) m.b26 = Var(within=Binary,bounds=(0,1),initialize=0) m.b27 = Var(within=Binary,bounds=(0,1),initialize=0) m.b28 = Var(within=Binary,bounds=(0,1),initialize=0) m.x29 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x30 = Var(within=Reals,bounds=(None,None),initialize=0) m.x31 = Var(within=Reals,bounds=(None,None),initialize=0) m.x32 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x33 = Var(within=Reals,bounds=(None,None),initialize=0) m.x34 = Var(within=Reals,bounds=(None,None),initialize=0) m.x35 = Var(within=Reals,bounds=(None,None),initialize=0) m.x36 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x37 = Var(within=Reals,bounds=(None,None),initialize=0) m.x38 = Var(within=Reals,bounds=(None,None),initialize=0) m.x39 = Var(within=Reals,bounds=(None,None),initialize=0) m.x40 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x41 = Var(within=Reals,bounds=(None,None),initialize=0) m.x42 = Var(within=Reals,bounds=(None,None),initialize=0) m.x43 = Var(within=Reals,bounds=(None,None),initialize=0) m.x44 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x45 = Var(within=Reals,bounds=(None,None),initialize=0) m.x46 = Var(within=Reals,bounds=(None,None),initialize=0) m.x47 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x48 = Var(within=Reals,bounds=(None,None),initialize=0) m.x49 = Var(within=Reals,bounds=(None,None),initialize=0) m.x50 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x51 = Var(within=Reals,bounds=(None,None),initialize=0) m.x52 = Var(within=Reals,bounds=(None,None),initialize=0) m.x53 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x54 = Var(within=Reals,bounds=(None,None),initialize=0) m.x55 = Var(within=Reals,bounds=(None,None),initialize=0) m.x56 = Var(within=Reals,bounds=(None,None),initialize=0) m.x57 = 
Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x58 = Var(within=Reals,bounds=(None,None),initialize=0) m.x59 = Var(within=Reals,bounds=(None,None),initialize=0) m.x60 = Var(within=Reals,bounds=(None,None),initialize=0) m.x61 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x62 = Var(within=Reals,bounds=(None,None),initialize=0) m.x63 = Var(within=Reals,bounds=(None,None),initialize=0) m.x64 = Var(within=Reals,bounds=(None,None),initialize=0) m.x65 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x66 = Var(within=Reals,bounds=(None,None),initialize=0) m.x67 = Var(within=Reals,bounds=(None,None),initialize=0) m.x68 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x69 = Var(within=Reals,bounds=(None,None),initialize=0) m.x70 = Var(within=Reals,bounds=(None,None),initialize=0) m.x71 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x72 = Var(within=Reals,bounds=(None,None),initialize=0) m.x73 = Var(within=Reals,bounds=(None,None),initialize=0) m.x74 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x75 = Var(within=Reals,bounds=(None,None),initialize=0) m.x76 = Var(within=Reals,bounds=(None,None),initialize=0) m.x77 = Var(within=Reals,bounds=(None,None),initialize=0) m.x78 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x79 = Var(within=Reals,bounds=(None,None),initialize=0) m.x80 = Var(within=Reals,bounds=(None,None),initialize=0) m.x81 = Var(within=Reals,bounds=(None,None),initialize=0) m.x82 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x83 = Var(within=Reals,bounds=(None,None),initialize=0) m.x84 = Var(within=Reals,bounds=(None,None),initialize=0) m.x85 = Var(within=Reals,bounds=(None,None),initialize=0) m.x86 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x87 = Var(within=Reals,bounds=(None,None),initialize=0) m.x88 = Var(within=Reals,bounds=(None,None),initialize=0) m.x89 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x90 = Var(within=Reals,bounds=(None,None),initialize=0) m.x91 = Var(within=Reals,bounds=(None,None),initialize=0) m.x92 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x93 = Var(within=Reals,bounds=(None,None),initialize=0) m.x94 = Var(within=Reals,bounds=(None,None),initialize=0) m.x95 = Var(within=Reals,bounds=(0,5),initialize=0) m.x96 = Var(within=Reals,bounds=(0,5),initialize=0) m.x97 = Var(within=Reals,bounds=(0,5),initialize=0) m.x98 = Var(within=Reals,bounds=(0,5),initialize=0) m.x99 = Var(within=Reals,bounds=(0,2.4),initialize=0) m.x100 = Var(within=Reals,bounds=(0,5),initialize=0) m.x101 = Var(within=Reals,bounds=(0,2.4),initialize=0) m.x102 = Var(within=Reals,bounds=(0,5),initialize=0) m.x103 = Var(within=Reals,bounds=(0,2.4),initialize=0) m.x104 = Var(within=Reals,bounds=(0,5),initialize=0) m.x105 = Var(within=Reals,bounds=(0,2.4),initialize=0) m.x106 = Var(within=Reals,bounds=(0,5),initialize=0) m.x107 = Var(within=Reals,bounds=(0,2.4),initialize=0) m.x108 = Var(within=Reals,bounds=(0,5),initialize=0) m.x109 = Var(within=Reals,bounds=(0,2.4),initialize=0) m.x110 = Var(within=Reals,bounds=(0,5),initialize=0) m.x111 = Var(within=Reals,bounds=(0,5),initialize=0) m.x112 = Var(within=Reals,bounds=(0,5),initialize=0) m.x113 = Var(within=Reals,bounds=(0,5),initialize=0) m.x114 = Var(within=Reals,bounds=(0,1.16),initialize=0) m.x115 = Var(within=Reals,bounds=(0,5),initialize=0) m.x116 = Var(within=Reals,bounds=(0,1.16),initialize=0) m.x117 = Var(within=Reals,bounds=(0,5),initialize=0) m.x118 = Var(within=Reals,bounds=(0,1.16),initialize=0) m.x119 = Var(within=Reals,bounds=(0,5),initialize=0) m.x120 
= Var(within=Reals,bounds=(0,5),initialize=0) m.x121 = Var(within=Reals,bounds=(0,5),initialize=0) m.x122 = Var(within=Reals,bounds=(3.5,3.5),initialize=3.5) m.x123 = Var(within=Reals,bounds=(2,5),initialize=2) m.x124 = Var(within=Reals,bounds=(2,5),initialize=2) m.x125 = Var(within=Reals,bounds=(2,5),initialize=2) m.x126 = Var(within=Reals,bounds=(2,5),initialize=2) m.x127 = Var(within=Reals,bounds=(2,5),initialize=2) m.x128 = Var(within=Reals,bounds=(4.1,4.1),initialize=4.1) m.x129 = Var(within=Reals,bounds=(2.5,5),initialize=2.5) m.x130 = Var(within=Reals,bounds=(2.5,5),initialize=2.5) m.x131 = Var(within=Reals,bounds=(2.5,5),initialize=2.5) m.x132 = Var(within=Reals,bounds=(2.5,5),initialize=2.5) m.x133 = Var(within=Reals,bounds=(2.5,5),initialize=2.5) m.x134 = Var(within=Reals,bounds=(4,4),initialize=4) m.x135 = Var(within=Reals,bounds=(2,6),initialize=2) m.x136 = Var(within=Reals,bounds=(2,6),initialize=2) m.x137 = Var(within=Reals,bounds=(2,6),initialize=2) m.x138 = Var(within=Reals,bounds=(2,6),initialize=2) m.x139 = Var(within=Reals,bounds=(2,6),initialize=2) m.x140 = Var(within=Reals,bounds=(0,0.8),initialize=0) m.x141 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x142 = Var(within=Reals,bounds=(0,0.8),initialize=0) m.x143 = Var(within=Reals,bounds=(None,None),initialize=0) m.x144 = Var(within=Reals,bounds=(0,0.8),initialize=0) m.x145 = Var(within=Reals,bounds=(None,None),initialize=0) m.x146 = Var(within=Reals,bounds=(0,0.8),initialize=0) m.x147 = Var(within=Reals,bounds=(None,None),initialize=0) m.x148 = Var(within=Reals,bounds=(0,0.8),initialize=0) m.x149 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x150 = Var(within=Reals,bounds=(0,0.8),initialize=0) m.x151 = Var(within=Reals,bounds=(None,None),initialize=0) m.x152 = Var(within=Reals,bounds=(0,0.8),initialize=0) m.x153 = Var(within=Reals,bounds=(None,None),initialize=0) m.x154 = Var(within=Reals,bounds=(0,0.8),initialize=0) m.x155 = Var(within=Reals,bounds=(None,None),initialize=0) m.x156 = Var(within=Reals,bounds=(0,0.8),initialize=0) m.x157 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x158 = Var(within=Reals,bounds=(0,0.5),initialize=0) m.x159 = Var(within=Reals,bounds=(None,None),initialize=0) m.x160 = Var(within=Reals,bounds=(0,0.5),initialize=0) m.x161 = Var(within=Reals,bounds=(None,None),initialize=0) m.x162 = Var(within=Reals,bounds=(0,0.5),initialize=0) m.x163 = Var(within=Reals,bounds=(None,None),initialize=0) m.x164 = Var(within=Reals,bounds=(0,0.5),initialize=0) m.x165 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x166 = Var(within=Reals,bounds=(0,0.5),initialize=0) m.x167 = Var(within=Reals,bounds=(None,None),initialize=0) m.x168 = Var(within=Reals,bounds=(0,0.5),initialize=0) m.x169 = Var(within=Reals,bounds=(None,None),initialize=0) m.x170 = Var(within=Reals,bounds=(0,0.7),initialize=0) m.x171 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x172 = Var(within=Reals,bounds=(0,0.7),initialize=0) m.x173 = Var(within=Reals,bounds=(None,None),initialize=0) m.x174 = Var(within=Reals,bounds=(0,0.7),initialize=0) m.x175 = Var(within=Reals,bounds=(None,None),initialize=0) m.x176 = Var(within=Reals,bounds=(0,0.7),initialize=0) m.x177 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x178 = Var(within=Reals,bounds=(0,0.7),initialize=0) m.x179 = Var(within=Reals,bounds=(None,None),initialize=0) m.x180 = Var(within=Reals,bounds=(0,0.7),initialize=0) m.x181 = Var(within=Reals,bounds=(None,None),initialize=0) m.x182 = Var(within=Reals,bounds=(0,0.58),initialize=0) m.x183 = 
Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x184 = Var(within=Reals,bounds=(0,0.58),initialize=0) m.x185 = Var(within=Reals,bounds=(None,None),initialize=0) m.x186 = Var(within=Reals,bounds=(0,0.58),initialize=0) m.x187 = Var(within=Reals,bounds=(None,None),initialize=0) m.x188 = Var(within=Reals,bounds=(0,0.58),initialize=0) m.x189 = Var(within=Reals,bounds=(-1000,1000),initialize=0) m.x190 = Var(within=Reals,bounds=(0,0.58),initialize=0) m.x191 = Var(within=Reals,bounds=(None,None),initialize=0) m.x192 = Var(within=Reals,bounds=(0,0.58),initialize=0) m.x193 = Var(within=Reals,bounds=(None,None),initialize=0) m.x194 = Var(within=Reals,bounds=(62,65),initialize=62) m.x195 = Var(within=Reals,bounds=(62,65),initialize=62) m.x196 = Var(within=Reals,bounds=(62,65),initialize=62) m.x197 = Var(within=Reals,bounds=(92.5,95),initialize=92.5) m.x198 = Var(within=Reals,bounds=(92.5,95),initialize=92.5) m.x199 = Var(within=Reals,bounds=(92.5,95),initialize=92.5) m.x200 = Var(within=Reals,bounds=(105,109),initialize=105) m.x201 = Var(within=Reals,bounds=(105,109),initialize=105) m.x202 = Var(within=Reals,bounds=(105,109),initialize=105) m.x203 = Var(within=Reals,bounds=(0,1000),initialize=0) m.x204 = Var(within=Reals,bounds=(-125,125),initialize=0) m.x205 = Var(within=Reals,bounds=(0,1000),initialize=0) m.x206 = Var(within=Reals,bounds=(-125,125),initialize=0) m.x207 = Var(within=Reals,bounds=(0,1000),initialize=0) m.x208 = Var(within=Reals,bounds=(-125,125),initialize=0) m.x209 = Var(within=Reals,bounds=(0,1000),initialize=0) m.x210 = Var(within=Reals,bounds=(0,1000),initialize=0) m.x211 = Var(within=Reals,bounds=(-100,100),initialize=0) m.x212 = Var(within=Reals,bounds=(0,1000),initialize=0) m.x213 = Var(within=Reals,bounds=(0,1000),initialize=0) m.x214 = Var(within=Reals,bounds=(-100,100),initialize=0) m.x215 = Var(within=Reals,bounds=(0,1000),initialize=0) m.x216 = Var(within=Reals,bounds=(0,1000),initialize=0) m.x217 = Var(within=Reals,bounds=(-100,100),initialize=0) m.x218 = Var(within=Reals,bounds=(0,1000),initialize=0) m.x219 = Var(within=Reals,bounds=(-125,125),initialize=0) m.x220 = Var(within=Reals,bounds=(0,1000),initialize=0) m.x221 = Var(within=Reals,bounds=(-125,125),initialize=0) m.x222 = Var(within=Reals,bounds=(0,1000),initialize=0) m.x223 = Var(within=Reals,bounds=(-125,125),initialize=0) m.x224 = Var(within=Reals,bounds=(49,49),initialize=49) m.x225 = Var(within=Reals,bounds=(-49,1000),initialize=0) m.x226 = Var(within=Reals,bounds=(49,49),initialize=49) m.x227 = Var(within=Reals,bounds=(-49,1000),initialize=0) m.x228 = Var(within=Reals,bounds=(49,49),initialize=49) m.x229 = Var(within=Reals,bounds=(-49,1000),initialize=0) m.x230 = Var(within=Reals,bounds=(-65,1000),initialize=0) m.x231 = Var(within=Reals,bounds=(-65,1000),initialize=0) m.x232 = Var(within=Reals,bounds=(-65,1000),initialize=0) m.x233 = Var(within=Reals,bounds=(-95,1000),initialize=0) m.x234 = Var(within=Reals,bounds=(-95,1000),initialize=0) m.x235 = Var(within=Reals,bounds=(-95,1000),initialize=0) m.x236 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2) m.x237 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2) m.x238 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2) m.x239 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2) m.x240 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2) m.x241 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2) m.x242 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2) m.x243 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2) m.x244 = 
Var(within=Reals,bounds=(0.2,0.8),initialize=0.2) m.x245 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25) m.x246 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25) m.x247 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25) m.x248 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25) m.x249 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25) m.x250 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25) m.x251 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4) m.x252 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4) m.x253 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4) m.x254 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4) m.x255 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4) m.x256 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4) m.x257 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24) m.x258 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24) m.x259 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24) m.x260 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24) m.x261 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24) m.x262 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24) m.x263 = Var(within=Reals,bounds=(0.6,1),initialize=0.6) m.x264 = Var(within=Reals,bounds=(0.6,1),initialize=0.6) m.x265 = Var(within=Reals,bounds=(0.6,1),initialize=0.6) m.x266 = Var(within=Reals,bounds=(0.8,1),initialize=0.8) m.x267 = Var(within=Reals,bounds=(0.8,1),initialize=0.8) m.x268 = Var(within=Reals,bounds=(0.8,1),initialize=0.8) m.x269 = Var(within=Reals,bounds=(0.85,1),initialize=0.85) m.x270 = Var(within=Reals,bounds=(0.85,1),initialize=0.85) m.x271 = Var(within=Reals,bounds=(0.85,1),initialize=0.85) m.x272 = Var(within=Reals,bounds=(0.7,1),initialize=0.7) m.x273 = Var(within=Reals,bounds=(0.7,1),initialize=0.7) m.x274 = Var(within=Reals,bounds=(0.7,1),initialize=0.7) m.x275 = Var(within=Reals,bounds=(100,1000),initialize=100) m.x276 = Var(within=Reals,bounds=(100,1000),initialize=100) m.x277 = Var(within=Reals,bounds=(100,1000),initialize=100) m.x278 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0) m.x279 = Var(within=Reals,bounds=(None,None),initialize=0) m.x280 = Var(within=Reals,bounds=(None,None),initialize=0) m.x281 = Var(within=Reals,bounds=(None,None),initialize=0) m.x282 = Var(within=Reals,bounds=(None,None),initialize=0) m.x283 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0) m.x284 = Var(within=Reals,bounds=(None,None),initialize=0) m.x285 = Var(within=Reals,bounds=(None,None),initialize=0) m.x286 = Var(within=Reals,bounds=(None,None),initialize=0) m.x287 = Var(within=Reals,bounds=(None,None),initialize=0) m.x288 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0) m.x289 = Var(within=Reals,bounds=(None,None),initialize=0) m.x290 = Var(within=Reals,bounds=(None,None),initialize=0) m.x291 = Var(within=Reals,bounds=(None,None),initialize=0) m.x292 = Var(within=Reals,bounds=(None,None),initialize=0) m.x293 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0) m.x294 = Var(within=Reals,bounds=(None,None),initialize=0) m.x295 = Var(within=Reals,bounds=(None,None),initialize=0) m.x296 = Var(within=Reals,bounds=(None,None),initialize=0) m.x297 = Var(within=Reals,bounds=(None,None),initialize=0) m.x298 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0) m.x299 = Var(within=Reals,bounds=(None,None),initialize=0) m.x300 = Var(within=Reals,bounds=(None,None),initialize=0) m.x301 = Var(within=Reals,bounds=(None,None),initialize=0) m.x302 = Var(within=Reals,bounds=(None,None),initialize=0) m.x303 = 
Var(within=Reals,bounds=(0,54.1717996137183),initialize=0) m.x304 = Var(within=Reals,bounds=(None,None),initialize=0) m.x305 = Var(within=Reals,bounds=(None,None),initialize=0) m.x306 = Var(within=Reals,bounds=(None,None),initialize=0) m.x307 = Var(within=Reals,bounds=(None,None),initialize=0) m.x308 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0) m.x309 = Var(within=Reals,bounds=(None,None),initialize=0) m.x310 = Var(within=Reals,bounds=(None,None),initialize=0) m.x311 = Var(within=Reals,bounds=(None,None),initialize=0) m.x312 = Var(within=Reals,bounds=(None,None),initialize=0) m.x313 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0) m.x314 = Var(within=Reals,bounds=(None,None),initialize=0) m.x315 = Var(within=Reals,bounds=(None,None),initialize=0) m.x316 = Var(within=Reals,bounds=(None,None),initialize=0) m.x317 = Var(within=Reals,bounds=(None,None),initialize=0) m.x318 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0) m.x319 = Var(within=Reals,bounds=(None,None),initialize=0) m.x320 = Var(within=Reals,bounds=(None,None),initialize=0) m.x321 = Var(within=Reals,bounds=(None,None),initialize=0) m.x322 = Var(within=Reals,bounds=(None,None),initialize=0) m.x323 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0) m.x324 = Var(within=Reals,bounds=(None,None),initialize=0) m.x325 = Var(within=Reals,bounds=(None,None),initialize=0) m.x326 = Var(within=Reals,bounds=(None,None),initialize=0) m.x327 = Var(within=Reals,bounds=(None,None),initialize=0) m.x328 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0) m.x329 = Var(within=Reals,bounds=(None,None),initialize=0) m.x330 = Var(within=Reals,bounds=(None,None),initialize=0) m.x331 = Var(within=Reals,bounds=(None,None),initialize=0) m.x332 = Var(within=Reals,bounds=(None,None),initialize=0) m.x333 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0) m.x334 = Var(within=Reals,bounds=(None,None),initialize=0) m.x335 = Var(within=Reals,bounds=(None,None),initialize=0) m.x336 = Var(within=Reals,bounds=(None,None),initialize=0) m.x337 = Var(within=Reals,bounds=(None,None),initialize=0) m.x338 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0) m.x339 = Var(within=Reals,bounds=(None,None),initialize=0) m.x340 = Var(within=Reals,bounds=(None,None),initialize=0) m.x341 = Var(within=Reals,bounds=(None,None),initialize=0) m.x342 = Var(within=Reals,bounds=(None,None),initialize=0) m.x343 = Var(within=Reals,bounds=(None,None),initialize=0) m.x344 = Var(within=Reals,bounds=(None,None),initialize=0) m.x345 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0) m.x346 = Var(within=Reals,bounds=(None,None),initialize=0) m.x347 = Var(within=Reals,bounds=(None,None),initialize=0) m.x348 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0) m.x349 = Var(within=Reals,bounds=(None,None),initialize=0) m.x350 = Var(within=Reals,bounds=(None,None),initialize=0) m.x351 = Var(within=Reals,bounds=(None,None),initialize=0) m.x352 = Var(within=Reals,bounds=(None,None),initialize=0) m.x353 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0) m.x354 = Var(within=Reals,bounds=(None,None),initialize=0) m.x355 = Var(within=Reals,bounds=(None,None),initialize=0) m.x356 = Var(within=Reals,bounds=(None,None),initialize=0) m.x357 = Var(within=Reals,bounds=(None,None),initialize=0) m.x358 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0) m.x359 = Var(within=Reals,bounds=(None,None),initialize=0) m.x360 = Var(within=Reals,bounds=(None,None),initialize=0) m.x361 = 
Var(within=Reals,bounds=(None,None),initialize=0) m.x362 = Var(within=Reals,bounds=(None,None),initialize=0) m.x363 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0) m.x364 = Var(within=Reals,bounds=(None,None),initialize=0) m.x365 = Var(within=Reals,bounds=(None,None),initialize=0) m.x366 = Var(within=Reals,bounds=(None,None),initialize=0) m.x367 = Var(within=Reals,bounds=(None,None),initialize=0) m.x368 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0) m.x369 = Var(within=Reals,bounds=(None,None),initialize=0) m.x370 = Var(within=Reals,bounds=(None,None),initialize=0) m.x371 = Var(within=Reals,bounds=(None,None),initialize=0) m.x372 = Var(within=Reals,bounds=(None,None),initialize=0) m.x373 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0) m.x374 = Var(within=Reals,bounds=(None,None),initialize=0) m.x375 = Var(within=Reals,bounds=(None,None),initialize=0) m.x376 = Var(within=Reals,bounds=(None,None),initialize=0) m.x377 = Var(within=Reals,bounds=(None,None),initialize=0) m.x378 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0) m.x379 = Var(within=Reals,bounds=(None,None),initialize=0) m.x380 = Var(within=Reals,bounds=(None,None),initialize=0) m.x381 = Var(within=Reals,bounds=(None,None),initialize=0) m.x382 = Var(within=Reals,bounds=(None,None),initialize=0) m.x383 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0) m.x384 = Var(within=Reals,bounds=(None,None),initialize=0) m.x385 = Var(within=Reals,bounds=(None,None),initialize=0) m.x386 = Var(within=Reals,bounds=(None,None),initialize=0) m.x387 = Var(within=Reals,bounds=(None,None),initialize=0) m.x388 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0) m.x389 = Var(within=Reals,bounds=(None,None),initialize=0) m.x390 = Var(within=Reals,bounds=(None,None),initialize=0) m.x391 = Var(within=Reals,bounds=(None,None),initialize=0) m.x392 = Var(within=Reals,bounds=(None,None),initialize=0) m.x393 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0) m.x394 = Var(within=Reals,bounds=(None,None),initialize=0) m.x395 = Var(within=Reals,bounds=(None,None),initialize=0) m.x396 = Var(within=Reals,bounds=(None,None),initialize=0) m.x397 = Var(within=Reals,bounds=(None,None),initialize=0) m.x398 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0) m.x399 = Var(within=Reals,bounds=(None,None),initialize=0) m.x400 = Var(within=Reals,bounds=(None,None),initialize=0) m.x401 = Var(within=Reals,bounds=(None,None),initialize=0) m.x402 = Var(within=Reals,bounds=(None,None),initialize=0) m.x403 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0) m.x404 = Var(within=Reals,bounds=(None,None),initialize=0) m.x405 = Var(within=Reals,bounds=(None,None),initialize=0) m.x406 = Var(within=Reals,bounds=(None,None),initialize=0) m.x407 = Var(within=Reals,bounds=(None,None),initialize=0) m.x408 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0) m.x409 = Var(within=Reals,bounds=(None,None),initialize=0) m.x410 = Var(within=Reals,bounds=(None,None),initialize=0) m.x411 = Var(within=Reals,bounds=(None,None),initialize=0) m.x412 = Var(within=Reals,bounds=(None,None),initialize=0) m.x413 = Var(within=Reals,bounds=(0,25),initialize=0) m.x414 = Var(within=Reals,bounds=(0,25),initialize=0) m.x415 = Var(within=Reals,bounds=(0,25),initialize=0) m.x416 = Var(within=Reals,bounds=(0,25),initialize=0) m.x417 = Var(within=Reals,bounds=(0,25),initialize=0) m.x418 = Var(within=Reals,bounds=(0,25),initialize=0) m.x419 = Var(within=Reals,bounds=(0,25),initialize=0) m.x420 = 
Var(within=Reals,bounds=(0,25),initialize=0) m.x421 = Var(within=Reals,bounds=(0,25),initialize=0) m.x422 = Var(within=Reals,bounds=(0,0.64),initialize=0) m.x423 = Var(within=Reals,bounds=(0,0.512),initialize=0) m.x424 = Var(within=Reals,bounds=(0,0.64),initialize=0) m.x425 = Var(within=Reals,bounds=(0,0.512),initialize=0) m.x426 = Var(within=Reals,bounds=(0,0.64),initialize=0) m.x427 = Var(within=Reals,bounds=(0,0.512),initialize=0) m.x428 = Var(within=Reals,bounds=(0,0.64),initialize=0) m.x429 = Var(within=Reals,bounds=(0,0.512),initialize=0) m.x430 = Var(within=Reals,bounds=(0,0.64),initialize=0) m.x431 = Var(within=Reals,bounds=(0,0.512),initialize=0) m.x432 = Var(within=Reals,bounds=(0,0.64),initialize=0) m.x433 = Var(within=Reals,bounds=(0,0.512),initialize=0) m.x434 = Var(within=Reals,bounds=(0,0.64),initialize=0) m.x435 = Var(within=Reals,bounds=(0,0.512),initialize=0) m.x436 = Var(within=Reals,bounds=(0,0.64),initialize=0) m.x437 = Var(within=Reals,bounds=(0,0.512),initialize=0) m.x438 = Var(within=Reals,bounds=(0,0.64),initialize=0) m.x439 = Var(within=Reals,bounds=(0,0.512),initialize=0) m.x440 = Var(within=Reals,bounds=(0,0.25),initialize=0) m.x441 = Var(within=Reals,bounds=(0,0.125),initialize=0) m.x442 = Var(within=Reals,bounds=(0,0.25),initialize=0) m.x443 = Var(within=Reals,bounds=(0,0.125),initialize=0) m.x444 = Var(within=Reals,bounds=(0,0.25),initialize=0) m.x445 = Var(within=Reals,bounds=(0,0.125),initialize=0) m.x446 = Var(within=Reals,bounds=(0,0.25),initialize=0) m.x447 = Var(within=Reals,bounds=(0,0.125),initialize=0) m.x448 = Var(within=Reals,bounds=(0,0.25),initialize=0) m.x449 = Var(within=Reals,bounds=(0,0.125),initialize=0) m.x450 = Var(within=Reals,bounds=(0,0.25),initialize=0) m.x451 = Var(within=Reals,bounds=(0,0.125),initialize=0) m.x452 = Var(within=Reals,bounds=(0,0.49),initialize=0) m.x453 = Var(within=Reals,bounds=(0,0.343),initialize=0) m.x454 = Var(within=Reals,bounds=(0,0.49),initialize=0) m.x455 = Var(within=Reals,bounds=(0,0.343),initialize=0) m.x456 = Var(within=Reals,bounds=(0,0.49),initialize=0) m.x457 = Var(within=Reals,bounds=(0,0.343),initialize=0) m.x458 = Var(within=Reals,bounds=(0,0.49),initialize=0) m.x459 = Var(within=Reals,bounds=(0,0.343),initialize=0) m.x460 = Var(within=Reals,bounds=(0,0.49),initialize=0) m.x461 = Var(within=Reals,bounds=(0,0.343),initialize=0) m.x462 = Var(within=Reals,bounds=(0,0.49),initialize=0) m.x463 = Var(within=Reals,bounds=(0,0.343),initialize=0) m.x464 = Var(within=Reals,bounds=(0,0.3364),initialize=0) m.x465 = Var(within=Reals,bounds=(0,0.195112),initialize=0) m.x466 = Var(within=Reals,bounds=(0,0.3364),initialize=0) m.x467 = Var(within=Reals,bounds=(0,0.195112),initialize=0) m.x468 = Var(within=Reals,bounds=(0,0.3364),initialize=0) m.x469 = Var(within=Reals,bounds=(0,0.195112),initialize=0) m.x470 = Var(within=Reals,bounds=(0,0.3364),initialize=0) m.x471 = Var(within=Reals,bounds=(0,0.195112),initialize=0) m.x472 = Var(within=Reals,bounds=(0,0.3364),initialize=0) m.x473 = Var(within=Reals,bounds=(0,0.195112),initialize=0) m.x474 = Var(within=Reals,bounds=(0,0.3364),initialize=0) m.x475 = Var(within=Reals,bounds=(0,0.195112),initialize=0) m.x476 = Var(within=Reals,bounds=(0.36,1),initialize=0.36) m.x477 = Var(within=Reals,bounds=(0.216,1),initialize=0.216) m.x478 = Var(within=Reals,bounds=(0.36,1),initialize=0.36) m.x479 = Var(within=Reals,bounds=(0.216,1),initialize=0.216) m.x480 = Var(within=Reals,bounds=(0.36,1),initialize=0.36) m.x481 = Var(within=Reals,bounds=(0.216,1),initialize=0.216) m.x482 = 
Var(within=Reals,bounds=(0.64,1),initialize=0.64) m.x483 = Var(within=Reals,bounds=(0.512,1),initialize=0.512) m.x484 = Var(within=Reals,bounds=(0.64,1),initialize=0.64) m.x485 = Var(within=Reals,bounds=(0.512,1),initialize=0.512) m.x486 = Var(within=Reals,bounds=(0.64,1),initialize=0.64) m.x487 = Var(within=Reals,bounds=(0.512,1),initialize=0.512) m.x488 = Var(within=Reals,bounds=(0.7225,1),initialize=0.7225) m.x489 = Var(within=Reals,bounds=(0.614125,1),initialize=0.614125) m.x490 = Var(within=Reals,bounds=(0.7225,1),initialize=0.7225) m.x491 = Var(within=Reals,bounds=(0.614125,1),initialize=0.614125) m.x492 = Var(within=Reals,bounds=(0.7225,1),initialize=0.7225) m.x493 = Var(within=Reals,bounds=(0.614125,1),initialize=0.614125) m.x494 = Var(within=Reals,bounds=(0.49,1),initialize=0.49) m.x495 = Var(within=Reals,bounds=(0.343,1),initialize=0.343) m.x496 = Var(within=Reals,bounds=(0.49,1),initialize=0.49) m.x497 = Var(within=Reals,bounds=(0.343,1),initialize=0.343) m.x498 = Var(within=Reals,bounds=(0.49,1),initialize=0.49) m.x499 = Var(within=Reals,bounds=(0.343,1),initialize=0.343) m.obj = Objective(expr= m.x278 + m.x283 + m.x288 + m.x293 + m.x298 + m.x303 + m.x308 + m.x313 + m.x318 + m.x323 + m.x328 + m.x333 + m.x338 + m.x345 + m.x348 + m.x353 + m.x358 + m.x363 + m.x368 + m.x373 + m.x378 + m.x383 + m.x388 + m.x393 + m.x398 + m.x403 + m.x408, sense=minimize) m.c2 = Constraint(expr= m.x141 + 27.42831624*m.x143 + 37.5407324*m.x145 - 57.2814121*m.x147 == 0) m.c3 = Constraint(expr= m.x149 + 27.42831624*m.x151 - 57.2814121*m.x153 + 37.5407324*m.x155 == 0) m.c4 = Constraint(expr= m.x157 + 27.42831624*m.x159 - 57.2814121*m.x161 + 37.5407324*m.x163 == 0) m.c5 = Constraint(expr= - 57.2814121*m.x147 + m.x165 + 27.42831624*m.x167 + 37.5407324*m.x169 == 0) m.c6 = Constraint(expr= - 57.2814121*m.x153 + m.x171 + 37.5407324*m.x173 + 27.42831624*m.x175 == 0) m.c7 = Constraint(expr= - 57.2814121*m.x161 + m.x177 + 37.5407324*m.x179 + 27.42831624*m.x181 == 0) m.c8 = Constraint(expr= - 57.2814121*m.x147 + m.x183 + 37.5407324*m.x185 + 27.42831624*m.x187 == 0) m.c9 = Constraint(expr= - 57.2814121*m.x153 + m.x189 + 27.42831624*m.x191 + 37.5407324*m.x193 == 0) m.c10 = Constraint(expr= m.x29 + 27.42831624*m.x30 + 37.5407324*m.x31 - 57.2814121*m.x161 == 0) m.c11 = Constraint(expr= m.x32 - 76.45219958*m.x33 + 43.14087708*m.x34 + 50.37356589*m.x35 == 0) m.c12 = Constraint(expr= m.x36 + 50.37356589*m.x37 - 76.45219958*m.x38 + 43.14087708*m.x39 == 0) m.c13 = Constraint(expr= m.x40 + 43.14087708*m.x41 + 50.37356589*m.x42 - 76.45219958*m.x43 == 0) m.c14 = Constraint(expr= - 76.45219958*m.x33 + m.x44 + 43.14087708*m.x45 + 50.37356589*m.x46 == 0) m.c15 = Constraint(expr= - 76.45219958*m.x38 + m.x47 + 50.37356589*m.x48 + 43.14087708*m.x49 == 0) m.c16 = Constraint(expr= - 76.45219958*m.x43 + m.x50 + 43.14087708*m.x51 + 50.37356589*m.x52 == 0) m.c17 = Constraint(expr= m.x53 + 58.31011875*m.x54 - 69.39622571*m.x55 - 25.39911174*m.x56 == 0) m.c18 = Constraint(expr= m.x57 - 25.39911174*m.x58 + 58.31011875*m.x59 - 69.39622571*m.x60 == 0) m.c19 = Constraint(expr= m.x61 - 69.39622571*m.x62 + 58.31011875*m.x63 - 25.39911174*m.x64 == 0) m.c20 = Constraint(expr= - 69.39622571*m.x55 + m.x65 + 58.31011875*m.x66 - 25.39911174*m.x67 == 0) m.c21 = Constraint(expr= - 69.39622571*m.x60 + m.x68 - 25.39911174*m.x69 + 58.31011875*m.x70 == 0) m.c22 = Constraint(expr= - 69.39622571*m.x62 + m.x71 + 58.31011875*m.x72 - 25.39911174*m.x73 == 0) m.c23 = Constraint(expr= m.x74 - 2.03724124*m.x75 + 63.61644904*m.x76 - 34.92732674*m.x77 == 0) m.c24 = 
Constraint(expr= m.x78 - 2.03724124*m.x79 - 34.92732674*m.x80 + 63.61644904*m.x81 == 0) m.c25 = Constraint(expr= m.x82 - 2.03724124*m.x83 - 34.92732674*m.x84 + 63.61644904*m.x85 == 0) m.c26 = Constraint(expr= - 34.92732674*m.x77 + m.x86 + 63.61644904*m.x87 - 2.03724124*m.x88 == 0) m.c27 = Constraint(expr= - 34.92732674*m.x80 + m.x89 + 63.61644904*m.x90 - 2.03724124*m.x91 == 0) m.c28 = Constraint(expr= - 34.92732674*m.x84 + m.x92 - 2.03724124*m.x93 + 63.61644904*m.x94 == 0) m.c29 = Constraint(expr= m.x95 + m.x96 + m.x97 >= 0.875) m.c30 = Constraint(expr= - m.x98 + m.x99 == 0) m.c31 = Constraint(expr= - m.x100 + m.x101 == 0) m.c32 = Constraint(expr= - m.x102 + m.x103 == 0) m.c33 = Constraint(expr= - m.x104 + m.x105 == 0) m.c34 = Constraint(expr= - m.x106 + m.x107 == 0) m.c35 = Constraint(expr= - m.x108 + m.x109 == 0) m.c36 = Constraint(expr= m.x104 - m.x110 == 0) m.c37 = Constraint(expr= m.x106 - m.x111 == 0) m.c38 = Constraint(expr= m.x108 - m.x112 == 0) m.c39 = Constraint(expr= - m.x113 + m.x114 == 0) m.c40 = Constraint(expr= - m.x115 + m.x116 == 0) m.c41 = Constraint(expr= - m.x117 + m.x118 == 0) m.c42 = Constraint(expr= m.x119 == 0.296666667) m.c43 = Constraint(expr= m.x120 == 0.294444444) m.c44 = Constraint(expr= m.x121 == 0.283888889) m.c45 = Constraint(expr= m.x95 - m.x99 == 0) m.c46 = Constraint(expr= m.x96 - m.x101 == 0) m.c47 = Constraint(expr= m.x97 - m.x103 == 0) m.c48 = Constraint(expr= 3600*m.x98 - 3600*m.x105 + 1800*m.x122 - 1800*m.x123 == 0) m.c49 = Constraint(expr= 3600*m.x100 - 3600*m.x107 + 1800*m.x124 - 1800*m.x125 == 0) m.c50 = Constraint(expr= 3600*m.x102 - 3600*m.x109 + 1800*m.x126 - 1800*m.x127 == 0) m.c51 = Constraint(expr= 3600*m.x110 - 3600*m.x114 + 720*m.x128 - 720*m.x129 == 0) m.c52 = Constraint(expr= 3600*m.x111 - 3600*m.x116 + 720*m.x130 - 720*m.x131 == 0) m.c53 = Constraint(expr= 3600*m.x112 - 3600*m.x118 + 720*m.x132 - 720*m.x133 == 0) m.c54 = Constraint(expr= 3600*m.x113 - 3600*m.x119 + 1600*m.x134 - 1600*m.x135 == 0) m.c55 = Constraint(expr= 3600*m.x115 - 3600*m.x120 + 1600*m.x136 - 1600*m.x137 == 0) m.c56 = Constraint(expr= 3600*m.x117 - 3600*m.x121 + 1600*m.x138 - 1600*m.x139 == 0) m.c57 = Constraint(expr= - m.x123 + m.x124 == 0) m.c58 = Constraint(expr= - m.x125 + m.x126 == 0) m.c59 = Constraint(expr= - m.x129 + m.x130 == 0) m.c60 = Constraint(expr= - m.x131 + m.x132 == 0) m.c61 = Constraint(expr= - m.x135 + m.x136 == 0) m.c62 = Constraint(expr= - m.x137 + m.x138 == 0) m.c63 = Constraint(expr= - 0.2*m.b2 + m.x140 >= 0) m.c64 = Constraint(expr= - 0.2*m.b3 + m.x142 >= 0) m.c65 = Constraint(expr= - 0.2*m.b4 + m.x144 >= 0) m.c66 = Constraint(expr= - 0.2*m.b5 + m.x146 >= 0) m.c67 = Constraint(expr= - 0.2*m.b6 + m.x148 >= 0) m.c68 = Constraint(expr= - 0.2*m.b7 + m.x150 >= 0) m.c69 = Constraint(expr= - 0.2*m.b8 + m.x152 >= 0) m.c70 = Constraint(expr= - 0.2*m.b9 + m.x154 >= 0) m.c71 = Constraint(expr= - 0.2*m.b10 + m.x156 >= 0) m.c72 = Constraint(expr= - 0.25*m.b11 + m.x158 >= 0) m.c73 = Constraint(expr= - 0.25*m.b12 + m.x160 >= 0) m.c74 = Constraint(expr= - 0.25*m.b13 + m.x162 >= 0) m.c75 = Constraint(expr= - 0.25*m.b14 + m.x164 >= 0) m.c76 = Constraint(expr= - 0.25*m.b15 + m.x166 >= 0) m.c77 = Constraint(expr= - 0.25*m.b16 + m.x168 >= 0) m.c78 = Constraint(expr= - 0.4*m.b17 + m.x170 >= 0) m.c79 = Constraint(expr= - 0.4*m.b18 + m.x172 >= 0) m.c80 = Constraint(expr= - 0.4*m.b19 + m.x174 >= 0) m.c81 = Constraint(expr= - 0.4*m.b20 + m.x176 >= 0) m.c82 = Constraint(expr= - 0.4*m.b21 + m.x178 >= 0) m.c83 = Constraint(expr= - 0.4*m.b22 + m.x180 >= 0) m.c84 = 
Constraint(expr= - 0.24*m.b23 + m.x182 >= 0) m.c85 = Constraint(expr= - 0.24*m.b24 + m.x184 >= 0) m.c86 = Constraint(expr= - 0.24*m.b25 + m.x186 >= 0) m.c87 = Constraint(expr= - 0.24*m.b26 + m.x188 >= 0) m.c88 = Constraint(expr= - 0.24*m.b27 + m.x190 >= 0) m.c89 = Constraint(expr= - 0.24*m.b28 + m.x192 >= 0) m.c90 = Constraint(expr= - 0.8*m.b2 + m.x140 <= 0) m.c91 = Constraint(expr= - 0.8*m.b3 + m.x142 <= 0) m.c92 = Constraint(expr= - 0.8*m.b4 + m.x144 <= 0) m.c93 = Constraint(expr= - 0.8*m.b5 + m.x146 <= 0) m.c94 = Constraint(expr= - 0.8*m.b6 + m.x148 <= 0) m.c95 = Constraint(expr= - 0.8*m.b7 + m.x150 <= 0) m.c96 = Constraint(expr= - 0.8*m.b8 + m.x152 <= 0) m.c97 = Constraint(expr= - 0.8*m.b9 + m.x154 <= 0) m.c98 = Constraint(expr= - 0.8*m.b10 + m.x156 <= 0) m.c99 = Constraint(expr= - 0.5*m.b11 + m.x158 <= 0) m.c100 = Constraint(expr= - 0.5*m.b12 + m.x160 <= 0) m.c101 = Constraint(expr= - 0.5*m.b13 + m.x162 <= 0) m.c102 = Constraint(expr= - 0.5*m.b14 + m.x164 <= 0) m.c103 = Constraint(expr= - 0.5*m.b15 + m.x166 <= 0) m.c104 = Constraint(expr= - 0.5*m.b16 + m.x168 <= 0) m.c105 = Constraint(expr= - 0.7*m.b17 + m.x170 <= 0) m.c106 = Constraint(expr= - 0.7*m.b18 + m.x172 <= 0) m.c107 = Constraint(expr= - 0.7*m.b19 + m.x174 <= 0) m.c108 = Constraint(expr= - 0.7*m.b20 + m.x176 <= 0) m.c109 = Constraint(expr= - 0.7*m.b21 + m.x178 <= 0) m.c110 = Constraint(expr= - 0.7*m.b22 + m.x180 <= 0) m.c111 = Constraint(expr= - 0.58*m.b23 + m.x182 <= 0) m.c112 = Constraint(expr= - 0.58*m.b24 + m.x184 <= 0) m.c113 = Constraint(expr= - 0.58*m.b25 + m.x186 <= 0) m.c114 = Constraint(expr= - 0.58*m.b26 + m.x188 <= 0) m.c115 = Constraint(expr= - 0.58*m.b27 + m.x190 <= 0) m.c116 = Constraint(expr= - 0.58*m.b28 + m.x192 <= 0) m.c117 = Constraint(expr= - m.x122 + m.x194 == 60) m.c118 = Constraint(expr= - m.x124 + m.x195 == 60) m.c119 = Constraint(expr= - m.x126 + m.x196 == 60) m.c120 = Constraint(expr= - m.x128 + m.x197 == 90) m.c121 = Constraint(expr= - m.x130 + m.x198 == 90) m.c122 = Constraint(expr= - m.x132 + m.x199 == 90) m.c123 = Constraint(expr= - m.x134 + m.x200 == 103) m.c124 = Constraint(expr= - m.x136 + m.x201 == 103) m.c125 = Constraint(expr= - m.x138 + m.x202 == 103) m.c126 = Constraint(expr= - m.x194 + m.x203 - m.x204 == 0) m.c127 = Constraint(expr= - m.x195 + m.x205 - m.x206 == 0) m.c128 = Constraint(expr= - m.x196 + m.x207 - m.x208 == 0) m.c129 = Constraint(expr= m.x209 - m.x210 - m.x211 == 0) m.c130 = Constraint(expr= m.x212 - m.x213 - m.x214 == 0) m.c131 = Constraint(expr= m.x215 - m.x216 - m.x217 == 0) m.c132 = Constraint(expr= - m.x200 + m.x218 - m.x219 == 0) m.c133 = Constraint(expr= - m.x201 + m.x220 - m.x221 == 0) m.c134 = Constraint(expr= - m.x202 + m.x222 - m.x223 == 0) m.c135 = Constraint(expr= m.x203 - m.x224 - m.x225 == 0) m.c136 = Constraint(expr= m.x205 - m.x226 - m.x227 == 0) m.c137 = Constraint(expr= m.x207 - m.x228 - m.x229 == 0) m.c138 = Constraint(expr= - m.x194 + m.x209 - m.x230 == 0) m.c139 = Constraint(expr= - m.x195 + m.x212 - m.x231 == 0) m.c140 = Constraint(expr= - m.x196 + m.x215 - m.x232 == 0) m.c141 = Constraint(expr= - m.x197 + m.x218 - m.x233 == 0) m.c142 = Constraint(expr= - m.x198 + m.x220 - m.x234 == 0) m.c143 = Constraint(expr= - m.x199 + m.x222 - m.x235 == 0) m.c144 = Constraint(expr= 0.2*m.b2 - m.x140 + m.x236 <= 0.2) m.c145 = Constraint(expr= 0.2*m.b3 - m.x142 + m.x237 <= 0.2) m.c146 = Constraint(expr= 0.2*m.b4 - m.x144 + m.x238 <= 0.2) m.c147 = Constraint(expr= 0.2*m.b5 - m.x146 + m.x239 <= 0.2) m.c148 = Constraint(expr= 0.2*m.b6 - m.x148 + m.x240 <= 0.2) m.c149 = 
Constraint(expr= 0.2*m.b7 - m.x150 + m.x241 <= 0.2) m.c150 = Constraint(expr= 0.2*m.b8 - m.x152 + m.x242 <= 0.2) m.c151 = Constraint(expr= 0.2*m.b9 - m.x154 + m.x243 <= 0.2) m.c152 = Constraint(expr= 0.2*m.b10 - m.x156 + m.x244 <= 0.2) m.c153 = Constraint(expr= 0.25*m.b11 - m.x158 + m.x245 <= 0.25) m.c154 = Constraint(expr= 0.25*m.b12 - m.x160 + m.x246 <= 0.25) m.c155 = Constraint(expr= 0.25*m.b13 - m.x162 + m.x247 <= 0.25) m.c156 = Constraint(expr= 0.25*m.b14 - m.x164 + m.x248 <= 0.25) m.c157 = Constraint(expr= 0.25*m.b15 - m.x166 + m.x249 <= 0.25) m.c158 = Constraint(expr= 0.25*m.b16 - m.x168 + m.x250 <= 0.25) m.c159 = Constraint(expr= 0.4*m.b17 - m.x170 + m.x251 <= 0.4) m.c160 = Constraint(expr= 0.4*m.b18 - m.x172 + m.x252 <= 0.4) m.c161 = Constraint(expr= 0.4*m.b19 - m.x174 + m.x253 <= 0.4) m.c162 = Constraint(expr= 0.4*m.b20 - m.x176 + m.x254 <= 0.4) m.c163 = Constraint(expr= 0.4*m.b21 - m.x178 + m.x255 <= 0.4) m.c164 = Constraint(expr= 0.4*m.b22 - m.x180 + m.x256 <= 0.4) m.c165 = Constraint(expr= 0.24*m.b23 - m.x182 + m.x257 <= 0.24) m.c166 = Constraint(expr= 0.24*m.b24 - m.x184 + m.x258 <= 0.24) m.c167 = Constraint(expr= 0.24*m.b25 - m.x186 + m.x259 <= 0.24) m.c168 = Constraint(expr= 0.24*m.b26 - m.x188 + m.x260 <= 0.24) m.c169 = Constraint(expr= 0.24*m.b27 - m.x190 + m.x261 <= 0.24) m.c170 = Constraint(expr= 0.24*m.b28 - m.x192 + m.x262 <= 0.24) m.c171 = Constraint(expr= - m.x140 + m.x236 >= 0) m.c172 = Constraint(expr= - m.x142 + m.x237 >= 0) m.c173 = Constraint(expr= - m.x144 + m.x238 >= 0) m.c174 = Constraint(expr= - m.x146 + m.x239 >= 0) m.c175 = Constraint(expr= - m.x148 + m.x240 >= 0) m.c176 = Constraint(expr= - m.x150 + m.x241 >= 0) m.c177 = Constraint(expr= - m.x152 + m.x242 >= 0) m.c178 = Constraint(expr= - m.x154 + m.x243 >= 0) m.c179 = Constraint(expr= - m.x156 + m.x244 >= 0) m.c180 = Constraint(expr= - m.x158 + m.x245 >= 0) m.c181 = Constraint(expr= - m.x160 + m.x246 >= 0) m.c182 = Constraint(expr= - m.x162 + m.x247 >= 0) m.c183 = Constraint(expr= - m.x164 + m.x248 >= 0) m.c184 = Constraint(expr= - m.x166 + m.x249 >= 0) m.c185 = Constraint(expr= - m.x168 + m.x250 >= 0) m.c186 = Constraint(expr= - m.x170 + m.x251 >= 0) m.c187 = Constraint(expr= - m.x172 + m.x252 >= 0) m.c188 = Constraint(expr= - m.x174 + m.x253 >= 0) m.c189 = Constraint(expr= - m.x176 + m.x254 >= 0) m.c190 = Constraint(expr= - m.x178 + m.x255 >= 0) m.c191 = Constraint(expr= - m.x180 + m.x256 >= 0) m.c192 = Constraint(expr= - m.x182 + m.x257 >= 0) m.c193 = Constraint(expr= - m.x184 + m.x258 >= 0) m.c194 = Constraint(expr= - m.x186 + m.x259 >= 0) m.c195 = Constraint(expr= - m.x188 + m.x260 >= 0) m.c196 = Constraint(expr= - m.x190 + m.x261 >= 0) m.c197 = Constraint(expr= - m.x192 + m.x262 >= 0) m.c198 = Constraint(expr= - 0.6*m.b2 + m.x236 <= 0.2) m.c199 = Constraint(expr= - 0.6*m.b3 + m.x237 <= 0.2) m.c200 = Constraint(expr= - 0.6*m.b4 + m.x238 <= 0.2) m.c201 = Constraint(expr= - 0.6*m.b5 + m.x239 <= 0.2) m.c202 = Constraint(expr= - 0.6*m.b6 + m.x240 <= 0.2) m.c203 = Constraint(expr= - 0.6*m.b7 + m.x241 <= 0.2) m.c204 = Constraint(expr= - 0.6*m.b8 + m.x242 <= 0.2) m.c205 = Constraint(expr= - 0.6*m.b9 + m.x243 <= 0.2) m.c206 = Constraint(expr= - 0.6*m.b10 + m.x244 <= 0.2) m.c207 = Constraint(expr= - 0.25*m.b11 + m.x245 <= 0.25) m.c208 = Constraint(expr= - 0.25*m.b12 + m.x246 <= 0.25) m.c209 = Constraint(expr= - 0.25*m.b13 + m.x247 <= 0.25) m.c210 = Constraint(expr= - 0.25*m.b14 + m.x248 <= 0.25) m.c211 = Constraint(expr= - 0.25*m.b15 + m.x249 <= 0.25) m.c212 = Constraint(expr= - 0.25*m.b16 + m.x250 <= 0.25) 
m.c213 = Constraint(expr= - 0.3*m.b17 + m.x251 <= 0.4) m.c214 = Constraint(expr= - 0.3*m.b18 + m.x252 <= 0.4) m.c215 = Constraint(expr= - 0.3*m.b19 + m.x253 <= 0.4) m.c216 = Constraint(expr= - 0.3*m.b20 + m.x254 <= 0.4) m.c217 = Constraint(expr= - 0.3*m.b21 + m.x255 <= 0.4) m.c218 = Constraint(expr= - 0.3*m.b22 + m.x256 <= 0.4) m.c219 = Constraint(expr= - 0.34*m.b23 + m.x257 <= 0.24) m.c220 = Constraint(expr= - 0.34*m.b24 + m.x258 <= 0.24) m.c221 = Constraint(expr= - 0.34*m.b25 + m.x259 <= 0.24) m.c222 = Constraint(expr= - 0.34*m.b26 + m.x260 <= 0.24) m.c223 = Constraint(expr= - 0.34*m.b27 + m.x261 <= 0.24) m.c224 = Constraint(expr= - 0.34*m.b28 + m.x262 <= 0.24) m.c225 = Constraint(expr= - 0.4*m.b2 + m.x263 <= 0.6) m.c226 = Constraint(expr= - 0.4*m.b3 + m.x264 <= 0.6) m.c227 = Constraint(expr= - 0.4*m.b4 + m.x265 <= 0.6) m.c228 = Constraint(expr= - 0.2*m.b11 + m.x266 <= 0.8) m.c229 = Constraint(expr= - 0.2*m.b12 + m.x267 <= 0.8) m.c230 = Constraint(expr= - 0.2*m.b13 + m.x268 <= 0.8) m.c231 = Constraint(expr= - 0.15*m.b17 + m.x269 <= 0.85) m.c232 = Constraint(expr= - 0.15*m.b18 + m.x270 <= 0.85) m.c233 = Constraint(expr= - 0.15*m.b19 + m.x271 <= 0.85) m.c234 = Constraint(expr= - 0.3*m.b23 + m.x272 <= 0.7) m.c235 = Constraint(expr= - 0.3*m.b24 + m.x273 <= 0.7) m.c236 = Constraint(expr= - 0.3*m.b25 + m.x274 <= 0.7) m.c237 = Constraint(expr= m.b2 - m.b5 >= 0) m.c238 = Constraint(expr= m.b3 - m.b6 >= 0) m.c239 = Constraint(expr= m.b4 - m.b7 >= 0) m.c240 = Constraint(expr= m.b5 - m.b8 >= 0) m.c241 = Constraint(expr= m.b6 - m.b9 >= 0) m.c242 = Constraint(expr= m.b7 - m.b10 >= 0) m.c243 = Constraint(expr= m.b11 - m.b14 >= 0) m.c244 = Constraint(expr= m.b12 - m.b15 >= 0) m.c245 = Constraint(expr= m.b13 - m.b16 >= 0) m.c246 = Constraint(expr= m.b17 - m.b20 >= 0) m.c247 = Constraint(expr= m.b18 - m.b21 >= 0) m.c248 = Constraint(expr= m.b19 - m.b22 >= 0) m.c249 = Constraint(expr= m.b23 - m.b26 >= 0) m.c250 = Constraint(expr= m.b24 - m.b27 >= 0) m.c251 = Constraint(expr= m.b25 - m.b28 >= 0) m.c252 = Constraint(expr= m.x99 - m.x140 - m.x146 - m.x152 == 0) m.c253 = Constraint(expr= m.x101 - m.x142 - m.x148 - m.x154 == 0) m.c254 = Constraint(expr= m.x103 - m.x144 - m.x150 - m.x156 == 0) m.c255 = Constraint(expr= m.x105 - m.x158 - m.x164 - m.x170 - m.x176 == 0) m.c256 = Constraint(expr= m.x107 - m.x160 - m.x166 - m.x172 - m.x178 == 0) m.c257 = Constraint(expr= m.x109 - m.x162 - m.x168 - m.x174 - m.x180 == 0) m.c258 = Constraint(expr= m.x114 - m.x182 - m.x188 == 0) m.c259 = Constraint(expr= m.x116 - m.x184 - m.x190 == 0) m.c260 = Constraint(expr= m.x118 - m.x186 - m.x192 == 0) m.c261 = Constraint(expr= - 2000*m.b2 + m.x141 - m.x225 >= -2000) m.c262 = Constraint(expr= - 2000*m.b3 + m.x149 - m.x227 >= -2000) m.c263 = Constraint(expr= - 2000*m.b4 + m.x157 - m.x229 >= -2000) m.c264 = Constraint(expr= - 2000*m.b5 + m.x165 - m.x225 >= -2000) m.c265 = Constraint(expr= - 2000*m.b6 + m.x171 - m.x227 >= -2000) m.c266 = Constraint(expr= - 2000*m.b7 + m.x177 - m.x229 >= -2000) m.c267 = Constraint(expr= - 2000*m.b8 + m.x183 - m.x225 >= -2000) m.c268 = Constraint(expr= - 2000*m.b9 + m.x189 - m.x227 >= -2000) m.c269 = Constraint(expr= - 2000*m.b10 + m.x29 - m.x229 >= -2000) m.c270 = Constraint(expr= - 2000*m.b11 + m.x32 - m.x230 >= -2000) m.c271 = Constraint(expr= - 2000*m.b12 + m.x36 - m.x231 >= -2000) m.c272 = Constraint(expr= - 2000*m.b13 + m.x40 - m.x232 >= -2000) m.c273 = Constraint(expr= - 2000*m.b14 + m.x44 - m.x230 >= -2000) m.c274 = Constraint(expr= - 2000*m.b15 + m.x47 - m.x231 >= -2000) m.c275 = 
Constraint(expr= - 2000*m.b16 + m.x50 - m.x232 >= -2000) m.c276 = Constraint(expr= - 2000*m.b17 + m.x53 - m.x230 >= -2000) m.c277 = Constraint(expr= - 2000*m.b18 + m.x57 - m.x231 >= -2000) m.c278 = Constraint(expr= - 2000*m.b19 + m.x61 - m.x232 >= -2000) m.c279 = Constraint(expr= - 2000*m.b20 + m.x65 - m.x230 >= -2000) m.c280 = Constraint(expr= - 2000*m.b21 + m.x68 - m.x231 >= -2000) m.c281 = Constraint(expr= - 2000*m.b22 + m.x71 - m.x232 >= -2000) m.c282 = Constraint(expr= - 2000*m.b23 + m.x74 - m.x233 >= -2000) m.c283 = Constraint(expr= - 2000*m.b24 + m.x78 - m.x234 >= -2000) m.c284 = Constraint(expr= - 2000*m.b25 + m.x82 - m.x235 >= -2000) m.c285 = Constraint(expr= - 2000*m.b26 + m.x86 - m.x233 >= -2000) m.c286 = Constraint(expr= - 2000*m.b27 + m.x89 - m.x234 >= -2000) m.c287 = Constraint(expr= - 2000*m.b28 + m.x92 - m.x235 >= -2000) m.c288 = Constraint(expr= 1049*m.b2 + m.x141 - m.x225 <= 1049) m.c289 = Constraint(expr= 1049*m.b3 + m.x149 - m.x227 <= 1049) m.c290 = Constraint(expr= 1049*m.b4 + m.x157 - m.x229 <= 1049) m.c291 = Constraint(expr= 1049*m.b5 + m.x165 - m.x225 <= 1049) m.c292 = Constraint(expr= 1049*m.b6 + m.x171 - m.x227 <= 1049) m.c293 = Constraint(expr= 1049*m.b7 + m.x177 - m.x229 <= 1049) m.c294 = Constraint(expr= 1049*m.b8 + m.x183 - m.x225 <= 1049) m.c295 = Constraint(expr= 1049*m.b9 + m.x189 - m.x227 <= 1049) m.c296 = Constraint(expr= 1049*m.b10 + m.x29 - m.x229 <= 1049) m.c297 = Constraint(expr= 1065*m.b11 + m.x32 - m.x230 <= 1065) m.c298 = Constraint(expr= 1065*m.b12 + m.x36 - m.x231 <= 1065) m.c299 = Constraint(expr= 1065*m.b13 + m.x40 - m.x232 <= 1065) m.c300 = Constraint(expr= 1065*m.b14 + m.x44 - m.x230 <= 1065) m.c301 = Constraint(expr= 1065*m.b15 + m.x47 - m.x231 <= 1065) m.c302 = Constraint(expr= 1065*m.b16 + m.x50 - m.x232 <= 1065) m.c303 = Constraint(expr= 1065*m.b17 + m.x53 - m.x230 <= 1065) m.c304 = Constraint(expr= 1065*m.b18 + m.x57 - m.x231 <= 1065) m.c305 = Constraint(expr= 1065*m.b19 + m.x61 - m.x232 <= 1065) m.c306 = Constraint(expr= 1065*m.b20 + m.x65 - m.x230 <= 1065) m.c307 = Constraint(expr= 1065*m.b21 + m.x68 - m.x231 <= 1065) m.c308 = Constraint(expr= 1065*m.b22 + m.x71 - m.x232 <= 1065) m.c309 = Constraint(expr= 1095*m.b23 + m.x74 - m.x233 <= 1095) m.c310 = Constraint(expr= 1095*m.b24 + m.x78 - m.x234 <= 1095) m.c311 = Constraint(expr= 1095*m.b25 + m.x82 - m.x235 <= 1095) m.c312 = Constraint(expr= 1095*m.b26 + m.x86 - m.x233 <= 1095) m.c313 = Constraint(expr= 1095*m.b27 + m.x89 - m.x234 <= 1095) m.c314 = Constraint(expr= 1095*m.b28 + m.x92 - m.x235 <= 1095) m.c315 = Constraint(expr= - m.x197 + m.x210 >= 0) m.c316 = Constraint(expr= - m.x198 + m.x213 >= 0) m.c317 = Constraint(expr= - m.x199 + m.x216 >= 0) m.c318 = Constraint(expr= m.x200 - m.x275 >= 0) m.c319 = Constraint(expr= m.x201 - m.x276 >= 0) m.c320 = Constraint(expr= m.x202 - m.x277 >= 0) m.c321 = Constraint(expr= - 0.309838295393634*m.x278 + 13.94696158*m.x279 + 24.46510819*m.x280 - 7.28623839*m.x281 - 23.57687014*m.x282 <= 0) m.c322 = Constraint(expr= - 0.309838295393634*m.x283 + 13.94696158*m.x284 + 24.46510819*m.x285 - 7.28623839*m.x286 - 23.57687014*m.x287 <= 0) m.c323 = Constraint(expr= - 0.309838295393634*m.x288 + 13.94696158*m.x289 + 24.46510819*m.x290 - 7.28623839*m.x291 - 23.57687014*m.x292 <= 0) m.c324 = Constraint(expr= - 0.309838295393634*m.x293 + 13.94696158*m.x294 + 24.46510819*m.x295 - 7.28623839*m.x296 - 23.57687014*m.x297 <= 0) m.c325 = Constraint(expr= - 0.309838295393634*m.x298 + 13.94696158*m.x299 + 24.46510819*m.x300 - 7.28623839*m.x301 - 23.57687014*m.x302 <= 0) 
m.c326 = Constraint(expr= - 0.309838295393634*m.x303 + 13.94696158*m.x304 + 24.46510819*m.x305 - 7.28623839*m.x306 - 23.57687014*m.x307 <= 0) m.c327 = Constraint(expr= - 0.309838295393634*m.x308 + 13.94696158*m.x309 + 24.46510819*m.x310 - 7.28623839*m.x311 - 23.57687014*m.x312 <= 0) m.c328 = Constraint(expr= - 0.309838295393634*m.x313 + 13.94696158*m.x314 + 24.46510819*m.x315 - 7.28623839*m.x316 - 23.57687014*m.x317 <= 0) m.c329 = Constraint(expr= - 0.309838295393634*m.x318 + 13.94696158*m.x319 + 24.46510819*m.x320 - 7.28623839*m.x321 - 23.57687014*m.x322 <= 0) m.c330 = Constraint(expr= - 0.309838295393634*m.x323 + 29.29404529*m.x324 - 108.39408287*m.x325 + 442.21990639*m.x326 - 454.58448169*m.x327 <= 0) m.c331 = Constraint(expr= - 0.309838295393634*m.x328 + 29.29404529*m.x329 - 108.39408287*m.x330 + 442.21990639*m.x331 - 454.58448169*m.x332 <= 0) m.c332 = Constraint(expr= - 0.309838295393634*m.x333 + 29.29404529*m.x334 - 108.39408287*m.x335 + 442.21990639*m.x336 - 454.58448169*m.x337 <= 0) m.c333 = Constraint(expr= - 0.309838295393634*m.x338 + 29.29404529*m.x339 - 108.39408287*m.x340 + 442.21990639*m.x341 - 454.58448169*m.x342 <= 0) m.c334 = Constraint(expr= 442.21990639*m.x343 - 454.58448169*m.x344 - 0.309838295393634*m.x345 + 29.29404529*m.x346 - 108.39408287*m.x347 <= 0) m.c335 = Constraint(expr= - 0.309838295393634*m.x348 + 29.29404529*m.x349 - 108.39408287*m.x350 + 442.21990639*m.x351 - 454.58448169*m.x352 <= 0) m.c336 = Constraint(expr= - 0.309838295393634*m.x353 + 25.92674585*m.x354 + 18.13482123*m.x355 + 22.12766012*m.x356 - 42.68950769*m.x357 <= 0) m.c337 = Constraint(expr= - 0.309838295393634*m.x358 + 25.92674585*m.x359 + 18.13482123*m.x360 + 22.12766012*m.x361 - 42.68950769*m.x362 <= 0) m.c338 = Constraint(expr= - 0.309838295393634*m.x363 + 25.92674585*m.x364 + 18.13482123*m.x365 + 22.12766012*m.x366 - 42.68950769*m.x367 <= 0) m.c339 = Constraint(expr= - 0.309838295393634*m.x368 + 25.92674585*m.x369 + 18.13482123*m.x370 + 22.12766012*m.x371 - 42.68950769*m.x372 <= 0) m.c340 = Constraint(expr= - 0.309838295393634*m.x373 + 25.92674585*m.x374 + 18.13482123*m.x375 + 22.12766012*m.x376 - 42.68950769*m.x377 <= 0) m.c341 = Constraint(expr= - 0.309838295393634*m.x378 + 25.92674585*m.x379 + 18.13482123*m.x380 + 22.12766012*m.x381 - 42.68950769*m.x382 <= 0) m.c342 = Constraint(expr= - 0.309838295393634*m.x383 + 17.4714791*m.x384 - 39.98407808*m.x385 + 134.55943082*m.x386 - 135.88441782*m.x387 <= 0) m.c343 = Constraint(expr= - 0.309838295393634*m.x388 + 17.4714791*m.x389 - 39.98407808*m.x390 + 134.55943082*m.x391 - 135.88441782*m.x392 <= 0) m.c344 = Constraint(expr= - 0.309838295393634*m.x393 + 17.4714791*m.x394 - 39.98407808*m.x395 + 134.55943082*m.x396 - 135.88441782*m.x397 <= 0) m.c345 = Constraint(expr= - 0.309838295393634*m.x398 + 17.4714791*m.x399 - 39.98407808*m.x400 + 134.55943082*m.x401 - 135.88441782*m.x402 <= 0) m.c346 = Constraint(expr= - 0.309838295393634*m.x403 + 17.4714791*m.x404 - 39.98407808*m.x405 + 134.55943082*m.x406 - 135.88441782*m.x407 <= 0) m.c347 = Constraint(expr= - 0.309838295393634*m.x408 + 17.4714791*m.x409 - 39.98407808*m.x410 + 134.55943082*m.x411 - 135.88441782*m.x412 <= 0) m.c348 = Constraint(expr=m.x98**2 - m.x413 == 0) m.c349 = Constraint(expr= m.x204 - 5*m.x413 == 0) m.c350 = Constraint(expr=m.x100**2 - m.x414 == 0) m.c351 = Constraint(expr= m.x206 - 5*m.x414 == 0) m.c352 = Constraint(expr=m.x102**2 - m.x415 == 0) m.c353 = Constraint(expr= m.x208 - 5*m.x415 == 0) m.c354 = Constraint(expr=m.x104**2 - m.x416 == 0) m.c355 = Constraint(expr= m.x211 - 
4*m.x416 == 0) m.c356 = Constraint(expr=m.x106**2 - m.x417 == 0) m.c357 = Constraint(expr= m.x214 - 4*m.x417 == 0) m.c358 = Constraint(expr=m.x108**2 - m.x418 == 0) m.c359 = Constraint(expr= m.x217 - 4*m.x418 == 0) m.c360 = Constraint(expr=m.x113**2 - m.x419 == 0) m.c361 = Constraint(expr= m.x219 - 5*m.x419 == 0) m.c362 = Constraint(expr=m.x115**2 - m.x420 == 0) m.c363 = Constraint(expr= m.x221 - 5*m.x420 == 0) m.c364 = Constraint(expr=m.x117**2 - m.x421 == 0) m.c365 = Constraint(expr= m.x223 - 5*m.x421 == 0) m.c366 = Constraint(expr=m.x140**2 - m.x422 == 0) m.c367 = Constraint(expr= m.x143 - m.x422 == 0) m.c368 = Constraint(expr=m.x140**3 - m.x423 == 0) m.c369 = Constraint(expr= m.x282 - m.x423 == 0) m.c370 = Constraint(expr=m.x142**2 - m.x424 == 0) m.c371 = Constraint(expr= m.x151 - m.x424 == 0) m.c372 = Constraint(expr=m.x142**3 - m.x425 == 0) m.c373 = Constraint(expr= m.x287 - m.x425 == 0) m.c374 = Constraint(expr=m.x144**2 - m.x426 == 0) m.c375 = Constraint(expr= m.x159 - m.x426 == 0) m.c376 = Constraint(expr=m.x144**3 - m.x427 == 0) m.c377 = Constraint(expr= m.x292 - m.x427 == 0) m.c378 = Constraint(expr=m.x146**2 - m.x428 == 0) m.c379 = Constraint(expr= m.x167 - m.x428 == 0) m.c380 = Constraint(expr=m.x146**3 - m.x429 == 0) m.c381 = Constraint(expr= m.x297 - m.x429 == 0) m.c382 = Constraint(expr=m.x148**2 - m.x430 == 0) m.c383 = Constraint(expr= m.x175 - m.x430 == 0) m.c384 = Constraint(expr=m.x148**3 - m.x431 == 0) m.c385 = Constraint(expr= m.x302 - m.x431 == 0) m.c386 = Constraint(expr=m.x150**2 - m.x432 == 0) m.c387 = Constraint(expr= m.x181 - m.x432 == 0) m.c388 = Constraint(expr=m.x150**3 - m.x433 == 0) m.c389 = Constraint(expr= m.x307 - m.x433 == 0) m.c390 = Constraint(expr=m.x152**2 - m.x434 == 0) m.c391 = Constraint(expr= m.x187 - m.x434 == 0) m.c392 = Constraint(expr=m.x152**3 - m.x435 == 0) m.c393 = Constraint(expr= m.x312 - m.x435 == 0) m.c394 = Constraint(expr=m.x154**2 - m.x436 == 0) m.c395 = Constraint(expr= m.x191 - m.x436 == 0) m.c396 = Constraint(expr=m.x154**3 - m.x437 == 0) m.c397 = Constraint(expr= m.x317 - m.x437 == 0) m.c398 = Constraint(expr=m.x156**2 - m.x438 == 0) m.c399 = Constraint(expr= m.x30 - m.x438 == 0) m.c400 = Constraint(expr=m.x156**3 - m.x439 == 0) m.c401 = Constraint(expr= m.x322 - m.x439 == 0) m.c402 = Constraint(expr=m.x158**2 - m.x440 == 0) m.c403 = Constraint(expr= m.x35 - m.x440 == 0) m.c404 = Constraint(expr=m.x158**3 - m.x441 == 0) m.c405 = Constraint(expr= m.x327 - m.x441 == 0) m.c406 = Constraint(expr=m.x160**2 - m.x442 == 0) m.c407 = Constraint(expr= m.x37 - m.x442 == 0) m.c408 = Constraint(expr=m.x160**3 - m.x443 == 0) m.c409 = Constraint(expr= m.x332 - m.x443 == 0) m.c410 = Constraint(expr=m.x162**2 - m.x444 == 0) m.c411 = Constraint(expr= m.x42 - m.x444 == 0) m.c412 = Constraint(expr=m.x162**3 - m.x445 == 0) m.c413 = Constraint(expr= m.x337 - m.x445 == 0) m.c414 = Constraint(expr=m.x164**2 - m.x446 == 0) m.c415 = Constraint(expr= m.x46 - m.x446 == 0) m.c416 = Constraint(expr=m.x164**3 - m.x447 == 0) m.c417 = Constraint(expr= m.x342 - m.x447 == 0) m.c418 = Constraint(expr=m.x166**2 - m.x448 == 0) m.c419 = Constraint(expr= m.x48 - m.x448 == 0) m.c420 = Constraint(expr=m.x166**3 - m.x449 == 0) m.c421 = Constraint(expr= m.x344 - m.x449 == 0) m.c422 = Constraint(expr=m.x168**2 - m.x450 == 0) m.c423 = Constraint(expr= m.x52 - m.x450 == 0) m.c424 = Constraint(expr=m.x168**3 - m.x451 == 0) m.c425 = Constraint(expr= m.x352 - m.x451 == 0) m.c426 = Constraint(expr=m.x170**2 - m.x452 == 0) m.c427 = Constraint(expr= m.x56 - m.x452 == 0) m.c428 = 
Constraint(expr=m.x170**3 - m.x453 == 0) m.c429 = Constraint(expr= m.x357 - m.x453 == 0) m.c430 = Constraint(expr=m.x172**2 - m.x454 == 0) m.c431 = Constraint(expr= m.x58 - m.x454 == 0) m.c432 = Constraint(expr=m.x172**3 - m.x455 == 0) m.c433 = Constraint(expr= m.x362 - m.x455 == 0) m.c434 = Constraint(expr=m.x174**2 - m.x456 == 0) m.c435 = Constraint(expr= m.x64 - m.x456 == 0) m.c436 = Constraint(expr=m.x174**3 - m.x457 == 0) m.c437 = Constraint(expr= m.x367 - m.x457 == 0) m.c438 = Constraint(expr=m.x176**2 - m.x458 == 0) m.c439 = Constraint(expr= m.x67 - m.x458 == 0) m.c440 = Constraint(expr=m.x176**3 - m.x459 == 0) m.c441 = Constraint(expr= m.x372 - m.x459 == 0) m.c442 = Constraint(expr=m.x178**2 - m.x460 == 0) m.c443 = Constraint(expr= m.x69 - m.x460 == 0) m.c444 = Constraint(expr=m.x178**3 - m.x461 == 0) m.c445 = Constraint(expr= m.x377 - m.x461 == 0) m.c446 = Constraint(expr=m.x180**2 - m.x462 == 0) m.c447 = Constraint(expr= m.x73 - m.x462 == 0) m.c448 = Constraint(expr=m.x180**3 - m.x463 == 0) m.c449 = Constraint(expr= m.x382 - m.x463 == 0) m.c450 = Constraint(expr=m.x182**2 - m.x464 == 0) m.c451 = Constraint(expr= m.x76 - m.x464 == 0) m.c452 = Constraint(expr=m.x182**3 - m.x465 == 0) m.c453 = Constraint(expr= m.x387 - m.x465 == 0) m.c454 = Constraint(expr=m.x184**2 - m.x466 == 0) m.c455 = Constraint(expr= m.x81 - m.x466 == 0) m.c456 = Constraint(expr=m.x184**3 - m.x467 == 0) m.c457 = Constraint(expr= m.x392 - m.x467 == 0) m.c458 = Constraint(expr=m.x186**2 - m.x468 == 0) m.c459 = Constraint(expr= m.x85 - m.x468 == 0) m.c460 = Constraint(expr=m.x186**3 - m.x469 == 0) m.c461 = Constraint(expr= m.x397 - m.x469 == 0) m.c462 = Constraint(expr=m.x188**2 - m.x470 == 0) m.c463 = Constraint(expr= m.x87 - m.x470 == 0) m.c464 = Constraint(expr=m.x188**3 - m.x471 == 0) m.c465 = Constraint(expr= m.x402 - m.x471 == 0) m.c466 = Constraint(expr=m.x190**2 - m.x472 == 0) m.c467 = Constraint(expr= m.x90 - m.x472 == 0) m.c468 = Constraint(expr=m.x190**3 - m.x473 == 0) m.c469 = Constraint(expr= m.x407 - m.x473 == 0) m.c470 = Constraint(expr=m.x192**2 - m.x474 == 0) m.c471 = Constraint(expr= m.x94 - m.x474 == 0) m.c472 = Constraint(expr=m.x192**3 - m.x475 == 0) m.c473 = Constraint(expr= m.x412 - m.x475 == 0) m.c474 = Constraint(expr=m.x140*m.x263 - m.x145 == 0) m.c475 = Constraint(expr=m.x263*m.x422 - m.x281 == 0) m.c476 = Constraint(expr=m.x146*m.x263 - m.x169 == 0) m.c477 = Constraint(expr=m.x263*m.x428 - m.x296 == 0) m.c478 = Constraint(expr=m.x152*m.x263 - m.x185 == 0) m.c479 = Constraint(expr=m.x263*m.x434 - m.x311 == 0) m.c480 = Constraint(expr=m.x263**2 - m.x476 == 0) m.c481 = Constraint(expr= m.x147 - m.x476 == 0) m.c482 = Constraint(expr=m.x140*m.x476 - m.x280 == 0) m.c483 = Constraint(expr=m.x146*m.x476 - m.x295 == 0) m.c484 = Constraint(expr=m.x152*m.x476 - m.x310 == 0) m.c485 = Constraint(expr=m.x263**3 - m.x477 == 0) m.c486 = Constraint(expr=m.b2*m.x477 - m.x279 == 0) m.c487 = Constraint(expr=m.b5*m.x477 - m.x294 == 0) m.c488 = Constraint(expr=m.b8*m.x477 - m.x309 == 0) m.c489 = Constraint(expr=m.x142*m.x264 - m.x155 == 0) m.c490 = Constraint(expr=m.x264*m.x424 - m.x286 == 0) m.c491 = Constraint(expr=m.x148*m.x264 - m.x173 == 0) m.c492 = Constraint(expr=m.x264*m.x430 - m.x301 == 0) m.c493 = Constraint(expr=m.x154*m.x264 - m.x193 == 0) m.c494 = Constraint(expr=m.x264*m.x436 - m.x316 == 0) m.c495 = Constraint(expr=m.x264**2 - m.x478 == 0) m.c496 = Constraint(expr= m.x153 - m.x478 == 0) m.c497 = Constraint(expr=m.x142*m.x478 - m.x285 == 0) m.c498 = Constraint(expr=m.x148*m.x478 - m.x300 == 0) 
m.c499 = Constraint(expr=m.x154*m.x478 - m.x315 == 0) m.c500 = Constraint(expr=m.x264**3 - m.x479 == 0) m.c501 = Constraint(expr=m.b3*m.x479 - m.x284 == 0) m.c502 = Constraint(expr=m.b6*m.x479 - m.x299 == 0) m.c503 = Constraint(expr=m.b9*m.x479 - m.x314 == 0) m.c504 = Constraint(expr=m.x144*m.x265 - m.x163 == 0) m.c505 = Constraint(expr=m.x265*m.x426 - m.x291 == 0) m.c506 = Constraint(expr=m.x150*m.x265 - m.x179 == 0) m.c507 = Constraint(expr=m.x265*m.x432 - m.x306 == 0) m.c508 = Constraint(expr=m.x156*m.x265 - m.x31 == 0) m.c509 = Constraint(expr=m.x265*m.x438 - m.x321 == 0) m.c510 = Constraint(expr=m.x265**2 - m.x480 == 0) m.c511 = Constraint(expr= m.x161 - m.x480 == 0) m.c512 = Constraint(expr=m.x144*m.x480 - m.x290 == 0) m.c513 = Constraint(expr=m.x150*m.x480 - m.x305 == 0) m.c514 = Constraint(expr=m.x156*m.x480 - m.x320 == 0) m.c515 = Constraint(expr=m.x265**3 - m.x481 == 0) m.c516 = Constraint(expr=m.b4*m.x481 - m.x289 == 0) m.c517 = Constraint(expr=m.b7*m.x481 - m.x304 == 0) m.c518 = Constraint(expr=m.b10*m.x481 - m.x319 == 0) m.c519 = Constraint(expr=m.x158*m.x266 - m.x34 == 0) m.c520 = Constraint(expr=m.x266*m.x440 - m.x326 == 0) m.c521 = Constraint(expr=m.x164*m.x266 - m.x45 == 0) m.c522 = Constraint(expr=m.x266*m.x446 - m.x341 == 0) m.c523 = Constraint(expr=m.x266**2 - m.x482 == 0) m.c524 = Constraint(expr= m.x33 - m.x482 == 0) m.c525 = Constraint(expr=m.x158*m.x482 - m.x325 == 0) m.c526 = Constraint(expr=m.x164*m.x482 - m.x340 == 0) m.c527 = Constraint(expr=m.x266**3 - m.x483 == 0) m.c528 = Constraint(expr=m.b11*m.x483 - m.x324 == 0) m.c529 = Constraint(expr=m.b14*m.x483 - m.x339 == 0) m.c530 = Constraint(expr=m.x160*m.x267 - m.x39 == 0) m.c531 = Constraint(expr=m.x267*m.x442 - m.x331 == 0) m.c532 = Constraint(expr=m.x166*m.x267 - m.x49 == 0) m.c533 = Constraint(expr=m.x267*m.x448 - m.x343 == 0) m.c534 = Constraint(expr=m.x267**2 - m.x484 == 0) m.c535 = Constraint(expr= m.x38 - m.x484 == 0) m.c536 = Constraint(expr=m.x160*m.x484 - m.x330 == 0) m.c537 = Constraint(expr=m.x166*m.x484 - m.x347 == 0) m.c538 = Constraint(expr=m.x267**3 - m.x485 == 0) m.c539 = Constraint(expr=m.b12*m.x485 - m.x329 == 0) m.c540 = Constraint(expr=m.b15*m.x485 - m.x346 == 0) m.c541 = Constraint(expr=m.x162*m.x268 - m.x41 == 0) m.c542 = Constraint(expr=m.x268*m.x444 - m.x336 == 0) m.c543 = Constraint(expr=m.x168*m.x268 - m.x51 == 0) m.c544 = Constraint(expr=m.x268*m.x450 - m.x351 == 0) m.c545 = Constraint(expr=m.x268**2 - m.x486 == 0) m.c546 = Constraint(expr= m.x43 - m.x486 == 0) m.c547 = Constraint(expr=m.x162*m.x486 - m.x335 == 0) m.c548 = Constraint(expr=m.x168*m.x486 - m.x350 == 0) m.c549 = Constraint(expr=m.x268**3 - m.x487 == 0) m.c550 = Constraint(expr=m.b13*m.x487 - m.x334 == 0) m.c551 = Constraint(expr=m.b16*m.x487 - m.x349 == 0) m.c552 = Constraint(expr=m.x170*m.x269 - m.x54 == 0) m.c553 = Constraint(expr=m.x269*m.x452 - m.x356 == 0) m.c554 = Constraint(expr=m.x176*m.x269 - m.x66 == 0) m.c555 = Constraint(expr=m.x269*m.x458 - m.x371 == 0) m.c556 = Constraint(expr=m.x269**2 - m.x488 == 0) m.c557 = Constraint(expr= m.x55 - m.x488 == 0) m.c558 = Constraint(expr=m.x170*m.x488 - m.x355 == 0) m.c559 = Constraint(expr=m.x176*m.x488 - m.x370 == 0) m.c560 = Constraint(expr=m.x269**3 - m.x489 == 0) m.c561 = Constraint(expr=m.b17*m.x489 - m.x354 == 0) m.c562 = Constraint(expr=m.b20*m.x489 - m.x369 == 0) m.c563 = Constraint(expr=m.x172*m.x270 - m.x59 == 0) m.c564 = Constraint(expr=m.x270*m.x454 - m.x361 == 0) m.c565 = Constraint(expr=m.x178*m.x270 - m.x70 == 0) m.c566 = Constraint(expr=m.x270*m.x460 - 
m.x376 == 0) m.c567 = Constraint(expr=m.x270**2 - m.x490 == 0) m.c568 = Constraint(expr= m.x60 - m.x490 == 0) m.c569 = Constraint(expr=m.x172*m.x490 - m.x360 == 0) m.c570 = Constraint(expr=m.x178*m.x490 - m.x375 == 0) m.c571 = Constraint(expr=m.x270**3 - m.x491 == 0) m.c572 = Constraint(expr=m.b18*m.x491 - m.x359 == 0) m.c573 = Constraint(expr=m.b21*m.x491 - m.x374 == 0) m.c574 = Constraint(expr=m.x174*m.x271 - m.x63 == 0) m.c575 = Constraint(expr=m.x271*m.x456 - m.x366 == 0) m.c576 = Constraint(expr=m.x180*m.x271 - m.x72 == 0) m.c577 = Constraint(expr=m.x271*m.x462 - m.x381 == 0) m.c578 = Constraint(expr=m.x271**2 - m.x492 == 0) m.c579 = Constraint(expr= m.x62 - m.x492 == 0) m.c580 = Constraint(expr=m.x174*m.x492 - m.x365 == 0) m.c581 = Constraint(expr=m.x180*m.x492 - m.x380 == 0) m.c582 = Constraint(expr=m.x271**3 - m.x493 == 0) m.c583 = Constraint(expr=m.b19*m.x493 - m.x364 == 0) m.c584 = Constraint(expr=m.b22*m.x493 - m.x379 == 0) m.c585 = Constraint(expr=m.x182*m.x272 - m.x75 == 0) m.c586 = Constraint(expr=m.x272*m.x464 - m.x386 == 0) m.c587 = Constraint(expr=m.x188*m.x272 - m.x88 == 0) m.c588 = Constraint(expr=m.x272*m.x470 - m.x401 == 0) m.c589 = Constraint(expr=m.x272**2 - m.x494 == 0) m.c590 = Constraint(expr= m.x77 - m.x494 == 0) m.c591 = Constraint(expr=m.x182*m.x494 - m.x385 == 0) m.c592 = Constraint(expr=m.x188*m.x494 - m.x400 == 0) m.c593 = Constraint(expr=m.x272**3 - m.x495 == 0) m.c594 = Constraint(expr=m.b23*m.x495 - m.x384 == 0) m.c595 = Constraint(expr=m.b26*m.x495 - m.x399 == 0) m.c596 = Constraint(expr=m.x184*m.x273 - m.x79 == 0) m.c597 = Constraint(expr=m.x273*m.x466 - m.x391 == 0) m.c598 = Constraint(expr=m.x190*m.x273 - m.x91 == 0) m.c599 = Constraint(expr=m.x273*m.x472 - m.x406 == 0) m.c600 = Constraint(expr=m.x273**2 - m.x496 == 0) m.c601 = Constraint(expr= m.x80 - m.x496 == 0) m.c602 = Constraint(expr=m.x184*m.x496 - m.x390 == 0) m.c603 = Constraint(expr=m.x190*m.x496 - m.x405 == 0) m.c604 = Constraint(expr=m.x273**3 - m.x497 == 0) m.c605 = Constraint(expr=m.b24*m.x497 - m.x389 == 0) m.c606 = Constraint(expr=m.b27*m.x497 - m.x404 == 0) m.c607 = Constraint(expr=m.x186*m.x274 - m.x83 == 0) m.c608 = Constraint(expr=m.x274*m.x468 - m.x396 == 0) m.c609 = Constraint(expr=m.x192*m.x274 - m.x93 == 0) m.c610 = Constraint(expr=m.x274*m.x474 - m.x411 == 0) m.c611 = Constraint(expr=m.x274**2 - m.x498 == 0) m.c612 = Constraint(expr= m.x84 - m.x498 == 0) m.c613 = Constraint(expr=m.x186*m.x498 - m.x395 == 0) m.c614 = Constraint(expr=m.x192*m.x498 - m.x410 == 0) m.c615 = Constraint(expr=m.x274**3 - m.x499 == 0) m.c616 = Constraint(expr=m.b25*m.x499 - m.x394 == 0) m.c617 = Constraint(expr=m.b28*m.x499 - m.x409 == 0)
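# ---------------------------------------------------------------------------
# Usage sketch (added for illustration; not part of the generated model file).
# Assumption: Pyomo is installed and an MINLP-capable solver such as Couenne
# is available on the PATH -- the solver name below is an assumption, not
# something this file prescribes. The guard keeps a plain import of the model
# side-effect free; solving only happens when the file is run directly.
if __name__ == '__main__':
    from pyomo.environ import SolverFactory

    # Hand the Pyomo model 'm' built above to the chosen solver and echo the
    # solver log so progress and infeasibilities are visible.
    results = SolverFactory('couenne').solve(m, tee=True)
    print(results.solver.termination_condition)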
<reponame>Kolawole39/masonite-guides-tutorial
from .Handler import Handler, StackLine
from .StackOverflowIntegration import StackOverflowIntegration
from .SolutionsIntegration import SolutionsIntegration
<reponame>Cal-CS-61A-Staff/templar
"""Base exception for Templar."""


class TemplarError(Exception):
    """Top-level exception for Templar."""
    pass
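# Usage sketch (illustrative; 'ConfigError' below is a hypothetical subclass,
# not defined by templar). Deriving package-specific errors from TemplarError
# lets callers catch the whole family with a single except clause:
#
#     class ConfigError(TemplarError):
#         """Raised for an invalid Templar configuration."""
#
#     try:
#         raise ConfigError('missing template directory')
#     except TemplarError as err:
#         print(err)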
""" Cisco_IOS_XR_ipv4_vrrp_cfg This module contains a collection of YANG definitions for Cisco IOS\-XR ipv4\-vrrp package configuration. This module contains definitions for the following management objects\: vrrp\: VRRP configuration This YANG module augments the Cisco\-IOS\-XR\-snmp\-agent\-cfg module with configuration data. Copyright (c) 2013\-2016 by Cisco Systems, Inc. All rights reserved. """ import re import collections from enum import Enum from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict from ydk.errors import YPYError, YPYModelError class Vrrp(object): """ VRRP configuration .. attribute:: interfaces Interface configuration table **type**\: :py:class:`Interfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces>` .. attribute:: logging VRRP logging options **type**\: :py:class:`Logging <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Logging>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.interfaces = Vrrp.Interfaces() self.interfaces.parent = self self.logging = Vrrp.Logging() self.logging.parent = self class Logging(object): """ VRRP logging options .. attribute:: state_change_disable VRRP state change IOS messages disable **type**\: :py:class:`Empty<ydk.types.Empty>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.state_change_disable = None @property def _common_path(self): return '/Cisco-IOS-XR-ipv4-vrrp-cfg:vrrp/Cisco-IOS-XR-ipv4-vrrp-cfg:logging' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.state_change_disable is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Logging']['meta_info'] class Interfaces(object): """ Interface configuration table .. attribute:: interface The interface being configured **type**\: list of :py:class:`Interface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.interface = YList() self.interface.parent = self self.interface.name = 'interface' class Interface(object): """ The interface being configured .. attribute:: interface_name <key> Interface name to configure **type**\: str **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm) .. attribute:: bfd BFD configuration **type**\: :py:class:`Bfd <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Bfd>` .. attribute:: delay Minimum and Reload Delay **type**\: :py:class:`Delay <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Delay>` .. attribute:: ipv4 IPv4 VRRP configuration **type**\: :py:class:`Ipv4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4>` .. attribute:: ipv6 IPv6 VRRP configuration **type**\: :py:class:`Ipv6 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv6>` .. 
attribute:: mac_refresh VRRP Slave MAC\-refresh rate in seconds **type**\: int **range:** 0..10000 **units**\: second **default value**\: 60 """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.interface_name = None self.bfd = Vrrp.Interfaces.Interface.Bfd() self.bfd.parent = self self.delay = Vrrp.Interfaces.Interface.Delay() self.delay.parent = self self.ipv4 = Vrrp.Interfaces.Interface.Ipv4() self.ipv4.parent = self self.ipv6 = Vrrp.Interfaces.Interface.Ipv6() self.ipv6.parent = self self.mac_refresh = None class Ipv6(object): """ IPv6 VRRP configuration .. attribute:: slave_virtual_routers The VRRP slave group configuration table **type**\: :py:class:`SlaveVirtualRouters <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv6.SlaveVirtualRouters>` .. attribute:: version3 Version 3 VRRP configuration **type**\: :py:class:`Version3 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv6.Version3>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.slave_virtual_routers = Vrrp.Interfaces.Interface.Ipv6.SlaveVirtualRouters() self.slave_virtual_routers.parent = self self.version3 = Vrrp.Interfaces.Interface.Ipv6.Version3() self.version3.parent = self class Version3(object): """ Version 3 VRRP configuration .. attribute:: virtual_routers The VRRP virtual router configuration table **type**\: :py:class:`VirtualRouters <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.virtual_routers = Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters() self.virtual_routers.parent = self class VirtualRouters(object): """ The VRRP virtual router configuration table .. attribute:: virtual_router The VRRP virtual router being configured **type**\: list of :py:class:`VirtualRouter <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.virtual_router = YList() self.virtual_router.parent = self self.virtual_router.name = 'virtual_router' class VirtualRouter(object): """ The VRRP virtual router being configured .. attribute:: vr_id <key> VRID Virtual Router Identifier **type**\: int **range:** 1..255 .. attribute:: accept_mode_disable Disable Accept Mode for this virtual IPAddress **type**\: :py:class:`Empty<ydk.types.Empty>` .. attribute:: bfd Enable use of Bidirectional Forwarding Detection for this IP **type**\: one of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? ---- **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? ---- .. attribute:: global_ipv6_addresses The table of VRRP virtual global IPv6 addresses **type**\: :py:class:`GlobalIpv6Addresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.GlobalIpv6Addresses>` .. 
attribute:: link_local_ipv6_address The VRRP IPv6 virtual linklocal address **type**\: :py:class:`LinkLocalIpv6Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.LinkLocalIpv6Address>` .. attribute:: preempt Preempt Master router if higher priority **type**\: int **range:** 0..3600 **default value**\: 0 .. attribute:: priority Priority value **type**\: int **range:** 1..254 **default value**\: 100 .. attribute:: session_name VRRP Session Name **type**\: str **length:** 0..16 .. attribute:: timer Set advertisement timer **type**\: :py:class:`Timer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.Timer>` .. attribute:: tracked_objects Track an object, reducing priority if it goes down **type**\: :py:class:`TrackedObjects <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.TrackedObjects>` .. attribute:: tracks Track an item, reducing priority if it goes down **type**\: :py:class:`Tracks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.Tracks>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.vr_id = None self.accept_mode_disable = None self.bfd = None self.global_ipv6_addresses = Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.GlobalIpv6Addresses() self.global_ipv6_addresses.parent = self self.link_local_ipv6_address = Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.LinkLocalIpv6Address() self.link_local_ipv6_address.parent = self self.preempt = None self.priority = None self.session_name = None self.timer = Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.Timer() self.timer.parent = self self.tracked_objects = Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.TrackedObjects() self.tracked_objects.parent = self self.tracks = Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.Tracks() self.tracks.parent = self class GlobalIpv6Addresses(object): """ The table of VRRP virtual global IPv6 addresses .. attribute:: global_ipv6_address A VRRP virtual global IPv6 IP address **type**\: list of :py:class:`GlobalIpv6Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.GlobalIpv6Addresses.GlobalIpv6Address>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.global_ipv6_address = YList() self.global_ipv6_address.parent = self self.global_ipv6_address.name = 'global_ipv6_address' class GlobalIpv6Address(object): """ A VRRP virtual global IPv6 IP address .. attribute:: ip_address <key> VRRP virtual global IPv6 address **type**\: one of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? ---- **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? 
---- """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.ip_address = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') if self.ip_address is None: raise YPYModelError('Key property ip_address is None') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:global-ipv6-address[Cisco-IOS-XR-ipv4-vrrp-cfg:ip-address = ' + str(self.ip_address) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.ip_address is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.GlobalIpv6Addresses.GlobalIpv6Address']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:global-ipv6-addresses' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.global_ipv6_address is not None: for child_ref in self.global_ipv6_address: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.GlobalIpv6Addresses']['meta_info'] class Tracks(object): """ Track an item, reducing priority if it goes down .. attribute:: track Object to be tracked **type**\: list of :py:class:`Track <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.Tracks.Track>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.track = YList() self.track.parent = self self.track.name = 'track' class Track(object): """ Object to be tracked .. attribute:: interface_name <key> Object to be tracked, interface name for interfaces **type**\: str **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm) .. attribute:: priority Priority decrement **type**\: int **range:** 1..254 **mandatory**\: True """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.interface_name = None self.priority = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') if self.interface_name is None: raise YPYModelError('Key property interface_name is None') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:track[Cisco-IOS-XR-ipv4-vrrp-cfg:interface-name = ' + str(self.interface_name) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.interface_name is not None: return True if self.priority is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.Tracks.Track']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:tracks' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.track is not None: for child_ref in self.track: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.Tracks']['meta_info'] class Timer(object): """ Set advertisement timer .. attribute:: advertisement_time_in_msec Advertisement time in milliseconds **type**\: int **range:** 100..3000 **units**\: millisecond .. attribute:: advertisement_time_in_sec Advertisement time in seconds **type**\: int **range:** 1..40 **units**\: second .. attribute:: forced TRUE \- Force configured timer values to be used, required when configured in milliseconds **type**\: bool **default value**\: false .. attribute:: in_msec TRUE \- Advertise time configured in milliseconds, FALSE \- Advertise time configured in seconds **type**\: bool **default value**\: false """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.advertisement_time_in_msec = None self.advertisement_time_in_sec = None self.forced = None self.in_msec = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:timer' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.advertisement_time_in_msec is not None: return True if self.advertisement_time_in_sec is not None: return True if self.forced is not None: return True if self.in_msec is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.Timer']['meta_info'] class TrackedObjects(object): """ Track an object, reducing priority if it goes down .. 
attribute:: tracked_object Object to be tracked **type**\: list of :py:class:`TrackedObject <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.TrackedObjects.TrackedObject>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.tracked_object = YList() self.tracked_object.parent = self self.tracked_object.name = 'tracked_object' class TrackedObject(object): """ Object to be tracked .. attribute:: object_name <key> Object to be tracked, interface name for interfaces **type**\: str **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+ .. attribute:: priority_decrement Priority decrement **type**\: int **range:** 1..254 **mandatory**\: True """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.object_name = None self.priority_decrement = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') if self.object_name is None: raise YPYModelError('Key property object_name is None') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:tracked-object[Cisco-IOS-XR-ipv4-vrrp-cfg:object-name = ' + str(self.object_name) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.object_name is not None: return True if self.priority_decrement is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.TrackedObjects.TrackedObject']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:tracked-objects' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.tracked_object is not None: for child_ref in self.tracked_object: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.TrackedObjects']['meta_info'] class LinkLocalIpv6Address(object): """ The VRRP IPv6 virtual linklocal address .. attribute:: auto_configure TRUE if the virtual linklocal address is to be autoconfigured FALSE if an IPv6 virtual linklocal address is configured **type**\: bool **default value**\: false .. attribute:: ip_address VRRP IPv6 virtual linklocal address **type**\: one of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? ---- **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? 
---- """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.auto_configure = None self.ip_address = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:link-local-ipv6-address' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.auto_configure is not None: return True if self.ip_address is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter.LinkLocalIpv6Address']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') if self.vr_id is None: raise YPYModelError('Key property vr_id is None') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:virtual-router[Cisco-IOS-XR-ipv4-vrrp-cfg:vr-id = ' + str(self.vr_id) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.vr_id is not None: return True if self.accept_mode_disable is not None: return True if self.bfd is not None: return True if self.global_ipv6_addresses is not None and self.global_ipv6_addresses._has_data(): return True if self.link_local_ipv6_address is not None and self.link_local_ipv6_address._has_data(): return True if self.preempt is not None: return True if self.priority is not None: return True if self.session_name is not None: return True if self.timer is not None and self.timer._has_data(): return True if self.tracked_objects is not None and self.tracked_objects._has_data(): return True if self.tracks is not None and self.tracks._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters.VirtualRouter']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:virtual-routers' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.virtual_router is not None: for child_ref in self.virtual_router: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv6.Version3.VirtualRouters']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:version3' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.virtual_routers is not None and self.virtual_routers._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv6.Version3']['meta_info'] class SlaveVirtualRouters(object): """ The VRRP slave group configuration table .. attribute:: slave_virtual_router The VRRP slave being configured **type**\: list of :py:class:`SlaveVirtualRouter <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv6.SlaveVirtualRouters.SlaveVirtualRouter>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.slave_virtual_router = YList() self.slave_virtual_router.parent = self self.slave_virtual_router.name = 'slave_virtual_router' class SlaveVirtualRouter(object): """ The VRRP slave being configured .. attribute:: slave_virtual_router_id <key> Virtual Router ID **type**\: int **range:** 1..255 .. attribute:: accept_mode_disable Disable Accept Mode for this virtual IPAddress **type**\: :py:class:`Empty<ydk.types.Empty>` .. attribute:: follow VRRP Session name for this slave to follow **type**\: str .. attribute:: global_ipv6_addresses The table of VRRP virtual global IPv6 addresses **type**\: :py:class:`GlobalIpv6Addresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv6.SlaveVirtualRouters.SlaveVirtualRouter.GlobalIpv6Addresses>` .. attribute:: link_local_ipv6_address The VRRP IPv6 virtual linklocal address **type**\: :py:class:`LinkLocalIpv6Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv6.SlaveVirtualRouters.SlaveVirtualRouter.LinkLocalIpv6Address>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.slave_virtual_router_id = None self.accept_mode_disable = None self.follow = None self.global_ipv6_addresses = Vrrp.Interfaces.Interface.Ipv6.SlaveVirtualRouters.SlaveVirtualRouter.GlobalIpv6Addresses() self.global_ipv6_addresses.parent = self self.link_local_ipv6_address = Vrrp.Interfaces.Interface.Ipv6.SlaveVirtualRouters.SlaveVirtualRouter.LinkLocalIpv6Address() self.link_local_ipv6_address.parent = self class LinkLocalIpv6Address(object): """ The VRRP IPv6 virtual linklocal address .. attribute:: auto_configure TRUE if the virtual linklocal address is to be autoconfigured FALSE if an IPv6 virtual linklocal address is configured **type**\: bool **default value**\: false .. attribute:: ip_address VRRP IPv6 virtual linklocal address **type**\: one of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? ---- **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? 
---- """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.auto_configure = None self.ip_address = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:link-local-ipv6-address' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.auto_configure is not None: return True if self.ip_address is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv6.SlaveVirtualRouters.SlaveVirtualRouter.LinkLocalIpv6Address']['meta_info'] class GlobalIpv6Addresses(object): """ The table of VRRP virtual global IPv6 addresses .. attribute:: global_ipv6_address A VRRP virtual global IPv6 IP address **type**\: list of :py:class:`GlobalIpv6Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv6.SlaveVirtualRouters.SlaveVirtualRouter.GlobalIpv6Addresses.GlobalIpv6Address>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.global_ipv6_address = YList() self.global_ipv6_address.parent = self self.global_ipv6_address.name = 'global_ipv6_address' class GlobalIpv6Address(object): """ A VRRP virtual global IPv6 IP address .. attribute:: ip_address <key> VRRP virtual global IPv6 address **type**\: one of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? ---- **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? ---- """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.ip_address = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') if self.ip_address is None: raise YPYModelError('Key property ip_address is None') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:global-ipv6-address[Cisco-IOS-XR-ipv4-vrrp-cfg:ip-address = ' + str(self.ip_address) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.ip_address is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv6.SlaveVirtualRouters.SlaveVirtualRouter.GlobalIpv6Addresses.GlobalIpv6Address']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:global-ipv6-addresses' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.global_ipv6_address is not None: for child_ref in self.global_ipv6_address: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv6.SlaveVirtualRouters.SlaveVirtualRouter.GlobalIpv6Addresses']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') if self.slave_virtual_router_id is None: raise YPYModelError('Key property slave_virtual_router_id is None') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:slave-virtual-router[Cisco-IOS-XR-ipv4-vrrp-cfg:slave-virtual-router-id = ' + str(self.slave_virtual_router_id) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.slave_virtual_router_id is not None: return True if self.accept_mode_disable is not None: return True if self.follow is not None: return True if self.global_ipv6_addresses is not None and self.global_ipv6_addresses._has_data(): return True if self.link_local_ipv6_address is not None and self.link_local_ipv6_address._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv6.SlaveVirtualRouters.SlaveVirtualRouter']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:slave-virtual-routers' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.slave_virtual_router is not None: for child_ref in self.slave_virtual_router: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv6.SlaveVirtualRouters']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:ipv6' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.slave_virtual_routers is not None and self.slave_virtual_routers._has_data(): return True if self.version3 is not None and self.version3._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv6']['meta_info'] class Delay(object): """ Minimum and Reload Delay .. attribute:: min_delay Minimum delay in seconds **type**\: int **range:** 0..10000 **units**\: second .. 
attribute:: reload_delay Reload delay in seconds **type**\: int **range:** 0..10000 **units**\: second """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.min_delay = None self.reload_delay = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:delay' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.min_delay is not None: return True if self.reload_delay is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Delay']['meta_info'] class Ipv4(object): """ IPv4 VRRP configuration .. attribute:: slave_virtual_routers The VRRP slave group configuration table **type**\: :py:class:`SlaveVirtualRouters <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.SlaveVirtualRouters>` .. attribute:: version2 Version 2 VRRP configuration **type**\: :py:class:`Version2 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version2>` .. attribute:: version3 Version 3 VRRP configuration **type**\: :py:class:`Version3 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version3>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.slave_virtual_routers = Vrrp.Interfaces.Interface.Ipv4.SlaveVirtualRouters() self.slave_virtual_routers.parent = self self.version2 = Vrrp.Interfaces.Interface.Ipv4.Version2() self.version2.parent = self self.version3 = Vrrp.Interfaces.Interface.Ipv4.Version3() self.version3.parent = self class Version3(object): """ Version 3 VRRP configuration .. attribute:: virtual_routers The VRRP virtual router configuration table **type**\: :py:class:`VirtualRouters <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.virtual_routers = Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters() self.virtual_routers.parent = self class VirtualRouters(object): """ The VRRP virtual router configuration table .. attribute:: virtual_router The VRRP virtual router being configured **type**\: list of :py:class:`VirtualRouter <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.virtual_router = YList() self.virtual_router.parent = self self.virtual_router.name = 'virtual_router' class VirtualRouter(object): """ The VRRP virtual router being configured .. attribute:: vr_id <key> VRID Virtual Router Identifier **type**\: int **range:** 1..255 .. attribute:: accept_mode_disable Disable Accept Mode for this virtual IPAddress **type**\: :py:class:`Empty<ydk.types.Empty>` .. attribute:: bfd Enable use of Bidirectional Forwarding Detection for this IP **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? .. 
attribute:: preempt Preempt Master router if higher priority **type**\: int **range:** 0..3600 **default value**\: 0 .. attribute:: primary_ipv4_address The Primary VRRP IPv4 address **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? .. attribute:: priority Priority value **type**\: int **range:** 1..254 **default value**\: 100 .. attribute:: secondary_ipv4_addresses The table of VRRP secondary IPv4 addresses **type**\: :py:class:`SecondaryIpv4Addresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter.SecondaryIpv4Addresses>` .. attribute:: session_name VRRP Session Name **type**\: str **length:** 0..16 .. attribute:: timer Set advertisement timer **type**\: :py:class:`Timer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter.Timer>` .. attribute:: tracked_objects Track an object, reducing priority if it goes down **type**\: :py:class:`TrackedObjects <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter.TrackedObjects>` .. attribute:: tracks Track an item, reducing priority if it goes down **type**\: :py:class:`Tracks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter.Tracks>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.vr_id = None self.accept_mode_disable = None self.bfd = None self.preempt = None self.primary_ipv4_address = None self.priority = None self.secondary_ipv4_addresses = Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter.SecondaryIpv4Addresses() self.secondary_ipv4_addresses.parent = self self.session_name = None self.timer = Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter.Timer() self.timer.parent = self self.tracked_objects = Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter.TrackedObjects() self.tracked_objects.parent = self self.tracks = Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter.Tracks() self.tracks.parent = self class Timer(object): """ Set advertisement timer .. attribute:: advertisement_time_in_msec Advertisement time in milliseconds **type**\: int **range:** 100..3000 **units**\: millisecond .. attribute:: advertisement_time_in_sec Advertisement time in seconds **type**\: int **range:** 1..40 **units**\: second .. attribute:: forced TRUE \- Force configured timer values to be used, required when configured in milliseconds **type**\: bool **default value**\: false .. attribute:: in_msec TRUE \- Advertise time configured in milliseconds, FALSE \- Advertise time configured in seconds **type**\: bool **default value**\: false """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.advertisement_time_in_msec = None self.advertisement_time_in_sec = None self.forced = None self.in_msec = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:timer' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.advertisement_time_in_msec is not None: return True if self.advertisement_time_in_sec is not None: return True if self.forced is not None: return True if self.in_msec is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter.Timer']['meta_info'] class SecondaryIpv4Addresses(object): """ The table of VRRP secondary IPv4 addresses .. attribute:: secondary_ipv4_address A VRRP secondary IPv4 address **type**\: list of :py:class:`SecondaryIpv4Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter.SecondaryIpv4Addresses.SecondaryIpv4Address>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.secondary_ipv4_address = YList() self.secondary_ipv4_address.parent = self self.secondary_ipv4_address.name = 'secondary_ipv4_address' class SecondaryIpv4Address(object): """ A VRRP secondary IPv4 address .. attribute:: ip_address <key> VRRP Secondary IPv4 address **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.ip_address = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') if self.ip_address is None: raise YPYModelError('Key property ip_address is None') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:secondary-ipv4-address[Cisco-IOS-XR-ipv4-vrrp-cfg:ip-address = ' + str(self.ip_address) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.ip_address is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter.SecondaryIpv4Addresses.SecondaryIpv4Address']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:secondary-ipv4-addresses' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.secondary_ipv4_address is not None: for child_ref in self.secondary_ipv4_address: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter.SecondaryIpv4Addresses']['meta_info'] class TrackedObjects(object): """ Track an object, reducing priority if it goes down .. 
attribute:: tracked_object Object to be tracked **type**\: list of :py:class:`TrackedObject <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter.TrackedObjects.TrackedObject>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.tracked_object = YList() self.tracked_object.parent = self self.tracked_object.name = 'tracked_object' class TrackedObject(object): """ Object to be tracked .. attribute:: object_name <key> Object to be tracked, interface name for interfaces **type**\: str **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+ .. attribute:: priority_decrement Priority decrement **type**\: int **range:** 1..254 **mandatory**\: True """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.object_name = None self.priority_decrement = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') if self.object_name is None: raise YPYModelError('Key property object_name is None') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:tracked-object[Cisco-IOS-XR-ipv4-vrrp-cfg:object-name = ' + str(self.object_name) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.object_name is not None: return True if self.priority_decrement is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter.TrackedObjects.TrackedObject']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:tracked-objects' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.tracked_object is not None: for child_ref in self.tracked_object: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter.TrackedObjects']['meta_info'] class Tracks(object): """ Track an item, reducing priority if it goes down .. attribute:: track Object to be tracked **type**\: list of :py:class:`Track <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter.Tracks.Track>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.track = YList() self.track.parent = self self.track.name = 'track' class Track(object): """ Object to be tracked .. attribute:: interface_name <key> Object to be tracked, interface name for interfaces **type**\: str **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm) .. 
attribute:: priority Priority decrement **type**\: int **range:** 1..254 **mandatory**\: True """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.interface_name = None self.priority = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') if self.interface_name is None: raise YPYModelError('Key property interface_name is None') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:track[Cisco-IOS-XR-ipv4-vrrp-cfg:interface-name = ' + str(self.interface_name) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.interface_name is not None: return True if self.priority is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter.Tracks.Track']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:tracks' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.track is not None: for child_ref in self.track: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter.Tracks']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') if self.vr_id is None: raise YPYModelError('Key property vr_id is None') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:virtual-router[Cisco-IOS-XR-ipv4-vrrp-cfg:vr-id = ' + str(self.vr_id) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.vr_id is not None: return True if self.accept_mode_disable is not None: return True if self.bfd is not None: return True if self.preempt is not None: return True if self.primary_ipv4_address is not None: return True if self.priority is not None: return True if self.secondary_ipv4_addresses is not None and self.secondary_ipv4_addresses._has_data(): return True if self.session_name is not None: return True if self.timer is not None and self.timer._has_data(): return True if self.tracked_objects is not None and self.tracked_objects._has_data(): return True if self.tracks is not None and self.tracks._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters.VirtualRouter']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:virtual-routers' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.virtual_router is not None: for child_ref in self.virtual_router: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version3.VirtualRouters']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:version3' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.virtual_routers is not None and self.virtual_routers._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version3']['meta_info'] class SlaveVirtualRouters(object): """ The VRRP slave group configuration table .. attribute:: slave_virtual_router The VRRP slave being configured **type**\: list of :py:class:`SlaveVirtualRouter <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.SlaveVirtualRouters.SlaveVirtualRouter>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.slave_virtual_router = YList() self.slave_virtual_router.parent = self self.slave_virtual_router.name = 'slave_virtual_router' class SlaveVirtualRouter(object): """ The VRRP slave being configured .. attribute:: slave_virtual_router_id <key> Virtual Router ID **type**\: int **range:** 1..255 .. attribute:: accept_mode_disable Disable Accept Mode for this virtual IPAddress **type**\: :py:class:`Empty<ydk.types.Empty>` .. attribute:: follow VRRP Session name for this slave to follow **type**\: str .. attribute:: primary_ipv4_address The Primary VRRP IPv4 address **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? .. attribute:: secondary_ipv4_addresses The table of VRRP secondary IPv4 addresses **type**\: :py:class:`SecondaryIpv4Addresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.SlaveVirtualRouters.SlaveVirtualRouter.SecondaryIpv4Addresses>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.slave_virtual_router_id = None self.accept_mode_disable = None self.follow = None self.primary_ipv4_address = None self.secondary_ipv4_addresses = Vrrp.Interfaces.Interface.Ipv4.SlaveVirtualRouters.SlaveVirtualRouter.SecondaryIpv4Addresses() self.secondary_ipv4_addresses.parent = self class SecondaryIpv4Addresses(object): """ The table of VRRP secondary IPv4 addresses .. 
attribute:: secondary_ipv4_address A VRRP secondary IPv4 address **type**\: list of :py:class:`SecondaryIpv4Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.SlaveVirtualRouters.SlaveVirtualRouter.SecondaryIpv4Addresses.SecondaryIpv4Address>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.secondary_ipv4_address = YList() self.secondary_ipv4_address.parent = self self.secondary_ipv4_address.name = 'secondary_ipv4_address' class SecondaryIpv4Address(object): """ A VRRP secondary IPv4 address .. attribute:: ip_address <key> VRRP Secondary IPv4 address **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.ip_address = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') if self.ip_address is None: raise YPYModelError('Key property ip_address is None') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:secondary-ipv4-address[Cisco-IOS-XR-ipv4-vrrp-cfg:ip-address = ' + str(self.ip_address) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.ip_address is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.SlaveVirtualRouters.SlaveVirtualRouter.SecondaryIpv4Addresses.SecondaryIpv4Address']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:secondary-ipv4-addresses' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.secondary_ipv4_address is not None: for child_ref in self.secondary_ipv4_address: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.SlaveVirtualRouters.SlaveVirtualRouter.SecondaryIpv4Addresses']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') if self.slave_virtual_router_id is None: raise YPYModelError('Key property slave_virtual_router_id is None') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:slave-virtual-router[Cisco-IOS-XR-ipv4-vrrp-cfg:slave-virtual-router-id = ' + str(self.slave_virtual_router_id) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.slave_virtual_router_id is not None: return True if self.accept_mode_disable is not None: return True if self.follow is not None: return True if self.primary_ipv4_address is not None: return True if self.secondary_ipv4_addresses is not None and self.secondary_ipv4_addresses._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.SlaveVirtualRouters.SlaveVirtualRouter']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:slave-virtual-routers' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.slave_virtual_router is not None: for child_ref in self.slave_virtual_router: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.SlaveVirtualRouters']['meta_info'] class Version2(object): """ Version 2 VRRP configuration .. attribute:: virtual_routers The VRRP virtual router configuration table **type**\: :py:class:`VirtualRouters <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.virtual_routers = Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters() self.virtual_routers.parent = self class VirtualRouters(object): """ The VRRP virtual router configuration table .. attribute:: virtual_router The VRRP virtual router being configured **type**\: list of :py:class:`VirtualRouter <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.virtual_router = YList() self.virtual_router.parent = self self.virtual_router.name = 'virtual_router' class VirtualRouter(object): """ The VRRP virtual router being configured .. attribute:: vr_id <key> VRID Virtual Router Identifier **type**\: int **range:** 1..255 .. attribute:: accept_mode_disable Disable Accept Mode for this virtual IPAddress **type**\: :py:class:`Empty<ydk.types.Empty>` .. attribute:: bfd Enable use of Bidirectional Forwarding Detection for this IP **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? .. attribute:: preempt Preempt Master router if higher priority **type**\: int **range:** 0..3600 **default value**\: 0 .. 
attribute:: primary_ipv4_address The Primary VRRP IPv4 address **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? .. attribute:: priority Priority value **type**\: int **range:** 1..254 **default value**\: 100 .. attribute:: secondary_ipv4_addresses The table of VRRP secondary IPv4 addresses **type**\: :py:class:`SecondaryIpv4Addresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter.SecondaryIpv4Addresses>` .. attribute:: session_name VRRP Session Name **type**\: str **length:** 0..16 .. attribute:: text_password Authentication password, 8 chars max **type**\: str .. attribute:: timer Set advertisement timer **type**\: :py:class:`Timer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter.Timer>` .. attribute:: tracked_objects Track an object, reducing priority if it goes down **type**\: :py:class:`TrackedObjects <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter.TrackedObjects>` .. attribute:: tracks Track an item, reducing priority if it goes down **type**\: :py:class:`Tracks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter.Tracks>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.vr_id = None self.accept_mode_disable = None self.bfd = None self.preempt = None self.primary_ipv4_address = None self.priority = None self.secondary_ipv4_addresses = Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter.SecondaryIpv4Addresses() self.secondary_ipv4_addresses.parent = self self.session_name = None self.text_password = <PASSWORD> self.timer = Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter.Timer() self.timer.parent = self self.tracked_objects = Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter.TrackedObjects() self.tracked_objects.parent = self self.tracks = Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter.Tracks() self.tracks.parent = self class Timer(object): """ Set advertisement timer .. attribute:: advertisement_time_in_msec Advertisement time in milliseconds **type**\: int **range:** 100..3000 **units**\: millisecond .. attribute:: advertisement_time_in_sec Advertisement time in seconds **type**\: int **range:** 1..255 **units**\: second .. attribute:: forced TRUE \- Force configured timer values to be used, required when configured in milliseconds **type**\: bool **default value**\: false .. attribute:: in_msec TRUE \- Advertise time configured in milliseconds, FALSE \- Advertise time configured in seconds **type**\: bool **default value**\: false """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.advertisement_time_in_msec = None self.advertisement_time_in_sec = None self.forced = None self.in_msec = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:timer' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.advertisement_time_in_msec is not None: return True if self.advertisement_time_in_sec is not None: return True if self.forced is not None: return True if self.in_msec is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter.Timer']['meta_info'] class SecondaryIpv4Addresses(object): """ The table of VRRP secondary IPv4 addresses .. attribute:: secondary_ipv4_address A VRRP secondary IPv4 address **type**\: list of :py:class:`SecondaryIpv4Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter.SecondaryIpv4Addresses.SecondaryIpv4Address>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.secondary_ipv4_address = YList() self.secondary_ipv4_address.parent = self self.secondary_ipv4_address.name = 'secondary_ipv4_address' class SecondaryIpv4Address(object): """ A VRRP secondary IPv4 address .. attribute:: ip_address <key> VRRP Secondary IPv4 address **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.ip_address = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') if self.ip_address is None: raise YPYModelError('Key property ip_address is None') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:secondary-ipv4-address[Cisco-IOS-XR-ipv4-vrrp-cfg:ip-address = ' + str(self.ip_address) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.ip_address is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter.SecondaryIpv4Addresses.SecondaryIpv4Address']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:secondary-ipv4-addresses' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.secondary_ipv4_address is not None: for child_ref in self.secondary_ipv4_address: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter.SecondaryIpv4Addresses']['meta_info'] class Tracks(object): """ Track an item, reducing priority if it goes down .. 
attribute:: track Object to be tracked **type**\: list of :py:class:`Track <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter.Tracks.Track>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.track = YList() self.track.parent = self self.track.name = 'track' class Track(object): """ Object to be tracked .. attribute:: interface_name <key> Object to be tracked, interface name for interfaces **type**\: str **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm) .. attribute:: priority Priority decrement **type**\: int **range:** 1..254 **mandatory**\: True """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.interface_name = None self.priority = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') if self.interface_name is None: raise YPYModelError('Key property interface_name is None') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:track[Cisco-IOS-XR-ipv4-vrrp-cfg:interface-name = ' + str(self.interface_name) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.interface_name is not None: return True if self.priority is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter.Tracks.Track']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:tracks' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.track is not None: for child_ref in self.track: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter.Tracks']['meta_info'] class TrackedObjects(object): """ Track an object, reducing priority if it goes down .. attribute:: tracked_object Object to be tracked **type**\: list of :py:class:`TrackedObject <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter.TrackedObjects.TrackedObject>` """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.tracked_object = YList() self.tracked_object.parent = self self.tracked_object.name = 'tracked_object' class TrackedObject(object): """ Object to be tracked .. attribute:: object_name <key> Object to be tracked, interface name for interfaces **type**\: str **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+ .. 
attribute:: priority_decrement Priority decrement **type**\: int **range:** 1..254 **mandatory**\: True """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.object_name = None self.priority_decrement = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') if self.object_name is None: raise YPYModelError('Key property object_name is None') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:tracked-object[Cisco-IOS-XR-ipv4-vrrp-cfg:object-name = ' + str(self.object_name) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.object_name is not None: return True if self.priority_decrement is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter.TrackedObjects.TrackedObject']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:tracked-objects' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.tracked_object is not None: for child_ref in self.tracked_object: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter.TrackedObjects']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') if self.vr_id is None: raise YPYModelError('Key property vr_id is None') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:virtual-router[Cisco-IOS-XR-ipv4-vrrp-cfg:vr-id = ' + str(self.vr_id) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.vr_id is not None: return True if self.accept_mode_disable is not None: return True if self.bfd is not None: return True if self.preempt is not None: return True if self.primary_ipv4_address is not None: return True if self.priority is not None: return True if self.secondary_ipv4_addresses is not None and self.secondary_ipv4_addresses._has_data(): return True if self.session_name is not None: return True if self.text_password is not None: return True if self.timer is not None and self.timer._has_data(): return True if self.tracked_objects is not None and self.tracked_objects._has_data(): return True if self.tracks is not None and self.tracks._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters.VirtualRouter']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:virtual-routers' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.virtual_router is not None: for child_ref in self.virtual_router: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version2.VirtualRouters']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:version2' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.virtual_routers is not None and self.virtual_routers._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4.Version2']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:ipv4' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.slave_virtual_routers is not None and self.slave_virtual_routers._has_data(): return True if self.version2 is not None and self.version2._has_data(): return True if self.version3 is not None and self.version3._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Ipv4']['meta_info'] class Bfd(object): """ BFD configuration .. attribute:: detection_multiplier Detection multiplier for BFD sessions created by vrrp **type**\: int **range:** 2..50 .. attribute:: interval Hello interval for BFD sessions created by vrrp **type**\: int **range:** 3..30000 **units**\: millisecond """ _prefix = 'ipv4-vrrp-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.detection_multiplier = None self.interval = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-vrrp-cfg:bfd' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.detection_multiplier is not None: return True if self.interval is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface.Bfd']['meta_info'] @property def _common_path(self): if self.interface_name is None: raise YPYModelError('Key property interface_name is None') return '/Cisco-IOS-XR-ipv4-vrrp-cfg:vrrp/Cisco-IOS-XR-ipv4-vrrp-cfg:interfaces/Cisco-IOS-XR-ipv4-vrrp-cfg:interface[Cisco-IOS-XR-ipv4-vrrp-cfg:interface-name = ' + str(self.interface_name) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.interface_name is not None: return True if self.bfd is not None and self.bfd._has_data(): return True if self.delay is not None and self.delay._has_data(): return True if self.ipv4 is not None and self.ipv4._has_data(): return True if self.ipv6 is not None and self.ipv6._has_data(): return True if self.mac_refresh is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces.Interface']['meta_info'] @property def _common_path(self): return '/Cisco-IOS-XR-ipv4-vrrp-cfg:vrrp/Cisco-IOS-XR-ipv4-vrrp-cfg:interfaces' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.interface is not None: for child_ref in self.interface: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp.Interfaces']['meta_info'] @property def _common_path(self): return '/Cisco-IOS-XR-ipv4-vrrp-cfg:vrrp' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.interfaces is not None and self.interfaces._has_data(): return True if self.logging is not None and self.logging._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_vrrp_cfg as meta return meta._meta_table['Vrrp']['meta_info']
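# ------------------------------------------------------------------
# Hedged usage sketch (not part of the generated bindings above): one
# plausible way to build and push a minimal VRRP config with the
# classic ydk-py CRUD workflow. The device address and credentials are
# placeholders, and the exact provider/service API can differ between
# YDK releases.
from ydk.services import CRUDService
from ydk.providers import NetconfServiceProvider
from ydk.models.cisco_ios_xr import Cisco_IOS_XR_ipv4_vrrp_cfg as xr_vrrp


def push_sample_vrrp_config():
    provider = NetconfServiceProvider(address='192.0.2.1',  # placeholder router
                                      port=830,
                                      username='admin',      # placeholder creds
                                      password='admin')
    crud = CRUDService()

    vrrp = xr_vrrp.Vrrp()
    interface = vrrp.interfaces.Interface()
    interface.interface_name = 'GigabitEthernet0/0/0/0'

    router = interface.ipv4.version3.virtual_routers.VirtualRouter()
    router.vr_id = 10
    router.priority = 110
    router.primary_ipv4_address = '10.0.0.254'
    interface.ipv4.version3.virtual_routers.virtual_router.append(router)

    vrrp.interfaces.interface.append(interface)
    crud.create(provider, vrrp)  # push the whole Vrrp subtree in one call
    provider.close()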
StarcoderdataPython
118721
#! /usr/bin/env python

# --------------------------------------------------------------------

import re
from epydoc import docstringparser as dsp

CYTHON_SIGNATURE_RE = re.compile(
    # Class name (for builtin methods)
    r'^\s*((?P<class>\w+)\.)?' +
    # The function name
    r'(?P<func>\w+)' +
    # The parameters
    r'\(((?P<self>(?:self|cls|mcs)),?)?(?P<params>.*)\)' +
    # The return value (optional)
    r'(\s*(->)\s*(?P<return>\w+(?:\s*\w+)))?' +
    # The end marker
    r'\s*(?:\n|$)')

parse_signature = dsp.parse_function_signature


def parse_function_signature(func_doc, doc_source, docformat, parse_errors):
    PYTHON_SIGNATURE_RE = dsp._SIGNATURE_RE
    assert PYTHON_SIGNATURE_RE is not CYTHON_SIGNATURE_RE
    try:
        dsp._SIGNATURE_RE = CYTHON_SIGNATURE_RE
        found = parse_signature(func_doc, doc_source,
                                docformat, parse_errors)
        dsp._SIGNATURE_RE = PYTHON_SIGNATURE_RE
        if not found:
            found = parse_signature(func_doc, doc_source,
                                    docformat, parse_errors)
        return found
    finally:
        dsp._SIGNATURE_RE = PYTHON_SIGNATURE_RE

dsp.parse_function_signature = parse_function_signature

# --------------------------------------------------------------------

from epydoc.cli import cli
cli()

# --------------------------------------------------------------------
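# --------------------------------------------------------------------
# Hedged illustration (not from epydoc itself): what the Cython-style
# signature regex above accepts. The docstring first line below is a
# made-up example, not a real API.
_m = CYTHON_SIGNATURE_RE.match('MyType.frob(self, x, y) -> int\n')
assert _m is not None
assert _m.group('class') == 'MyType'
assert _m.group('func') == 'frob'
assert _m.group('return') == 'int'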
StarcoderdataPython
4160
from torch.optim.lr_scheduler import StepLR, ReduceLROnPlateau, OneCycleLR


def step_lr(optimizer, step_size, gamma=0.1, last_epoch=-1):
    """Create LR step scheduler.

    Args:
        optimizer (torch.optim): Model optimizer.
        step_size (int): Frequency for changing learning rate.
        gamma (float): Factor for changing learning rate. (default: 0.1)
        last_epoch (int): The index of last epoch. (default: -1)

    Returns:
        StepLR: Learning rate scheduler.
    """
    return StepLR(optimizer, step_size=step_size, gamma=gamma, last_epoch=last_epoch)


def reduce_lr_on_plateau(optimizer, factor=0.1, patience=10, verbose=False, min_lr=0):
    """Create LR plateau reduction scheduler.

    Args:
        optimizer (torch.optim): Model optimizer.
        factor (float, optional): Factor by which the learning rate will be
            reduced. (default: 0.1)
        patience (int, optional): Number of epochs with no improvement after
            which the learning rate will be reduced. (default: 10)
        verbose (bool, optional): If True, prints a message to stdout for each
            update. (default: False)
        min_lr (float, optional): A scalar or a list of scalars. A lower bound
            on the learning rate of all param groups or each group
            respectively. (default: 0)

    Returns:
        ReduceLROnPlateau instance.
    """
    return ReduceLROnPlateau(
        optimizer, factor=factor, patience=patience, verbose=verbose, min_lr=min_lr
    )


def one_cycle_lr(
    optimizer, max_lr, epochs, steps_per_epoch, pct_start=0.5,
    div_factor=10.0, final_div_factor=10000
):
    """Create One Cycle Policy for Learning Rate.

    Args:
        optimizer (torch.optim): Model optimizer.
        max_lr (float): Upper learning rate boundary in the cycle.
        epochs (int): The number of epochs to train for. This is used along
            with steps_per_epoch in order to infer the total number of steps
            in the cycle.
        steps_per_epoch (int): The number of steps per epoch to train for.
            This is used along with epochs in order to infer the total number
            of steps in the cycle.
        pct_start (float, optional): The percentage of the cycle (in number of
            steps) spent increasing the learning rate. (default: 0.5)
        div_factor (float, optional): Determines the initial learning rate via
            initial_lr = max_lr / div_factor. (default: 10.0)
        final_div_factor (float, optional): Determines the minimum learning
            rate via min_lr = initial_lr / final_div_factor. (default: 1e4)

    Returns:
        OneCycleLR instance.
    """
    return OneCycleLR(
        optimizer,
        max_lr,
        epochs=epochs,
        steps_per_epoch=steps_per_epoch,
        pct_start=pct_start,
        div_factor=div_factor,
        final_div_factor=final_div_factor
    )
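# Hedged usage sketch: wiring one_cycle_lr into a toy training loop. The
# model, loader sizes, and hyper-parameters below are illustrative only.
import torch
import torch.nn as nn

model = nn.Linear(10, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
epochs, steps_per_epoch = 3, 100
scheduler = one_cycle_lr(optimizer, max_lr=0.1, epochs=epochs,
                         steps_per_epoch=steps_per_epoch)
for _ in range(epochs):
    for _ in range(steps_per_epoch):
        optimizer.step()   # after loss.backward() in a real loop
        scheduler.step()   # OneCycleLR steps once per batch, not per epoch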
StarcoderdataPython
165772
import cv2
import numpy as np


def main():
    """Simple motion detector: diff consecutive webcam frames and outline changes."""
    # window_name = "Cam feed"
    # cv2.namedWindow(window_name)
    cap = cv2.VideoCapture(0)

    # Optional recording setup (left disabled in the original):
    # filename = 'F:\sample.avi'
    # codec = cv2.VideoWriter_fourcc('X', 'V', 'I', 'D')
    # framerate = 30
    # resolution = (500, 500)
    # VideoFileOutput = cv2.VideoWriter(filename, codec, framerate, resolution)

    if cap.isOpened():
        ret, frame = cap.read()
    else:
        ret = False

    # Two consecutive frames to difference against each other.
    ret, frame1 = cap.read()
    ret, frame2 = cap.read()

    while ret:
        ret, frame = cap.read()  # only needed when recording is enabled
        # VideoFileOutput.write(frame)

        # Difference -> grayscale -> blur -> threshold -> dilate -> contours.
        d = cv2.absdiff(frame1, frame2)
        grey = cv2.cvtColor(d, cv2.COLOR_BGR2GRAY)
        blur = cv2.GaussianBlur(grey, (5, 5), 0)
        ret, th = cv2.threshold(blur, 20, 255, cv2.THRESH_BINARY)
        dilated = cv2.dilate(th, np.ones((3, 3), np.uint8), iterations=3)

        # findContours returns 3 values in OpenCV 3 and 2 in OpenCV 4; the
        # contour list is always the second-to-last element.
        found = cv2.findContours(dilated, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
        contours = found[-2]
        cv2.drawContours(frame1, contours, -1, (0, 255, 0), 2)

        # cv2.imshow("win1", frame2)
        cv2.imshow("inter", frame1)
        if cv2.waitKey(40) == 27:  # Esc to quit
            break

        frame1 = frame2
        ret, frame2 = cap.read()

    cv2.destroyAllWindows()
    # VideoFileOutput.release()
    cap.release()


if __name__ == "__main__":
    main()
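# Hedged refinement sketch: the raw frame difference above draws every
# contour, including sensor noise. A common follow-up is to keep only
# contours above a minimum area before drawing; the threshold value is a
# tunable guess, not part of the original script.
def significant_contours(contours, min_area=500):
    """Drop tiny noise contours from a motion-detection pass."""
    return [c for c in contours if cv2.contourArea(c) >= min_area]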
StarcoderdataPython
90094
<reponame>homata/geodjango-hands-on
# -*- coding: utf-8 -*-
import os
from django.contrib.gis.utils import LayerMapping
from world.models import Busstop

# Mapping between the Busstop model fields and the source file's columns.
mapping = {
    'p11_001': 'P11_001', 'p11_002': 'P11_002',
    'p11_003_1': 'P11_003_1', 'p11_003_2': 'P11_003_2',
    'p11_003_3': 'P11_003_3', 'p11_003_4': 'P11_003_4',
    'p11_003_5': 'P11_003_5', 'p11_003_6': 'P11_003_6',
    'p11_003_7': 'P11_003_7', 'p11_003_8': 'P11_003_8',
    'p11_003_9': 'P11_003_9', 'p11_003_10': 'P11_003_10',
    'p11_003_11': 'P11_003_11', 'p11_003_12': 'P11_003_12',
    'p11_003_13': 'P11_003_13', 'p11_003_14': 'P11_003_14',
    'p11_003_15': 'P11_003_15', 'p11_003_16': 'P11_003_16',
    'p11_003_17': 'P11_003_17', 'p11_003_18': 'P11_003_18',
    'p11_003_19': 'P11_003_19',
    'p11_004_1': 'P11_004_1', 'p11_004_2': 'P11_004_2',
    'p11_004_3': 'P11_004_3', 'p11_004_4': 'P11_004_4',
    'p11_004_5': 'P11_004_5', 'p11_004_6': 'P11_004_6',
    'p11_004_7': 'P11_004_7', 'p11_004_8': 'P11_004_8',
    'p11_004_9': 'P11_004_9', 'p11_004_10': 'P11_004_10',
    'p11_004_11': 'P11_004_11', 'p11_004_12': 'P11_004_12',
    'p11_004_13': 'P11_004_13', 'p11_004_14': 'P11_004_14',
    'p11_004_15': 'P11_004_15', 'p11_004_16': 'P11_004_16',
    'p11_004_17': 'P11_004_17', 'p11_004_18': 'P11_004_18',
    'p11_004_19': 'P11_004_19',
    'geom': 'POINT',
}

# Path to the GeoJSON source file.
geojson_file = os.path.abspath(
    os.path.join(os.path.dirname(__file__), 'data', 'busstop.geojson')
)


# Run the import.
def run(verbose=True):
    lm = LayerMapping(Busstop, geojson_file, mapping,
                      transform=False, encoding='UTF-8')
    lm.save(strict=True, verbose=verbose)
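# Hedged usage note: loaders like this are typically run once from a
# Django shell after the Busstop migration has been applied. The app
# layout follows the standard GeoDjango tutorial and is an assumption
# here, not something this file guarantees.
#
#   $ python manage.py shell
#   >>> from world import load
#   >>> load.run()
#   >>> from world.models import Busstop
#   >>> Busstop.objects.count()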
StarcoderdataPython
181866
<reponame>cdev-framework/cdev-sdk<filename>src/core/default/commands/relationaldb/utils.py
from typing import Tuple

from core.constructs.workspace import Workspace
from core.default.resources.simple.relational_db import simple_relational_db_model

RUUID = "cdev::simple::relationaldb"


def get_db_info_from_cdev_name(
    component_name: str, cdev_database_name: str
) -> Tuple[str, str, str]:
    """Look up the cloud identifiers for a Cdev relational db resource.

    Args:
        component_name (str): Name of the component that the resource is in.
        cdev_database_name (str): Name of the resource.

    Returns:
        Tuple[str, str, str]: (cluster_arn, secret_arn, db_name)
    """
    try:
        ws = Workspace.instance()

        cluster_arn = ws.get_backend().get_cloud_output_value_by_name(
            ws.get_resource_state_uuid(),
            component_name,
            RUUID,
            cdev_database_name,
            "cluster_arn",
        )

        secret_arn = ws.get_backend().get_cloud_output_value_by_name(
            ws.get_resource_state_uuid(),
            component_name,
            RUUID,
            cdev_database_name,
            "secret_arn",
        )

        db_resource_model: simple_relational_db_model = (
            ws.get_backend().get_resource_by_name(
                ws.get_resource_state_uuid(),
                component_name,
                RUUID,
                cdev_database_name,
            )
        )

        return (cluster_arn, secret_arn, db_resource_model.DatabaseName)
    except Exception as e:
        # Note: lookup failures are only printed, so callers receive None.
        print(e)
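# Hedged usage sketch: the (cluster_arn, secret_arn, db_name) triple that
# get_db_info_from_cdev_name returns is shaped for the Aurora Serverless
# Data API. The component/resource names below are placeholders, and this
# helper is not part of the module above.
import boto3


def run_query(component_name: str, cdev_database_name: str, sql: str):
    cluster_arn, secret_arn, db_name = get_db_info_from_cdev_name(
        component_name, cdev_database_name
    )
    client = boto3.client("rds-data")
    return client.execute_statement(
        resourceArn=cluster_arn,
        secretArn=secret_arn,
        database=db_name,
        sql=sql,
    )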
StarcoderdataPython
3319913
from django import forms

from . import models


class MovieForm(forms.ModelForm):
    class Meta:
        model = models.Movie
        fields = [
            'imdb_id',
            'plot',
            'runtime',
            'rated',
            'title',
            'year',
        ]
        exclude = [
            'last_reviewed',
        ]


class InitialReviewForm(forms.ModelForm):
    class Meta:
        model = models.InitialReview
        fields = [
            'review_text',
            'rating',
            'watch_for',
        ]


class RewatchReviewForm(forms.ModelForm):
    class Meta:
        model = models.RewatchReview
        fields = [
            'review_text',
            'rating',
            'discovery',
        ]
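# Hedged usage sketch: how a view might drive InitialReviewForm. The view
# name, URL name, template path, and the assumption that the review model
# has a movie foreign key are all hypothetical, not part of this app.
from django.shortcuts import redirect, render


def review_movie(request, movie_id):
    movie = models.Movie.objects.get(pk=movie_id)
    form = InitialReviewForm(request.POST or None)
    if form.is_valid():
        review = form.save(commit=False)
        review.movie = movie  # assumes the review model has a movie FK
        review.save()
        return redirect("movie-detail", pk=movie.pk)
    return render(request, "movies/review_form.html", {"form": form})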
StarcoderdataPython
3209259
class research():
    def __init__(self):
        pass
StarcoderdataPython
71528
<reponame>raildo/keystone-1
# Copyright 2012 OpenStack Foundation
# Copyright 2012 <NAME>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import datetime
import functools
import os
import time
import uuid

from keystone.common import utils
from keystone import config
from keystone import exception
from keystone import service
from keystone import tests


CONF = config.CONF

TZ = None


def timezone(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        tz_original = os.environ.get('TZ')
        try:
            if TZ:
                os.environ['TZ'] = TZ
                time.tzset()
            return func(*args, **kwargs)
        finally:
            if TZ:
                if tz_original:
                    os.environ['TZ'] = tz_original
                else:
                    if 'TZ' in os.environ:
                        del os.environ['TZ']
                time.tzset()
    return wrapper


class UtilsTestCase(tests.TestCase):

    OPTIONAL = object()

    def test_hash(self):
        password = '<PASSWORD>'
        wrong = '<PASSWORD>'  # Two wrongs don't make a right
        hashed = utils.hash_password(password)
        self.assertTrue(utils.check_password(password, hashed))
        self.assertFalse(utils.check_password(wrong, hashed))

    def test_verify_normal_password_strict(self):
        self.config_fixture.config(strict_password_check=False)
        normal_password = uuid.uuid4().hex
        verified = utils.verify_length_and_trunc_password(normal_password)
        self.assertEqual(normal_password, verified)

    def test_verify_long_password_strict(self):
        self.config_fixture.config(strict_password_check=False)
        self.config_fixture.config(group='identity', max_password_length=5)
        max_length = CONF.identity.max_password_length
        invalid_password = '<PASSWORD>'
        truncated = utils.verify_length_and_trunc_password(invalid_password)
        self.assertEqual(invalid_password[:max_length], truncated)

    def test_verify_long_password_strict_raises_exception(self):
        self.config_fixture.config(strict_password_check=True)
        self.config_fixture.config(group='identity', max_password_length=5)
        invalid_password = '<PASSWORD>'
        self.assertRaises(exception.PasswordVerificationError,
                          utils.verify_length_and_trunc_password,
                          invalid_password)

    def test_hash_long_password_truncation(self):
        self.config_fixture.config(strict_password_check=False)
        invalid_length_password = '0' * 9999999
        hashed = utils.hash_password(invalid_length_password)
        self.assertTrue(utils.check_password(invalid_length_password, hashed))

    def test_hash_long_password_strict(self):
        self.config_fixture.config(strict_password_check=True)
        invalid_length_password = '0' * 9999999
        self.assertRaises(exception.PasswordVerificationError,
                          utils.hash_password,
                          invalid_length_password)

    def _create_test_user(self, password=OPTIONAL):
        user = {"name": "hthtest"}
        if password is not self.OPTIONAL:
            user['password'] = password
        return user

    def test_hash_user_password_without_password(self):
        user = self._create_test_user()
        hashed = utils.hash_user_password(user)
        self.assertEqual(user, hashed)

    def test_hash_user_password_with_null_password(self):
        user = self._create_test_user(password=None)
        hashed = utils.hash_user_password(user)
        self.assertEqual(user, hashed)

    def test_hash_user_password_with_empty_password(self):
        password = ''
        user = self._create_test_user(password=password)
        user_hashed = utils.hash_user_password(user)
        password_hashed = user_hashed['password']
        self.assertTrue(utils.check_password(password, password_hashed))

    def test_hash_edge_cases(self):
        hashed = utils.hash_password('secret')
        self.assertFalse(utils.check_password('', hashed))
        self.assertFalse(utils.check_password(None, hashed))

    def test_hash_unicode(self):
        password = u'<PASSWORD>'
        wrong = 'Comment ça va'
        hashed = utils.hash_password(password)
        self.assertTrue(utils.check_password(password, hashed))
        self.assertFalse(utils.check_password(wrong, hashed))

    def test_auth_str_equal(self):
        self.assertTrue(utils.auth_str_equal('abc123', '<PASSWORD>'))
        self.assertFalse(utils.auth_str_equal('a', 'aaaaa'))
        self.assertFalse(utils.auth_str_equal('aaaaa', 'a'))
        self.assertFalse(utils.auth_str_equal('ABC123', 'abc123'))

    def test_unixtime(self):
        global TZ

        @timezone
        def _test_unixtime():
            epoch = utils.unixtime(dt)
            self.assertEqual(epoch, epoch_ans, "TZ=%s" % TZ)

        dt = datetime.datetime(1970, 1, 2, 3, 4, 56, 0)
        epoch_ans = 56 + 4 * 60 + 3 * 3600 + 86400
        for d in ['+0', '-11', '-8', '-5', '+5', '+8', '+14']:
            TZ = 'UTC' + d
            _test_unixtime()


class ServiceHelperTests(tests.TestCase):

    @service.fail_gracefully
    def _do_test(self):
        raise Exception("Test Exc")

    def test_fail_gracefully(self):
        self.assertRaises(tests.UnexpectedExit, self._do_test)


class LimitingReaderTests(tests.TestCase):

    def test_read_default_value(self):

        class FakeData(object):
            def read(self, *args, **kwargs):
                self.read_args = args
                self.read_kwargs = kwargs
                return 'helloworld'

        data = FakeData()
        utils.LimitingReader(data, 100)

        self.assertEqual(data.read(), 'helloworld')
        self.assertEqual(len(data.read_args), 0)
        self.assertEqual(len(data.read_kwargs), 0)

        self.assertEqual(data.read(10), 'helloworld')
        self.assertEqual(len(data.read_args), 1)
        self.assertEqual(len(data.read_kwargs), 0)
        self.assertEqual(data.read_args[0], 10)
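
# A minimal standalone sketch (not from the original file) of the TZ/tzset
# pattern the `timezone` decorator above relies on: temporarily switch the
# process timezone, run a check, then restore the environment. Pure stdlib;
# note that time.tzset() is POSIX-only, and 'UTC+5' is just an illustrative
# POSIX TZ string.
import os
import time


def with_tz(tz, func):
    original = os.environ.get('TZ')
    try:
        os.environ['TZ'] = tz
        time.tzset()
        return func()
    finally:
        if original is not None:
            os.environ['TZ'] = original
        else:
            os.environ.pop('TZ', None)
        time.tzset()


if __name__ == '__main__':
    # time.timezone reports the active zone's UTC offset in seconds
    # (positive west of Greenwich under the POSIX sign convention).
    print(with_tz('UTC+5', lambda: time.timezone))  # 18000 on POSIX systems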
StarcoderdataPython
145201
from __future__ import print_function

import torch.backends.cudnn as cudnn
import torch
import torchvision.transforms as transforms

import argparse
import os
import random
import sys
import pprint
import datetime
import dateutil
import dateutil.tz

from StackGAN.code.miscc.datasets import TextDataset
from StackGAN.code.miscc.config import cfg, cfg_from_file
from StackGAN.code.miscc.utils import mkdir_p
from StackGAN.code.trainer import GANTrainer

cfg_from_file('/home/iitm/a-PyTorch-Tutorial-to-Image-Captioning/StackGAN/code/cfg/coco_eval.yml')
cfg.GPU_ID = '0'
cfg.DATA_DIR = ''
print('Using config:')
pprint.pprint(cfg)

manualSeed = random.randint(1, 10000)
random.seed(manualSeed)
torch.manual_seed(manualSeed)
if cfg.CUDA:
    torch.cuda.manual_seed_all(manualSeed)

now = datetime.datetime.now(dateutil.tz.tzlocal())
timestamp = now.strftime('%Y_%m_%d_%H_%M_%S')
output_dir = '../output/%s_%s_%s' % \
    (cfg.DATASET_NAME, cfg.CONFIG_NAME, timestamp)

num_gpu = len(cfg.GPU_ID.split(','))
datapath = '%s' % (cfg.DATA_PATH)
algo = GANTrainer(output_dir)


def sample(datapath):
    return algo.sample(datapath + '/mini_batch_captions.t7', cfg.STAGE)
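
# A minimal driving sketch (hedged, not in the original file): the module
# builds `algo` at import time but never invokes sample(); something like
# the following is the presumed entry point, assuming cfg.DATA_PATH points
# at a directory containing mini_batch_captions.t7.
if __name__ == '__main__':
    sample(datapath)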
StarcoderdataPython
3292107
from .core import Manager
StarcoderdataPython
89592
<reponame>juzb/DeeProtein<gh_stars>10-100
import subprocess

while True:
    # name = input('Please enter four letter name for this run: ')
    name = "AAAA"
    sequence = input('Please enter the sequence to analyze: ')
    gos = input('Please enter the GO terms to analyze separated by commas: ')

    with open('/results/tmp/masked_dataset.txt', 'w') as ofile:
        ofile.write('{};{};{};{};{};{}'.format(name, 'A', gos, sequence,
                                               '.' * len(sequence),
                                               '_' * len(sequence)))

    subprocess.call(['bash', '/code/analyze_sensitivity.sh', gos])
    print('Performed sensitivity analysis. '
          'Please find the results in /results\n\n')
StarcoderdataPython
3389832
from tensorflow.python.keras.metrics import MeanMetricWrapper


# metric module to monitor arbitrary loss
class Metric(MeanMetricWrapper):
    """ A metric module to monitor arbitrary loss. """

    def __init__(self, metric, name, dtype=None, **kwargs):
        self.metric = metric
        # self.name = name
        # super(Metric, self).__init__(self.metric_func(), name, dtype=dtype, **kwargs)
        # The wrapped function ignores y_true/y_pred and always returns the
        # monitored tensor; there seems to be a bug in the tf.keras
        # implementation that this works around.
        super(Metric, self).__init__(lambda x, y: metric, name, dtype=dtype,
                                     **kwargs)

    def get_config(self):
        return {"metric": self.metric_func(), "name": self.name,
                "dtype": self.dtype}

    # Can be removed
    def metric_func(self):
        def pass_metric(y_true, y_pred):
            return self.metric
        return pass_metric
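
# A minimal sketch (not from the original file) of how Metric behaves:
# the wrapper ignores y_true/y_pred and mean-accumulates the monitored
# value, so result() reports that value. Written against the same
# TF 1.x-era tf.python.keras import path used above and assumes eager
# execution; the intended production use is presumably
# model.compile(..., metrics=[Metric(some_loss_tensor, 'aux_loss')]).
import tensorflow as tf

if __name__ == '__main__':
    watched = tf.constant(0.25)           # stand-in for an auxiliary loss
    m = Metric(watched, name='aux_loss')
    m.update_state(tf.zeros(4), tf.zeros(4))  # labels/preds are ignored
    print(float(m.result()))              # 0.25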
StarcoderdataPython
12345
<gh_stars>10-100
###############################################################################
# Name: choicedlg.py                                                          #
# Purpose: Generic Choice Dialog                                              #
# Author: <NAME> <<EMAIL>>                                                    #
# Copyright: (c) 2008 <NAME> <<EMAIL>>                                        #
# License: wxWindows License                                                  #
###############################################################################

"""
Editra Control Library: Choice Dialog

A generic choice dialog that uses a wx.Choice control to display its choices.

@summary: Generic Choice Dialog

"""

__author__ = "<NAME> <<EMAIL>>"
__svnid__ = "$Id: choicedlg.py 63820 2010-04-01 21:46:22Z CJP $"
__revision__ = "$Revision: 63820 $"

__all__ = ['ChoiceDialog',]

#--------------------------------------------------------------------------#
# Imports
import wx

#--------------------------------------------------------------------------#
# Globals
ChoiceDialogNameStr = u"ChoiceDialog"

#--------------------------------------------------------------------------#

class ChoiceDialog(wx.Dialog):
    """Dialog with a wx.Choice control for showing a list of choices"""
    def __init__(self, parent, id=wx.ID_ANY, msg=u'', title=u'',
                 choices=None, default=u'',
                 pos=wx.DefaultPosition, size=wx.DefaultSize,
                 style=0, name=ChoiceDialogNameStr):
        """Create the choice dialog
        @keyword msg: Dialog Message
        @keyword title: Dialog Title
        @keyword choices: list of strings
        @keyword default: Default selection

        """
        wx.Dialog.__init__(self, parent, id, title,
                           style=wx.CAPTION, pos=pos, size=size, name=name)

        # Attributes
        self._panel = ChoicePanel(self, msg=msg,
                                  choices=choices,
                                  default=default,
                                  style=style)

        # Layout
        self.__DoLayout()

    def __DoLayout(self):
        """Layout the dialogs controls"""
        sizer = wx.BoxSizer(wx.HORIZONTAL)
        sizer.Add(self._panel, 1, wx.EXPAND)
        self.SetSizer(sizer)
        self.SetAutoLayout(True)
        self.SetInitialSize()

    def SetChoices(self, choices):
        """Set the dialogs choices
        @param choices: list of strings

        """
        self._panel.SetChoices(choices)

    def GetSelection(self):
        """Get the selected choice
        @return: string

        """
        return self._panel.GetSelection()

    def GetStringSelection(self):
        """Get the chosen string
        @return: string

        """
        return self._panel.GetStringSelection()

    def SetBitmap(self, bmp):
        """Set the bitmap used in the dialog
        @param bmp: wx.Bitmap

        """
        self._panel.SetBitmap(bmp)

    def SetStringSelection(self, sel):
        """Set the selected choice
        @param sel: string

        """
        self._panel.SetStringSelection(sel)

    def SetSelection(self, sel):
        """Set the selected choice
        @param sel: string

        """
        self._panel.SetSelection(sel)

#--------------------------------------------------------------------------#

class ChoicePanel(wx.Panel):
    """Generic Choice dialog panel"""
    def __init__(self, parent, msg=u'', choices=list(),
                 default=u'', style=wx.OK|wx.CANCEL):
        """Create the panel
        @keyword msg: Display message
        @keyword choices: list of strings
        @keyword default: default selection
        @keyword style: dialog style

        """
        wx.Panel.__init__(self, parent)

        # Attributes
        self._msg = msg
        self._choices = wx.Choice(self, wx.ID_ANY)
        self._selection = default
        self._selidx = 0
        self._bmp = None
        self._buttons = list()

        # Setup
        self._choices.SetItems(choices)
        if default in choices:
            self._choices.SetStringSelection(default)
            self._selidx = self._choices.GetSelection()
        else:
            self._choices.SetSelection(0)
            self._selidx = 0
            self._selection = self._choices.GetStringSelection()

        # Setup Buttons
        for btn, id_ in ((wx.OK, wx.ID_OK), (wx.CANCEL, wx.ID_CANCEL),
                         (wx.YES, wx.ID_YES), (wx.NO, wx.ID_NO)):
            if btn & style:
                button = wx.Button(self, id_)
                self._buttons.append(button)

        if not len(self._buttons):
            self._buttons.append(wx.Button(self, wx.ID_OK))
            self._buttons.append(wx.Button(self, wx.ID_CANCEL))

        # Layout
        self.__DoLayout(style)

        # Event Handlers
        self.Bind(wx.EVT_CHOICE, self.OnChoice, self._choices)
        self.Bind(wx.EVT_BUTTON, self.OnButton)

    def __DoLayout(self, style):
        """Layout the panel"""
        hsizer = wx.BoxSizer(wx.HORIZONTAL)
        vsizer = wx.BoxSizer(wx.VERTICAL)
        caption = wx.StaticText(self, label=self._msg)

        # Layout the buttons
        bsizer = wx.StdDialogButtonSizer()
        for button in self._buttons:
            bsizer.AddButton(button)
            bid = button.GetId()
            if bid in (wx.ID_NO, wx.ID_YES):
                if wx.NO_DEFAULT & style:
                    if bid == wx.ID_NO:
                        button.SetDefault()
                else:
                    if bid == wx.ID_YES:
                        button.SetDefault()
            elif bid == wx.ID_OK:
                button.SetDefault()
        bsizer.Realize()

        vsizer.AddMany([((10, 10), 0), (caption, 0), ((20, 20), 0),
                        (self._choices, 1, wx.EXPAND), ((10, 10), 0),
                        (bsizer, 1, wx.EXPAND), ((10, 10), 0)])

        icon_id = wx.ART_INFORMATION
        for i_id, a_id in ((wx.ICON_ERROR, wx.ART_ERROR),
                           (wx.ICON_WARNING, wx.ART_WARNING)):
            if i_id & style:
                icon_id = a_id
                break

        icon = wx.ArtProvider.GetBitmap(icon_id, wx.ART_MESSAGE_BOX, (64, 64))
        self._bmp = wx.StaticBitmap(self, bitmap=icon)
        bmpsz = wx.BoxSizer(wx.VERTICAL)
        bmpsz.AddMany([((10, 10), 0),
                       (self._bmp, 0, wx.ALIGN_CENTER_VERTICAL),
                       ((10, 30), 0, wx.EXPAND)])
        hsizer.AddMany([((10, 10), 0), (bmpsz, 0, wx.ALIGN_TOP),
                        ((10, 10), 0), (vsizer, 1), ((10, 10), 0)])

        self.SetSizer(hsizer)
        self.SetInitialSize()
        self.SetAutoLayout(True)

    def GetChoiceControl(self):
        """Get the dialogs choice control
        @return: wx.Choice

        """
        return self._choices

    def GetSelection(self):
        """Get the chosen index
        @return: int

        """
        return self._selidx

    def GetStringSelection(self):
        """Get the chosen string
        @return: string

        """
        return self._selection

    def OnButton(self, evt):
        """Handle button events
        @param evt: wx.EVT_BUTTON
        @type evt: wx.CommandEvent

        """
        self.GetParent().EndModal(evt.GetId())

    def OnChoice(self, evt):
        """Update the selection
        @param evt: wx.EVT_CHOICE
        @type evt: wx.CommandEvent

        """
        if evt.GetEventObject() == self._choices:
            self._selection = self._choices.GetStringSelection()
            self._selidx = self._choices.GetSelection()
        else:
            evt.Skip()

    def SetBitmap(self, bmp):
        """Set the dialogs bitmap
        @param bmp: wx.Bitmap

        """
        self._bmp.SetBitmap(bmp)
        self.Layout()

    def SetChoices(self, choices):
        """Set the dialogs choices
        @param choices: list of strings

        """
        self._choices.SetItems(choices)
        self._choices.SetSelection(0)
        self._selection = self._choices.GetStringSelection()

    def SetSelection(self, sel):
        """Set the selected choice
        @param sel: int

        """
        self._choices.SetSelection(sel)
        self._selection = self._choices.GetStringSelection()
        self._selidx = self._choices.GetSelection()

    def SetStringSelection(self, sel):
        """Set the selected choice
        @param sel: string

        """
        self._choices.SetStringSelection(sel)
        self._selection = self._choices.GetStringSelection()
        self._selidx = self._choices.GetSelection()

#--------------------------------------------------------------------------#
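
# A minimal usage sketch (not from the original file), assuming this module
# is importable as `choicedlg`; the message and choices are illustrative.
# ShowModal() returns the id of the pressed button, since ChoicePanel ends
# the modal loop with EndModal(evt.GetId()).
#
#     import wx
#     from choicedlg import ChoiceDialog
#
#     app = wx.App(False)
#     dlg = ChoiceDialog(None, msg=u'Pick a flavor', title=u'Flavors',
#                        choices=[u'vanilla', u'chocolate'],
#                        default=u'vanilla', style=wx.OK | wx.CANCEL)
#     if dlg.ShowModal() == wx.ID_OK:
#         print(dlg.GetStringSelection())
#     dlg.Destroy()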
StarcoderdataPython
1795852
def calculate_longest_prefix_suffix(pattern, lps):
    length = len(pattern)
    lps[0] = 0  # lps of 0th index is always 0
    l = 0
    pos = 1
    while pos < length:
        if pattern[pos] == pattern[l]:
            lps[pos] = l + 1
            l += 1
            pos += 1
        else:
            if l != 0:
                l = lps[l - 1]
            else:
                lps[pos] = 0
                pos += 1


def KMP(pattern, input_str):
    '''
    Time Complexity: O(N)
    '''
    flag = 0
    input_len = len(input_str)
    pat_len = len(pattern)
    if not pat_len:
        return 0
    lps = [0] * pat_len
    # generate lps
    calculate_longest_prefix_suffix(pattern, lps)
    i = 0
    j = 0
    while i < input_len:
        if input_str[i] == pattern[j]:
            i += 1
            j += 1
        else:
            if j != 0:
                j = lps[j - 1]
            else:
                i += 1
        if j == pat_len:
            print("Match found at index", (i - j))
            flag = 1
            j = lps[j - 1]
    if flag == 0:
        print("Match not found!!")


if __name__ == "__main__":
    input_str = "ABABDABACDABABCABAB"
    pattern = "ABABC"
    print("Input string =", input_str)
    print("Pattern to be searched =", pattern)
    # Function call
    KMP(pattern, input_str)
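
# A small worked check of the failure function (added example, not from the
# original file): for "ABABC" the lps table is [0, 0, 1, 2, 0] -- e.g. at
# index 3 the prefix "ABAB" has "AB" as its longest proper prefix that is
# also a suffix, hence lps[3] == 2.
if __name__ == "__main__":
    demo_pattern = "ABABC"
    demo_lps = [0] * len(demo_pattern)
    calculate_longest_prefix_suffix(demo_pattern, demo_lps)
    assert demo_lps == [0, 0, 1, 2, 0]
    print("lps table for", demo_pattern, "=", demo_lps)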
StarcoderdataPython
4818282
#! python3
# -*- encoding: utf-8 -*-
'''
Current module: tests.test_driver

Rough version history:
v1.0    Original version to use

********************************************************************
    @AUTHOR:  Administrator-<NAME>(罗科峰)
    MAIL:     <EMAIL>
    RCS:      tests.test_driver, v1.0 2018-09-18
    FROM:     2018-09-18
********************************************************************
======================================================================

Provide a function for the automation test

'''

import unittest, os
from rtsf.p_executer import TestRunner
from rtsf.p_applog import logger
from appuidriver.driver import LocalDriver, RemoteDriver
from appuidriver.remote.AppiumJs import AppiumJs
from webuidriver.remote.SeleniumJar import SeleniumJar


class TestDriver(unittest.TestCase):
    '''
    @note: adb version 1.0.39;
        %ANDROID_HOME% = D:\auto\buffer\test\test_rtsf_web\android;
        天天模拟器 v2.5.6
    '''

    @classmethod
    def setUpClass(cls):
        # __tool_path = r'D:\auto\buffer\test\test_rtsf_web'
        __tool_path = r'C:\d_disk\auto\buffer\test\tools'

        cls.case_file = r'data\test_case.yaml'
        cls.data_driver_case = r'data\data_driver.yaml'

        cls.jar_path = os.path.join(__tool_path, "seleniumjar",
                                    "selenium-server-standalone-3.14.0.jar")
        cls.java_path = "java"

        platform_tools = os.path.join(__tool_path, "android", "platform-tools")
        cls._adb_exe_path = os.path.join(platform_tools, "adb.exe")
        cls._aapt_exe_path = os.path.join(platform_tools, "aapt.exe")
        cls._apk_abs_path = os.path.join(__tool_path, "android",
                                         "ApiDemos-debug.apk")
        cls._app_package = 'io.appium.android.apis'
        cls._app_activity = '.ApiDemos'

    def test_LocalDriver(self):
        LocalDriver._adb_exe_path = self._adb_exe_path
        LocalDriver._aapt_exe_path = self._aapt_exe_path
        LocalDriver._apk_abs_path = self._apk_abs_path
        LocalDriver._app_package = self._app_package
        LocalDriver._app_activity = self._app_activity

        server = AppiumJs(port=4723).bind_device(device_id="127.0.0.1:6555",
                                                 platform_version="4.4.4")
        server.start_server()

        runner = TestRunner(runner=LocalDriver).run(self.case_file)
        html_report = runner.gen_html_report()
        print(html_report)
        self.assertIsInstance(html_report, (list, tuple))

        server.stop_server()

    def test_LocalDriver_with_datadriver(self):
        LocalDriver._adb_exe_path = self._adb_exe_path
        LocalDriver._aapt_exe_path = self._aapt_exe_path
        LocalDriver._apk_abs_path = self._apk_abs_path
        LocalDriver._app_package = self._app_package
        LocalDriver._app_activity = self._app_activity

        server = AppiumJs(port=4723).bind_device(device_id="127.0.0.1:6555",
                                                 platform_version="4.4.4")
        server.start_server()

        runner = TestRunner(runner=LocalDriver).run(self.data_driver_case)
        html_report = runner.gen_html_report()
        print(html_report)
        self.assertIsInstance(html_report, (list, tuple))

    def test_RemoteDriver(self):
        RemoteDriver._aapt_exe_path = self._aapt_exe_path
        RemoteDriver._apk_abs_path = self._apk_abs_path
        RemoteDriver._app_package = self._app_package
        RemoteDriver._app_activity = self._app_activity

        hub = SeleniumJar(self.jar_path, self.java_path).hub(4444)
        hub.start_server()

        node = AppiumJs(port=4723).bind_device(
            device_id="127.0.0.1:6555",
            platform_version="4.4.4").node("localhost",
                                           hub_address=("localhost", 4444))
        node.start_server()

        runner = TestRunner(runner=RemoteDriver).run(self.case_file)
        html_report = runner.gen_html_report()
        print(html_report)
        self.assertIsInstance(html_report, (list, tuple))

        node.stop_server()
        hub.stop_server()


if __name__ == "__main__":
    # logger.setup_logger("debug")
    # unittest.main()
    suite = unittest.TestSuite()
    suite.addTest(TestDriver("test_LocalDriver_with_datadriver"))
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(suite)
StarcoderdataPython
64931
<filename>ch02/tcp_server.py
from socket import socket, AF_INET, SOCK_STREAM

server_port = 12000
server_socket = socket(AF_INET, SOCK_STREAM)
server_socket.bind(('', server_port))
server_socket.listen(1)
print('The server is ready to receive')

while True:
    connection_socket, client_address = server_socket.accept()
    sentence = connection_socket.recv(2048)
    modified_sentence = sentence.upper()
    connection_socket.send(modified_sentence)
    connection_socket.close()
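
# A minimal companion client sketch (not from the original file) for the
# uppercase-echo server above; assumes the server is running on the same
# host at port 12000. Run it as a separate script.
#
#     from socket import socket, AF_INET, SOCK_STREAM
#
#     client_socket = socket(AF_INET, SOCK_STREAM)
#     client_socket.connect(('127.0.0.1', 12000))
#     client_socket.send(b'hello tcp')
#     print('From server:', client_socket.recv(2048))  # b'HELLO TCP'
#     client_socket.close()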
StarcoderdataPython
3244378
# Copyright 2016 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. '''Generates a test suite from NIST PKITS test descriptions. The output is a set of Type Parameterized Tests which are included by pkits_unittest.h. See pkits_unittest.h for information on using the tests. GoogleTest has a limit of 50 tests per type parameterized testcase, so the tests are split up by section number (this also makes it possible to easily skip sections that pertain to non-implemented features). Usage: generate_tests.py <PKITS.pdf> <output.h> ''' import os import re import subprocess import sys import tempfile def sanitize_name(s): return s.translate(None, ' -') def finalize_test_case(test_case_name, sanitized_test_names, output): output.write('\nWRAPPED_REGISTER_TYPED_TEST_SUITE_P(%s' % test_case_name) for name in sanitized_test_names: output.write(',\n %s' % name) output.write(');\n') def bool_to_str(b): return "true" if b else "false" def make_policies_string(policies): return '"' + ','.join(policies) + '"' def output_test(test_case_name, test_number, raw_test_name, subpart_number, info, certs, crls, sanitized_test_names, output): '''Writes a test case to |output|, and appends the test name to |sanitized_test_names|.''' sanitized_test_name = 'Section%s%s' % (test_number.split('.')[1], sanitize_name(raw_test_name)) subpart_comment = '' if subpart_number is not None: sanitized_test_name += "Subpart%d" % (subpart_number) subpart_comment = ' (Subpart %d)' % (subpart_number) sanitized_test_names.append(sanitized_test_name) certs_formatted = ', '.join('"%s"' % n for n in certs) crls_formatted = ', '.join('"%s"' % n for n in crls) output.write(''' // %(test_number)s %(raw_test_name)s%(subpart_comment)s WRAPPED_TYPED_TEST_P(%(test_case_name)s, %(sanitized_test_name)s) { const char* const certs[] = { %(certs_formatted)s }; const char* const crls[] = { %(crls_formatted)s }; ''' % vars()) default_info = TestInfo(None) output.write('''PkitsTestInfo info; info.test_number = "%s"; info.should_validate = %s; ''' % (test_number, bool_to_str(info.should_validate))) # Output any non-default inputs/outputs. Only properties that differ from # the defaults are written, so as to keep the generated file more readable. 
if info.initial_policy_set != default_info.initial_policy_set: output.write(''' info.SetInitialPolicySet(%s); ''' % make_policies_string(info.initial_policy_set)) if info.initial_explicit_policy != default_info.initial_explicit_policy: output.write(''' info.SetInitialExplicitPolicy(%s); ''' % bool_to_str(info.initial_explicit_policy)) if (info.initial_policy_mapping_inhibit != default_info.initial_policy_mapping_inhibit): output.write(''' info.SetInitialPolicyMappingInhibit(%s); ''' % bool_to_str(info.initial_policy_mapping_inhibit)) if (info.initial_inhibit_any_policy != default_info.initial_inhibit_any_policy): output.write(''' info.SetInitialInhibitAnyPolicy(%s); ''' % bool_to_str(info.initial_inhibit_any_policy)) if (info.user_constrained_policy_set != default_info.user_constrained_policy_set): output.write(''' info.SetUserConstrainedPolicySet(%s); ''' % make_policies_string(info.user_constrained_policy_set)) output.write(''' this->RunTest(certs, crls, info); } ''' % vars()) # Matches a section header, ex: "4.1 Signature Verification" SECTION_MATCHER = re.compile('^\s*(\d+\.\d+)\s+(.+)\s*$') # Matches a test header, ex: "4.1.1 Valid Signatures Test1" TEST_MATCHER = re.compile('^\s*(\d+\.\d+.\d+)\s+(.+)\s*$') # Matches the various headers in a test specification. EXPECTED_HEADER_MATCHER = re.compile('^\s*Expected Result:') PROCEDURE_HEADER_MATCHER = re.compile('^\s*Procedure:') PATH_HEADER_MATCHER = re.compile('^\s*Certification Path:') # Matches the Procedure text if using default settings. USING_DEFAULT_SETTINGS_MATCHER = re.compile( '^.*using the \s*default settings.*') # Matches the description text if using custom settings. CUSTOM_SETTINGS_MATCHER = re.compile( '.*this\s+test\s+be\s+validated\s+using\s+the\s+following\s+inputs:.*') # Match an expected test result. Note that some results in the PDF have a typo # "path not should validate" instead of "path should not validate". TEST_RESULT_MATCHER = re.compile( '^.*path (should validate|should not validate|not should validate)') # Matches a line in the certification path, ex: # "\u2022 Good CA Cert, Good CA CRL" PATH_MATCHER = re.compile('^\s*\xe2\x80\xa2\s*(.+)\s*$') # Matches a page number. These may appear in the middle of multi-line fields and # thus need to be ignored. PAGE_NUMBER_MATCHER = re.compile('^\s*\d+\s*$') # Matches if an entry in a certification path refers to a CRL, ex: # "onlySomeReasons CA2 CRL1". CRL_MATCHER = re.compile('^.*CRL\d*$') class TestSections(object): def __init__(self): self.description_lines = [] self.procedure_lines = [] self.expected_result_lines = [] self.cert_path_lines = [] def parse_main_test_sections(lines, i): result = TestSections() # Read the description lines (text after test name up until # "Procedure:"). result.description_lines = [] while i < len(lines): if PROCEDURE_HEADER_MATCHER.match(lines[i]): break result.description_lines.append(lines[i]) i += 1 # Read the procedure lines (text starting at "Procedure:" and up until # "Expected Result:". result.procedure_lines = [] while i < len(lines): if EXPECTED_HEADER_MATCHER.match(lines[i]): break result.procedure_lines.append(lines[i]) i += 1 # Read the expected result lines (text starting at "Expected Result:" and up # until "Certification Path:". result.expected_result_lines = [] while i < len(lines): if PATH_HEADER_MATCHER.match(lines[i]): break result.expected_result_lines.append(lines[i]) i += 1 # Read the certification path lines (text starting at "Certification Path:" # and up until the next test title. 
result.cert_path_lines = [] while i < len(lines): if TEST_MATCHER.match(lines[i]) or SECTION_MATCHER.match(lines[i]): break result.cert_path_lines.append(lines[i]) i += 1 return i, result def parse_cert_path_lines(lines): path_lines = [] crls = [] certs = [] for line in lines[1:]: line = line.strip() if "is composed of the following objects:" in line: continue if "See the introduction to Section 4.4 for more information." in line: continue if not line or PAGE_NUMBER_MATCHER.match(line): continue path_match = PATH_MATCHER.match(line) if path_match: path_lines.append(path_match.group(1)) continue # Continuation of previous path line. path_lines[-1] += ' ' + line for path_line in path_lines: for path in path_line.split(','): path = sanitize_name(path.strip()) if CRL_MATCHER.match(path): crls.append(path) else: certs.append(path) return certs, crls ANY_POLICY = 'anyPolicy' TEST_POLICY_1 = 'NIST-test-policy-1' TEST_POLICY_2 = 'NIST-test-policy-2' TEST_POLICY_3 = 'NIST-test-policy-3' TEST_POLICY_6 = 'NIST-test-policy-6' # Note: This omits some outputs from PKITS: # # * authorities-constrained-policy-set # * explicit-policy-indicator class TestInfo(object): """This structure describes a test inputs and outputs""" def __init__(self, should_validate, # These defaults come from section 3 of PKITS.pdf initial_policy_set = [ANY_POLICY], initial_explicit_policy = False, initial_policy_mapping_inhibit = False, initial_inhibit_any_policy = False, # In all of the tests that are not related to policy processing, # each certificate in the path asserts the certificate policy # 2.16.840.172.16.58.3.1.48.1 user_constrained_policy_set = [TEST_POLICY_1]): self.should_validate = should_validate self.initial_policy_set = initial_policy_set self.initial_explicit_policy = initial_explicit_policy self.initial_policy_mapping_inhibit = initial_policy_mapping_inhibit self.initial_inhibit_any_policy = initial_inhibit_any_policy self.user_constrained_policy_set = user_constrained_policy_set TEST_OVERRIDES = { '4.8.1': [ # All Certificates Same Policy Test1 # 1. default settings, but with initial-explicit-policy set. The path # should validate successfully TestInfo(True, initial_explicit_policy=True, user_constrained_policy_set=[TEST_POLICY_1]), # 2. default settings, but with initial-explicit-policy set and # initial-policy-set = {NIST-test-policy-1}. The path should validate # successfully. TestInfo(True, initial_explicit_policy=True, initial_policy_set=[TEST_POLICY_1], user_constrained_policy_set=[TEST_POLICY_1]), # 3. default settings, but with initial-explicit-policy set and # initial-policy-set = {NIST-test-policy-2}. The path should not validate # successfully. TestInfo(False, initial_explicit_policy=True, initial_policy_set=[TEST_POLICY_2], user_constrained_policy_set=[]), # 4. default settings, but with initial-explicit-policy set and # initial-policy-set = {NIST-test-policy-1, NIST-test-policy-2}. The path # should validate successfully. TestInfo(True, initial_explicit_policy=True, initial_policy_set=[TEST_POLICY_1, TEST_POLICY_2], user_constrained_policy_set=[TEST_POLICY_1]), ], '4.8.2': [ # All Certificates No Policies Test2 # 1. default settings. The path should validate successfully. TestInfo(True, user_constrained_policy_set=[]), # 2. default settings, but with initial-explicit-policy set. The path # should not validate successfully TestInfo(False, initial_explicit_policy=True, user_constrained_policy_set=[]), ], '4.8.3': [ # Different Policies Test3 # 1. default settings. 
The path should validate successfully. TestInfo(True, user_constrained_policy_set=[]), # 2. default settings, but with initial-explicit-policy set. The path # should not validate successfully. TestInfo(False, initial_explicit_policy=True, user_constrained_policy_set=[]), # 3. default settings, but with initial-explicit-policy set and # initial-policy-set = {NIST-test-policy-1, NIST-test-policy-2}. The path # should not validate successfully. TestInfo(False, initial_explicit_policy=True, initial_policy_set=[TEST_POLICY_1, TEST_POLICY_2], user_constrained_policy_set=[]), ], '4.8.4': [ # Different Policies Test4 # Procedure: Validate Different Policies Test4 EE using the default # settings or open and verify Signed Test Message 6.2.2.69 using the # default settings. # # Expected Result: The authorities-constrained-policy-set and the # user-constrained-policy-set will be empty. The explicit-policy-indicator # will be set if the application can process the policyConstraints # extension. If the application can process the policyConstraints extension # then the path should not validate successfully. If the application can # not process the policyConstraints extension, then the path should # validate successfully. TestInfo(False, user_constrained_policy_set=[]), ], '4.8.5': [ # 4.8.5 Different Policies Test5 # Procedure: Validate Different Policies Test5 EE using the default # settings or open and verify Signed Test Message 6.2.2.70 using the # default settings. # # Expected Result: The authorities-constrained-policy-set and the # user-constrained-policy-set will be empty. The explicit-policy-indicator # will be set if the application can process the policyConstraints # extension. If the application can process the policyConstraints extension # then the path should not validate successfully. If the application can # not process the policyConstraints extension, then the path should # validate successfully TestInfo(False, user_constrained_policy_set=[]), ], '4.8.6': [ # Overlapping Policies Test6 # 1. default settings. The path should validate successfully. TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]), # 2. default settings, but with initial-policy-set = {NIST-test-policy-1}. # The path should validate successfully. TestInfo(True, initial_policy_set=[TEST_POLICY_1], user_constrained_policy_set=[TEST_POLICY_1]), # 3. default settings, but with initial-policy-set = {NIST-test-policy-2}. # The path should not validate successfully. TestInfo(False, initial_policy_set=[TEST_POLICY_2], user_constrained_policy_set=[]), ], '4.8.7': [ # Different Policies Test7 # Procedure: Validate Different Policies Test7 EE using the default # settings or open and verify Signed Test Message 192.168.3.112 using the # default settings. # # Expected Result: The authorities-constrained-policy-set and the # user-constrained-policy-set will be empty. If the # explicit-policy-indicator will be set if the application can process the # policyConstraints extension. If the application can process the # policyConstraints extension, then the path should not validate # successfully. If the application can not process the policyConstraints # extension, then the path should validate successfully. TestInfo(False, user_constrained_policy_set=[]), ], '4.8.8': [ # Different Policies Test8 # Procedure: Validate Different Policies Test8 EE using the default # settings or open and verify Signed Test Message 192.168.127.12 using the # default settings. 
# # Expected Result: The authorities-constrained-policy-set and the # user-constrained-policy-set will be empty. The explicit-policy-indicator # will be set if the application can process the policyConstraints # extension. If the application can process the policyConstraints extension # then the path should not validate successfully. If the application can # not process the policyConstraints extension, then the path should # validate successfully. TestInfo(False, user_constrained_policy_set=[]), ], '4.8.9': [ # Different Policies Test9 # Procedure: Validate Different Policies Test9 EE using the default # settings or open and verify Signed Test Message 192.168.3.114 using the # default settings. # # Expected Result: The authorities-constrained-policy-set and the # user-constrained-policy-set will be empty. The explicit-policy-indicator # will be set if the application can process the policyConstraints # extension. If the application can process the policyConstraints # extension, then the path should not validate successfully. If the # application can not process the policyConstraints extension, then the # path should validate successfully. TestInfo(False, user_constrained_policy_set=[]), ], '4.8.10': [ # All Certificates Same Policies Test10 # 1. default settings. The path should validate successfully. TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1, TEST_POLICY_2]), # 2. default settings, but with initial-policy-set = {NIST-test-policy-1}. # The path should validate successfully. TestInfo(True, initial_policy_set=[TEST_POLICY_1], user_constrained_policy_set=[TEST_POLICY_1]), # 3. default settings, but with initial-policy-set = {NIST-test-policy-2}. # The path should validate successfully. TestInfo(True, initial_policy_set=[TEST_POLICY_2], user_constrained_policy_set=[TEST_POLICY_2]), ], '4.8.11': [ # All Certificates AnyPolicy Test11 # 1. default settings. The path should validate successfully. TestInfo(True, user_constrained_policy_set=[ANY_POLICY]), # 2. default settings, but with initial-policy-set = {NIST-test-policy-1}. # The path should validate successfully. TestInfo(True, initial_policy_set=[TEST_POLICY_1], user_constrained_policy_set=[TEST_POLICY_1]), ], '4.8.12': [ # Different Policies Test12 # Procedure: Validate Different Policies Test12 EE using the default # settings or open and verify Signed Test Message 172.16.58.3 using the # default settings. # # Expected Result: The authorities-constrained-policy-set and the # user-constrained-policy-set will be empty. The explicit-policy-indicator # will be set if the application can process the policyConstraints # extension. If the application can process the policyConstraints # extension, then the path should not validate successfully. If the # application can not process the policyConstraints extension, then the # path should validate successfully. TestInfo(False, user_constrained_policy_set=[]), ], '4.8.13': [ # All Certificates Same Policies Test13 # 1. default settings, but with initial-policy-set = {NIST-test-policy-1}. # The path should validate successfully. TestInfo(True, initial_policy_set=[TEST_POLICY_1], user_constrained_policy_set=[TEST_POLICY_1]), # 2. default settings, but with initial-policy-set = {NIST-test-policy-2}. # The path should validate successfully. TestInfo(True, initial_policy_set=[TEST_POLICY_2], user_constrained_policy_set=[TEST_POLICY_2]), # 3. default settings, but with initial-policy-set = {NIST-test-policy-3}. # The path should validate successfully. 
TestInfo(True, initial_policy_set=[TEST_POLICY_3], user_constrained_policy_set=[TEST_POLICY_3]), ], '4.8.14': [ # AnyPolicy Test14 # 1. default settings, but with initial-policy-set = {NIST-test-policy-1}. # The path should validate successfully. TestInfo(True, initial_policy_set=[TEST_POLICY_1], user_constrained_policy_set=[TEST_POLICY_1]), # 2. default settings, but with initial-policy-set = {NIST-test-policy-2}. # The path should not validate successfully. TestInfo(False, initial_policy_set=[TEST_POLICY_2], user_constrained_policy_set=[]), ], '4.8.15': [ # User Notice Qualifier Test15 # Procedure: Validate User Notice Qualifier Test15 EE using the default # settings or open and verify Signed Test Message 172.16.58.3 using the # default settings. # # Expected Result: The authorities-constrained-policy-set will be # {NIST-test-policy-1} and the explicit-policy-indicator will be the same # as the initial-explicit-policy indicator. If the initial-policy-set is # any-policy or otherwise includes NIST-test-policy-1, then the # user-constrained-policy-set will be {NIST-test-policy-1}. If not, the # user-constrained-policy-set will be empty. If the initial-explicit-policy # indicator is set and the initial-policy-set does not include # NIST-test-policy-1, then the path should be rejected, otherwise it should # validate successfully. If the path validates successfully, then the # application should display the user notice. TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]), ], '4.8.16': [ # User Notice Qualifier Test16 # Procedure: Validate User Notice Qualifier Test16 EE using the default # settings or open and verify Signed Test Message 172.16.58.3 using the # default settings. # # Expected Result: The authorities-constrained-policy-set will be # {NIST-test-policy-1} and the explicit-policy-indicator will be the same # as the initial-explicit-policy indicator. If the initial-policy-set is # any-policy or otherwise includes NIST-test-policy-1, then the # user-constrained-policy-set will be {NIST-test-policy-1}. If not, the # user-constrained-policy-set will be empty. If the initial-explicit-policy # indicator is set and the initial-policy-set does not include # NIST-test-policy-1, then the path should be rejected, otherwise it should # validate successfully. If the path validates successfully, then the # application should display the user notice associated with # NIST-test-policy-1. The user notice associated with NIST-test-policy-2 # should not be displayed. TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]), ], '4.8.17': [ # User Notice Qualifier Test17 # Procedure: Validate User Notice Qualifier Test17 EE using the default # settings or open and verify Signed Test Message 172.16.58.3 using the # default settings. # # Expected Result: The authorities-constrained-policy-set will be # {NIST-test-policy-1} and the explicit-policy-indicator will be the same # as the initial-explicit-policy indicator. If the initial-policy-set is # any-policy or otherwise includes NIST-test-policy-1, then the # user-constrained-policy-set will be {NIST-test-policy-1}. If not, the # user-constrained-policy-set will be empty. If the initial-explicit-policy # indicator is set and the initial-policy-set does not include # NIST-test-policy-1, then the path should be rejected, otherwise it should # validate successfully. If the path validates successfully, then the # application should display the user notice associated with anyPolicy. 
TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]), ], '4.8.18': [ # User Notice Qualifier Test18 # 1. default settings, but with initial-policy-set = {NIST-test-policy-1}. # The path should validate successfully and the qualifier associated with # NIST-test-policy-1 in the end entity certificate should be displayed. TestInfo(True, initial_policy_set=[TEST_POLICY_1], user_constrained_policy_set=[TEST_POLICY_1]), # 2. default settings, but with initial-policy-set = {NIST-test-policy-2}. # The path should validate successfully and the qualifier associated with # anyPolicy in the end entity certificate should be displayed. TestInfo(True, initial_policy_set=[TEST_POLICY_2], user_constrained_policy_set=[TEST_POLICY_2]), ], '4.8.19': [ # User Notice Qualifier Test19 # Procedure: Validate User Notice Qualifier Test19 EE using the default # settings or open and verify Signed Test Message 192.168.3.11 using the # default settings. # # Expected Result: The authorities-constrained-policy-set will be # {NIST-test-policy-1} and the explicit-policy-indicator will be the same # as the initial-explicit-policy indicator. If the initial-policy-set is # any-policy or otherwise includes NIST-test-policy-1, then the # user-constrained-policy-set will be {NIST-test-policy-1}. If not, the # user-constrained-policy-set will be empty. If the initial-explicit-policy # indicator is set and the initial-policy-set does not include # NIST-test-policy-1, then the path should be rejected, otherwise it should # validate successfully. Since the explicitText exceeds the maximum size # of 200 characters, the application may choose to reject the certificate. # If the application accepts the certificate, display of the user notice is # optional. TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]), ], '4.8.20': [ # CPS Pointer Qualifier Test20 # Procedure: Validate CPS Pointer Qualifier Test20 EE using the default # settings or open and verify Signed Test Message 6.2.2.85 using the # default settings. (If possible, it is recommended that this test be run # with the initial-explicit-policy indicator set. If this can not be done, # manually check that the authorities-constrained-policy-set and # user-constrained-policy-set are correct.) # # Expected Result: The authorities-constrained-policy-set will be # {NIST-test-policy-1} and the explicit-policy-indicator will be the same # as the initial-explicit-policy indicator. If the initial-policy-set is # any-policy or otherwise includes NIST-test-policy-1, then the # user-constrained-policy-set will be {NIST-test-policy-1}. If not, the # user-constrained-policy-set will be empty. If the initial-explicit-policy # indicator is set and the initial-policy-set does not include # NIST-test-policy-1, then the path should be rejected, otherwise it should # validate successfully. The CPS pointer in the qualifier should be # associated with NIST-testpolicy-1 in the # authorities-constrained-policy-set (and in the user-constrained-policy-set # if NIST-test-policy-1 is in that set). There are no processing # requirements associated with the CPS pointer qualifier. TestInfo(True, initial_explicit_policy=True, initial_policy_set=[TEST_POLICY_1], user_constrained_policy_set=[TEST_POLICY_1]), ], '4.9.1': [ # Valid RequireExplicitPolicy Test1 # Procedure: Validate Valid requireExplicitPolicy Test1 EE using the # default settings or open and verify Signed Test Message 6.2.2.86 using # the default settings. 
# # Expected Result: The path should validate successfully since the # explicit-policy-indicator is not set. TestInfo(True, user_constrained_policy_set=[]), ], '4.9.2': [ # Valid RequireExplicitPolicy Test2 # Procedure: Validate Valid requireExplicitPolicy Test2 EE using the # default settings or open and verify Signed Test Message 6.2.2.87 using # the default settings. # # Expected Result: The path should validate successfully since the # explicit-policy-indicator is not set TestInfo(True, user_constrained_policy_set=[]), ], '4.9.6': [ # Valid Self-Issued requireExplicitPolicy Test6 # Procedure: Validate Valid Self-Issued requireExplicitPolicy Test6 EE using # the default settings or open and verify Signed Test Message 6.2.2.91 using # the default settings. # # Expected Result: The path should validate successfully since the # explicit-policy-indicator is not set. TestInfo(True, user_constrained_policy_set=[]), ], '4.10.1': [ # Valid Policy Mapping Test1 # 1. default settings, but with initial-policy-set = {NIST-test-policy-1}. # The path should validate successfully. TestInfo(True, initial_policy_set=[TEST_POLICY_1], user_constrained_policy_set=[TEST_POLICY_1]), # 2. default settings, but with initial-policy-set = {NIST-test-policy-2}. # The path should not validate successfully. TestInfo(False, initial_policy_set=[TEST_POLICY_2], user_constrained_policy_set=[]), # 3. default settings, but with initial-policy-mapping-inhibit set. The # path should not validate successfully. TestInfo(False, initial_policy_mapping_inhibit=True, user_constrained_policy_set=[]), ], '4.10.2': [ # Invalid Policy Mapping Test2 # 1. default settings. The path should not validate successfully. TestInfo(False, user_constrained_policy_set=[]), # 2. default settings, but with initial-policy-mapping-inhibit set. The # path should not validate successfully. TestInfo(False, initial_policy_mapping_inhibit=True, user_constrained_policy_set=[]), ], '4.10.3': [ # Valid Policy Mapping Test3 # 1. default settings, but with initial-policy-set = {NIST-test-policy-1}. # The path should not validate successfully. TestInfo(False, initial_policy_set=[TEST_POLICY_1], user_constrained_policy_set=[]), # 2. default settings, but with initial-policy-set = {NIST-test-policy-2}. # The path should validate successfully. TestInfo(True, initial_policy_set=[TEST_POLICY_2], user_constrained_policy_set=[TEST_POLICY_2]), ], '4.10.4': [ # Invalid Policy Mapping Test4 # Procedure: Validate Invalid Policy Mapping Test4 EE using the default # settings or open and verify Signed Test Message 172.16.58.3 using the # default settings. # # Expected Result: The authorities-constrained-policy-set and the # user-constrained-policy-set will be empty and the # explicit-policy-indicator will be set (if the application can process the # policyConstraints extension). If the application can process the # policyConstraints extension, then the path should be rejected, otherwise # it should validate successfully. TestInfo(False, user_constrained_policy_set=[]), ], '4.10.5': [ # Valid Policy Mapping Test5 # 1. default settings, but with initial-policy-set = {NIST-test-policy-1}. # The path should validate successfully. TestInfo(True, initial_policy_set=[TEST_POLICY_1], user_constrained_policy_set=[TEST_POLICY_1]), # 2. default settings, but with initial-policy-set = {NIST-test-policy-6}. # The path should not validate successfully. TestInfo(False, initial_policy_set=[TEST_POLICY_6], user_constrained_policy_set=[]), ], '4.10.6': [ # Valid Policy Mapping Test6 # 1. 
default settings, but with initial-policy-set = {NIST-test-policy-1}. # The path should validate successfully. TestInfo(True, initial_policy_set=[TEST_POLICY_1], user_constrained_policy_set=[TEST_POLICY_1]), # 2. default settings, but with initial-policy-set = {NIST-test-policy-6}. # The path should not validate successfully. TestInfo(False, initial_policy_set=[TEST_POLICY_6], user_constrained_policy_set=[]), ], '4.10.7': [ # Invalid Mapping From anyPolicy Test7 # Procedure: Validate Invalid Mapping From anyPolicy Test7 EE using the # default settings or open and verify Signed Test Message 172.16.31.10 using # the default settings. # # Expected Result: The path should not validate successfully since the # intermediate certificate includes a policy mapping extension in which # anyPolicy appears as an issuerDomainPolicy. TestInfo(False, user_constrained_policy_set=[]), ], '4.10.8': [ # Invalid Mapping To anyPolicy Test8 # Procedure: Validate Invalid Mapping To anyPolicy Test8 EE using the # default settings or open and verify Signed Test Message 172.16.31.10 using # the default settings. # # Expected Result: The path should not validate successfully since the # intermediate certificate includes a policy mapping extension in which # anyPolicy appears as an subjectDomainPolicy. TestInfo(False, user_constrained_policy_set=[]), ], '4.10.9': [ # Valid Policy Mapping Test9 # Procedure: Validate Valid Policy Mapping Test9 EE using the default # settings or open and verify Signed Test Message 6.2.2.102 using the # default settings. # # Expected Result: The authorities-constrained-policy-set will be # {NIST-test-policy-1} and the explicit-policy-indicator will be set (if # the application can process the policyConstraints extension). If the # initial-policy-set is any-policy or otherwise includes # NIST-test-policy-1, then the user-constrained-policy-set will be # {NIST-test-policy-1}. If not, the user-constrained-policy-set will be # empty. If the initial-policy-set does not include NIST-test-policy-1 (and # the application can process the policyConstraints extension), then the # path should be rejected, otherwise it should validate successfully. TestInfo(True), ], '4.10.10': [ # Invalid Policy Mapping Test10 # Procedure: Validate Invalid Policy Mapping Test10 EE using the default # settings or open and verify Signed Test Message 6.2.2.103 using the # default settings. # # Expected Result: The authorities-constrained-policy-set and the # user-constrained-policy-set will be empty and the # explicit-policy-indicator will be set (if the application can process the # policyConstraints extension). If the application can process the # policyConstraints extension, then the path should be rejected, otherwise # it should validate successfully. TestInfo(False, user_constrained_policy_set=[]), ], '4.10.11': [ # Valid Policy Mapping Test11 # Procedure: Validate Valid Policy Mapping Test11 EE using the default # settings or open and verify Signed Test Message 6.2.2.104 using the # default settings. # # Expected Result: The authorities-constrained-policy-set will be # {NIST-test-policy-1} and the explicit-policy-indicator will be set (if # the application can process the policyConstraints extension). If the # initial-policy-set is any-policy or otherwise includes # NIST-test-policy-1, then the user-constrained-policy-set will be # {NIST-test-policy-1}. If not, the user-constrained-policy-set will be # empty. 
If the initial-policy-set does not include NIST-test-policy-1 (and # the application can process the policyConstraints extension), then the # path should be rejected, otherwise it should validate successfully. TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]), ], '4.10.12': [ # Valid Policy Mapping Test12 # 1. default settings, but with initial-policy-set = {NIST-test-policy-1}. # The path should validate successfully and the application should display # the user notice associated with NIST-test-policy-3 in the end entity # certificate. TestInfo(True, initial_policy_set=[TEST_POLICY_1], user_constrained_policy_set=[TEST_POLICY_1]), # 2. default settings, but with initial-policy-set = {NIST-test-policy-2}. # The path should validate successfully and the application should display # the user notice associated with anyPolicy in the end entity certificate. TestInfo(True, initial_policy_set=[TEST_POLICY_2], user_constrained_policy_set=[TEST_POLICY_2]), ], '4.10.13': [ # Valid Policy Mapping Test13 # Procedure: Validate Valid Policy Mapping Test13 EE using the default # settings or open and verify Signed Test Message 6.2.2.106 using the # default settings. # # Expected Result: The authorities-constrained-policy-set will be # {NIST-test-policy-1} and the explicit-policy-indicator will be set (if # the application can process the policyConstraints extension). If the # initial-policy-set is any-policy or otherwise includes # NIST-test-policy-1, then the user-constrained-policy-set will be # {NIST-test-policy-1}. If not, the user-constrained-policy-set will be # empty. If the initial-policy-set does not include NIST-test-policy-1 (and # the application can process the policyConstraints extension), then the # path should be rejected, otherwise it should validate successfully. If # the path is accepted, the application should display the user notice # associated with NIST-testpolicy-1 in the intermediate certificate. TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]), ], '4.10.14': [ # Valid Policy Mapping Test14 # Procedure: Validate Valid Policy Mapping Test14 EE using the default # settings or open and verify Signed Test Message 6.2.2.107 using the # default settings. # # Expected Result: The authorities-constrained-policy-set will be # {NIST-test-policy-1} and the explicit-policy-indicator will be set (if # the application can process the policyConstraints extension). If the # initial-policy-set is any-policy or otherwise includes # NIST-test-policy-1, then the user-constrained-policy-set will be # {NIST-test-policy-1}. If not, the user-constrained-policy-set will be # empty. If the initial-policy-set does not include NIST-test-policy-1 (and # the application can process the policyConstraints extension), then the # path should be rejected, otherwise it should validate successfully. If # the path is accepted, the application should display the user notice # associated with anyPolicy in the intermediate certificate TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]), ], '4.11.1': [ # Invalid inhibitPolicyMapping Test1 # Procedure: Validate Invalid inhibitPolicyMapping Test1 EE using the # default settings or open and verify Signed Test Message 6.2.2.108 using # the default settings. # # Expected Result: The authorities-constrained-policy-set and the # user-constrained-policy-set will be empty. The explicit-policy-indicator # will be set. The path should not validate successfully. 
TestInfo(False, user_constrained_policy_set=[]), ], '4.11.2': [ # Valid inhibitPolicyMapping Test2 # Procedure: Validate Valid inhibitPolicyMapping Test2 EE using the default # settings or open and verify Signed Test Message 6.2.2.109 using the # default settings. # # Expected Result: The authorities-constrained-policy-set will be # {NIST-test-policy-1} and the explicit-policy-indicator will be set. If # the initial-policy-set is any-policy or otherwise includes # NIST-test-policy-1, then the path should validate successfully. TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]), ], '4.11.3': [ # Invalid inhibitPolicyMapping Test3 # Procedure: Validate Invalid inhibitPolicyMapping Test3 EE using the # default settings or open and verify Signed Test Message 6.2.2.110 using # the default settings. # # Expected Result: The authorities-constrained-policy-set and the # user-constrained-policy-set will be empty and the # explicit-policy-indicator will be set. The path should not validate # successfully. TestInfo(False, user_constrained_policy_set=[]), ], '4.11.4': [ # Valid inhibitPolicyMapping Test4 # Procedure: Validate Valid inhibitPolicyMapping Test4 EE using the default # settings or open and verify Signed Test Message 6.2.2.111 using the # default settings. # # Expected Result: The authorities-constrained-policy-set will be # {NIST-test-policy-2} and the explicit-policy-indicator will be set. If # the initial-policy-set is any-policy or otherwise includes # NIST-test-policy-2, then the path should validate successfully. TestInfo(True, user_constrained_policy_set=[TEST_POLICY_2]), ], '4.11.5': [ # Invalid inhibitPolicyMapping Test5 # Procedure: Validate Invalid inhibitPolicyMapping Test5 EE using the # default settings or open and verify Signed Test Message 6.2.2.112 using # the default settings. # # Expected Result: The authorities-constrained-policy-set and the # user-constrained-policy-set will be empty and the # explicit-policy-indicator will be set. The path should not validate # successfully. TestInfo(False, user_constrained_policy_set=[]), ], '4.11.6': [ # Invalid inhibitPolicyMapping Test6 # Procedure: Validate Invalid inhibitPolicyMapping Test6 EE using the # default settings or open and verify Signed Test Message 172.16.31.103 using # the default settings. # # Expected Result: The authorities-constrained-policy-set and the # user-constrained-policy-set will be empty and the # explicit-policy-indicator will be set. The path should not validate # successfully. TestInfo(False, user_constrained_policy_set=[]), ], '4.11.7': [ # Valid Self-Issued inhibitPolicyMapping Test7 # Procedure: Validate Valid Self-Issued inhibitPolicyMapping Test7 EE using # the default settings or open and verify Signed Test Message 172.16.17.32 # using the default settings. # # Expected Result: The authorities-constrained-policy-set will be # {NIST-test-policy-1} and the explicit-policy-indicator will be set. If # the initial-policy-set is any-policy or otherwise includes # NIST-test-policy-1, then the path should validate successfully. TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]), ], '4.11.8': [ # Invalid Self-Issued inhibitPolicyMapping Test8 # Procedure: Validate Invalid Self-Issued inhibitPolicyMapping Test8 EE # using the default settings or open and verify Signed Test Message # 172.16.58.3 using the default settings. # # Expected Result: The authorities-constrained-policy-set and # user-constrained-policy-set will be empty and the # explicit-policy-indicator will be set. 
The path should not validate # successfully. TestInfo(False, user_constrained_policy_set=[]), ], '4.11.9': [ # Invalid Self-Issued inhibitPolicyMapping Test9 # Procedure: Validate Invalid Self-Issued inhibitPolicyMapping Test9 EE # using the default settings or open and verify Signed Test Message # 172.16.31.106 using the default settings. # # Expected Result: The authorities-constrained-policy-set and # user-constrained-policy-set will be empty and the # explicit-policy-indicator will be set. The path should not validate # successfully. TestInfo(False, user_constrained_policy_set=[]), ], '4.11.10': [ # Invalid Self-Issued inhibitPolicyMapping Test10 # Procedure: Validate Invalid Self-Issued inhibitPolicyMapping Test10 EE # using the default settings or open and verify Signed Test Message # 172.16.17.32 using the default settings. # # Expected Result: The authorities-constrained-policy-set and # user-constrained-policy-set will be empty and the # explicit-policy-indicator will be set. The path should not validate # successfully. TestInfo(False, user_constrained_policy_set=[]), ], '4.11.11': [ # Invalid Self-Issued inhibitPolicyMapping Test11 # Procedure: Validate Invalid Self-Issued inhibitPolicyMapping Test11 EE # using the default settings or open and verify Signed Test Message # 172.16.58.3 using the default settings. # # Expected Result: The authorities-constrained-policy-set and # user-constrained-policy-set will be empty and the # explicit-policy-indicator will be set. The path should not validate # successfully. TestInfo(False, user_constrained_policy_set=[]), ], '4.12.1': [ # Invalid inhibitAnyPolicy Test1 # Procedure: Validate Invalid inhibitAnyPolicy Test1 EE using the default # settings or open and verify Signed Test Message 172.16.17.32 using the # default settings. # # Expected Result: The authorities-constrained-policy-set and # user-constrained-policy-set will be empty and the # explicit-policy-indicator will be set (if the application can process the # policyConstraints extension). If the application can process the # policyConstraints extension, then the path should not validate # successfully. TestInfo(False, user_constrained_policy_set=[]), ], '4.12.2': [ # Valid inhibitAnyPolicy Test2 # Procedure: Validate Valid inhibitAnyPolicy Test2 EE using the default # settings or open and verify Signed Test Message 6.2.2.120 using the # default settings. # # Expected Result: The authorities-constrained-policy-set will be # {NIST-test-policy-1} and the explicit-policy-indicator will be set (if # the application can process the policyConstraints extension). If the # initial-policy-set is any-policy or otherwise includes # NIST-test-policy-1, then the user-constrained-policy-set will be # {NIST-test-policy-1} and the path should validate successfully. If not, # then the user-constrained-policy-set will be empty. If the # user-constrained-policy-set is empty and the application can process the # policyConstraints extension, then the path should not validate # successfully. TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]), ], '4.12.3': [ # inhibitAnyPolicy Test3 # 1. default settings. The path should validate successfully. TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]), # 2. default settings, but with initial-inhibit-any-policy set. The path # should not validate successfully. 
        TestInfo(False, initial_inhibit_any_policy=True,
                 user_constrained_policy_set=[]),
    ],
    '4.12.4': [
        # Invalid inhibitAnyPolicy Test4
        # Procedure: Validate Invalid inhibitAnyPolicy Test4 EE using the default
        # settings or open and verify Signed Test Message 6.2.2.122 using the
        # default settings.
        #
        # Expected Result: The authorities-constrained-policy-set and
        # user-constrained-policy-set will be empty and the
        # explicit-policy-indicator will be set (if the application can process the
        # policyConstraints extension). If the application can process the
        # policyConstraints extension, then the path should not validate
        # successfully.
        TestInfo(False, user_constrained_policy_set=[]),
    ],
    '4.12.5': [
        # Invalid inhibitAnyPolicy Test5
        # Procedure: Validate Invalid inhibitAnyPolicy Test5 EE using the default
        # settings or open and verify Signed Test Message 6.2.2.123 using the
        # default settings.
        #
        # Expected Result: The authorities-constrained-policy-set and
        # user-constrained-policy-set will be empty and the
        # explicit-policy-indicator will be set (if the application can process the
        # policyConstraints extension). If the application can process the
        # policyConstraints extension, then the path should not validate
        # successfully.
        TestInfo(False, user_constrained_policy_set=[]),
    ],
    '4.12.6': [
        # Invalid inhibitAnyPolicy Test6
        # Procedure: Validate Invalid inhibitAnyPolicy Test6 EE using the default
        # settings or open and verify Signed Test Message 6.2.2.124 using the
        # default settings.
        #
        # Expected Result: The authorities-constrained-policy-set and
        # user-constrained-policy-set will be empty and the
        # explicit-policy-indicator will be set (if the application can process the
        # policyConstraints extension). If the application can process the
        # policyConstraints extension, then the path should not validate
        # successfully.
        TestInfo(False, user_constrained_policy_set=[]),
    ],
    '4.12.7': [
        # Valid Self-Issued inhibitAnyPolicy Test7
        # Procedure: Validate Valid Self-Issued inhibitAnyPolicy Test7 EE using the
        # default settings or open and verify Signed Test Message 6.2.2.125 using
        # the default settings.
        #
        # Expected Result: The authorities-constrained-policy-set will be
        # {NIST-test-policy-1} and the explicit-policy-indicator will be set (if
        # the application can process the policyConstraints extension). If the
        # initial-policy-set is any-policy or otherwise includes
        # NIST-test-policy-1, then the user-constrained-policy-set will be
        # {NIST-test-policy-1} and the path should validate successfully. If not,
        # then the user-constrained-policy-set will be empty. If the
        # user-constrained-policy-set is empty and the application can process the
        # policyConstraints extension, then the path should not validate
        # successfully.
        TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]),
    ],
    '4.12.8': [
        # Invalid Self-Issued inhibitAnyPolicy Test8
        # Procedure: Validate Invalid Self-Issued inhibitAnyPolicy Test8 EE using
        # the default settings or open and verify Signed Test Message 6.2.2.126
        # using the default settings.
        #
        # Expected Result: The authorities-constrained-policy-set and
        # user-constrained-policy-set will be empty and the
        # explicit-policy-indicator will be set (if the application can process the
        # policyConstraints extension). If the application can process the
        # policyConstraints extension, then the path should not validate
        # successfully.
        TestInfo(False, user_constrained_policy_set=[]),
    ],
    '4.12.9': [
        # Valid Self-Issued inhibitAnyPolicy Test9
        # Procedure: Validate Valid Self-Issued inhibitAnyPolicy Test9 EE using the
        # default settings or open and verify Signed Test Message 6.2.2.127 using
        # the default settings.
        #
        # Expected Result: The authorities-constrained-policy-set will be
        # {NIST-test-policy-1} and the explicit-policy-indicator will be set (if
        # the application can process the policyConstraints extension). If the
        # initial-policy-set is any-policy or otherwise includes
        # NIST-test-policy-1, then the user-constrained-policy-set will be
        # {NIST-test-policy-1} and the path should validate successfully. If not,
        # then the user-constrained-policy-set will be empty. If the
        # user-constrained-policy-set is empty and the application can process the
        # policyConstraints extension, then the path should not validate
        # successfully.
        TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]),
    ],
    '4.12.10': [
        # Invalid Self-Issued inhibitAnyPolicy Test10
        # Procedure: Validate Invalid Self-Issued inhibitAnyPolicy Test10 EE using
        # the default settings or open and verify Signed Test Message 6.2.2.128
        # using the default settings.
        #
        # Expected Result: The authorities-constrained-policy-set and
        # user-constrained-policy-set will be empty and the
        # explicit-policy-indicator will be set (if the application can process the
        # policyConstraints extension). If the application can process the
        # policyConstraints extension, then the path should not validate
        # successfully.
        TestInfo(False, user_constrained_policy_set=[]),
    ],
}


def parse_test(lines, i, test_case_name, test_number, test_name,
               sanitized_test_names, output):
    # Start by doing a coarse level of parsing that separates out the lines for
    # the main sections.
    i, test_sections = parse_main_test_sections(lines, i)

    certs, crls = parse_cert_path_lines(test_sections.cert_path_lines)

    # Most tests have a formulaic specification: they use the default
    # settings, and have one expectation. These are easily parsed and are handled
    # programmatically. In contrast, many of the policies tests have a more
    # complicated specification which involves multiple subtests having various
    # settings, as well as expectations described in terms of supported
    # extensions. Rather than try to handle all the nuanced language, these are
    # handled manually via "overrides".
    overrides = TEST_OVERRIDES.get(test_number, None)

    if overrides is None:
        # Verify that the test description doesn't include numbered subparts
        # (those are not handled here).
        if CUSTOM_SETTINGS_MATCHER.match(" ".join(test_sections.description_lines)):
            sys.stderr.write('Unexpected custom settings for %s\n' % test_number)
            sys.exit(1)

        # Verify that the test is using only default settings.
        if not USING_DEFAULT_SETTINGS_MATCHER.match(
                " ".join(test_sections.procedure_lines)):
            sys.stderr.write('Unexpected procedure for %s: %s\n' %
                             (test_number, " ".join(test_sections.procedure_lines)))
            sys.exit(1)

        # Check whether expected result is validation success or failure.
        result_match = TEST_RESULT_MATCHER.match(
            test_sections.expected_result_lines[0])
        if not result_match:
            sys.stderr.write('Unknown expectation for %s:\n%s\n' % (
                test_number, " ".join(test_sections.expected_result_lines)))
            sys.exit(1)
        # Initializes with default settings.
        info = TestInfo(result_match.group(1) == 'should validate')

        # Special case the 4.9 test failures (require explicit policy) to set
        # user_constrained_policy_set to empty. This is only done for the 4.9
        # tests, because the other policy tests are special cased as overrides and
        # hence set this manually on a per-test basis.
        #
        # user_constrained_policy_set enumerates the subset of the initial policy
        # set (anyPolicy in the default case) that were valid for the path. For
        # non-policy tests the expectation for user_constrained_policy_set is
        # [TEST_POLICY_1] since each policy asserts that. However for these tests,
        # the expectation is an empty user_constrained_policy_set since there was
        # no valid policy for the path (in fact, that is why the path validation is
        # expected to fail).
        if test_number.startswith('4.9.') and not info.should_validate:
            info.user_constrained_policy_set = []

        output_test(test_case_name, test_number, test_name, None, info, certs,
                    crls, sanitized_test_names, output)
    else:
        # The overrides may have a series of inputs (settings) and outputs
        # (success/failure) for this test. Output each as a separate test case.
        for subpart_i in range(len(overrides)):
            info = overrides[subpart_i]
            # If the test has only 1 subpart, don't number it.
            subpart_number = subpart_i + 1 if len(overrides) > 1 else None
            output_test(test_case_name, test_number, test_name, subpart_number,
                        info, certs, crls, sanitized_test_names, output)

    return i


def main():
    pkits_pdf_path, output_path = sys.argv[1:]

    pkits_txt_file = tempfile.NamedTemporaryFile()

    subprocess.check_call(['pdftotext', '-layout', '-nopgbrk', '-eol', 'unix',
                           pkits_pdf_path, pkits_txt_file.name])

    test_descriptions = pkits_txt_file.read()

    # Extract section 4 of the text, which is the part that contains the tests.
    test_descriptions = test_descriptions.split(
        '4 Certification Path Validation Tests')[-1]
    test_descriptions = test_descriptions.split(
        '5 Relationship to Previous Test Suite', 1)[0]

    output = open(output_path, 'w')
    output.write('// Autogenerated by %s, do not edit\n\n' % sys.argv[0])
    output.write("""
// This file intentionally does not have header guards, it's intended to
// be inlined in another header file. The following line silences a
// presubmit warning that would otherwise be triggered by this:
// no-include-guard-because-multiply-included
// NOLINT(build/header_guard)\n\n""")

    output.write('// Hack to allow disabling type parameterized test cases.\n'
                 '// See https://github.com/google/googletest/issues/389\n')
    output.write('#define WRAPPED_TYPED_TEST_P(CaseName, TestName) '
                 'TYPED_TEST_P(CaseName, TestName)\n')
    output.write('#define WRAPPED_REGISTER_TYPED_TEST_SUITE_P(CaseName, ...) '
                 'REGISTER_TYPED_TEST_SUITE_P(CaseName, __VA_ARGS__)\n\n')

    test_case_name = None

    sanitized_test_names = []

    lines = test_descriptions.splitlines()

    i = 0
    while i < len(lines):
        section_match = SECTION_MATCHER.match(lines[i])
        match = TEST_MATCHER.match(lines[i])
        i += 1

        if section_match:
            if test_case_name:
                finalize_test_case(test_case_name, sanitized_test_names, output)
                sanitized_test_names = []

            test_case_name = 'PkitsTest%02d%s' % (
                int(section_match.group(1).split('.')[-1]),
                sanitize_name(section_match.group(2)))
            output.write('\ntemplate <typename PkitsTestDelegate>\n')
            output.write('class %s : public PkitsTest<PkitsTestDelegate> {};\n' %
                         test_case_name)
            output.write('TYPED_TEST_SUITE_P(%s);\n' % test_case_name)

        if match:
            test_number = match.group(1)
            test_name = match.group(2)
            if not test_case_name:
                output.write('// Skipped %s %s\n' % (test_number, test_name))
                continue
            i = parse_test(lines, i, test_case_name, test_number, test_name,
                           sanitized_test_names, output)

    if test_case_name:
        finalize_test_case(test_case_name, sanitized_test_names, output)


if __name__ == '__main__':
    main()
StarcoderdataPython
4813690
import unittest

from pymetrics.metric import Metric


class Nothing(Metric):
    def dump(self):
        pass

    def __init__(self):
        Metric.__init__(self, 'test')


class TestMetric(unittest.TestCase):
    def test_name_and_metric(self):
        metric = Nothing()
        self.assertEqual('nothing', metric.metric)
        self.assertEqual('test', metric.name)


###
if __name__ == '__main__':
    unittest.main()
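# The assertions above rely on behaviour of the (unshown) pymetrics Metric base
# class: `name` echoes the constructor argument while `metric` appears to be
# derived from the subclass name, lowercased. A minimal sketch of a compatible
# base class (an assumption for illustration, not the real pymetrics API):
#
#     class Metric(object):
#         def __init__(self, name):
#             self.name = name
#             self.metric = type(self).__name__.lower()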
StarcoderdataPython
3329629
from __future__ import absolute_import, unicode_literals, print_function

import spreadsheet

spreadsheet.start()
StarcoderdataPython
3288554
import psycopg2

from django.db import connection
from django.utils.text import force_text


def adapt(text):
    connection.ensure_connection()
    a = psycopg2.extensions.adapt(force_text(text))
    c = connection.connection
    # This is a workaround for https://github.com/18F/calc/issues/1498.
    if hasattr(c, '__wrapped__'):
        c = getattr(c, '__wrapped__')
    a.prepare(c)
    return a
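# Usage sketch (hypothetical call site, not part of this module): adapt()
# returns a prepared psycopg2 adapter, so getquoted() yields a safely escaped
# SQL literal that can be interpolated into raw SQL.
#
#     quoted = adapt("O'Brien").getquoted()  # b"'O''Brien'"
#     cursor.execute("SELECT %s AS name" % quoted.decode('utf-8'))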
StarcoderdataPython
3309173
#!/usr/bin/env python3
# testing using unittest framework

import unittest
import sys

sys.path.append("..")  # append parent folder into the system path
import egypt


class Test(unittest.TestCase):

    # optional - executes before running each test function
    def setUp(self):
        print('Running unittest on egypt module')
        self.input1 = [8, 6, 10]
        self.input2 = [5, 4, 3]
        self.input3 = [5, 12, 13]
        self.input4 = [1, 2, 3]
        self.input5 = [2000, 100, 30000]

    def testAnswer1(self):
        expect = "right"
        result = egypt.answer(self.input1)
        self.assertEqual(expect, result)

    def testAnswer2(self):
        expect = "right"
        result = egypt.answer(self.input2)
        self.assertEqual(expect, result)

    def testAnswer3(self):
        expect = "right"
        result = egypt.answer(self.input3)
        self.assertEqual(expect, result)

    def testAnswer4(self):
        expect = "wrong"
        result = egypt.answer(self.input4)
        self.assertEqual(expect, result)

    def testAnswer5(self):
        expect = "wrong"
        result = egypt.answer(self.input5)
        self.assertEqual(expect, result)

    def tearDown(self):  # optional - executes after each test function
        print('Done running unittest')


if __name__ == "__main__":
    unittest.main()
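# The five cases above imply the contract of egypt.answer: given three side
# lengths, it returns "right" when they can form a right triangle and "wrong"
# otherwise. A minimal sketch of a compatible implementation (an assumption;
# the real egypt module is not shown here) that satisfies all five tests:
#
#     def answer(sides):
#         a, b, c = sorted(sides)
#         return "right" if a * a + b * b == c * c else "wrong"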
StarcoderdataPython
3226350
<filename>socialserver/resources/config/schema.py
#  Copyright (c) <NAME> 2022

from pydantic import BaseModel, IPvAnyAddress, Field, validator
from socialserver.constants import MAX_PIXEL_RATIO
from typing import Literal, Optional


class _ServerConfigNetwork(BaseModel):
    host: IPvAnyAddress
    # 1-65535 is the valid TCP port range, hence the limit.
    port: int = Field(..., ge=1, le=65535)


class _ServerConfigMisc(BaseModel):
    enable_landing_page: bool


class _ServerConfigDatabase(BaseModel):
    # these are optional depending on the connector,
    # handled by the connector_validation validator below.
    filename: Optional[str]
    username: Optional[str]
    password: Optional[str]
    database_name: Optional[str]
    host: Optional[str]
    connector: Literal["sqlite", "postgres"]

    @validator("connector")
    def connector_validation(cls, value, values):
        if value == "sqlite":
            filename = values.get("filename")
            assert filename not in [
                None,
                "",
            ], "filename required when using the sqlite connector"
        if value == "postgres":
            required_values = ["username", "password", "database_name", "host"]
            for reqd_value in required_values:
                assert (
                    values.get(reqd_value) is not None
                ), "username, password, database_name, host required when using the postgres connector"
        return value


class _ServerConfigMediaImages(BaseModel):
    quality: int = Field(..., ge=1, le=100)
    post_quality: int = Field(..., ge=1, le=100)
    storage_dir: str
    # max size cannot be negative. god knows what would happen if it was.
    # probably not much. but you definitely wouldn't be uploading any images.
    max_image_request_size_mb: float = Field(..., ge=0)


class _ServerConfigMediaVideos(BaseModel):
    storage_dir: str


class _ServerConfigMedia(BaseModel):
    images: _ServerConfigMediaImages
    videos: _ServerConfigMediaVideos


class _ServerConfigAuthRegistration(BaseModel):
    enabled: bool
    approval_required: bool
    auto_approve_when_approval_disabled: bool


class _ServerConfigAuthTotp(BaseModel):
    replay_prevention_enabled: bool
    issuer: str
    # it makes no sense for a time in the future to be < 0,
    # and would just cause issues.
    unconfirmed_expiry_time: int = Field(..., ge=0)


class _ServerConfigAuth(BaseModel):
    registration: _ServerConfigAuthRegistration
    totp: _ServerConfigAuthTotp


class _ServerConfigPosts(BaseModel):
    silent_fail_on_double_report: bool


class _ServerConfigLegacyApiInterface(BaseModel):
    enable: bool
    image_pixel_ratio: int = Field(..., ge=0, le=MAX_PIXEL_RATIO)
    signup_enabled: bool
    deliver_full_post_images: bool
    report_legacy_version: bool
    enable_less_secure_password_change: bool
    provide_legacy_video_thumbnails: bool
    provide_incompatible_video_thumbnail_text_overlay: bool


class ServerConfig(BaseModel):
    network: _ServerConfigNetwork
    misc: _ServerConfigMisc
    database: _ServerConfigDatabase
    media: _ServerConfigMedia
    auth: _ServerConfigAuth
    posts: _ServerConfigPosts
    legacy_api_interface: _ServerConfigLegacyApiInterface
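# Usage sketch (hypothetical values): pydantic validates on construction, so
# the cross-field rules in connector_validation fire as soon as the model is
# built. An sqlite connector without a filename raises a ValidationError.
#
#     db_ok = _ServerConfigDatabase(filename="app.db", connector="sqlite")
#     db_bad = _ServerConfigDatabase(connector="sqlite")  # raises ValidationError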
StarcoderdataPython
39329
<reponame>wdobbels/CAAPR<gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# **       PTS -- Python Toolkit for working with SKIRT          **
# **       © Astronomical Observatory, Ghent University          **
# *****************************************************************

## \package pts.magic.animation.scatter Contains the ScatterAnimation class.

# -----------------------------------------------------------------

# Import standard modules
import io
import numpy as np
import copy
import imageio

# Import the relevant PTS classes and modules
from ...core.basics.animation import Animation
from ...core.plot.scatter import ScatterPlotter

# -----------------------------------------------------------------

class ScatterAnimation(Animation):

    """
    This class ...
    """

    def __init__(self, x_limits, y_limits, z_limits):

        """
        The constructor ...
        """

        # Call the constructor of the base class
        super(ScatterAnimation, self).__init__()

        # Set the number of frames per second
        self.fps = 5

        # Properties
        self.x_limits = x_limits
        self.y_limits = y_limits
        self.z_limits = z_limits
        self.x_label = None
        self.y_label = None
        self.z_label = None
        self.density = True

        # The plotter
        self._plotter = ScatterPlotter()

    # -----------------------------------------------------------------

    def add_point(self, x, y, z):

        """
        This function ...
        :return:
        """

        # Add a point to the plotter
        self._plotter.add_point(x, y, z)

        buf = io.BytesIO()

        self._plotter.set_x_limits(self.x_limits[0], self.x_limits[1])
        self._plotter.set_y_limits(self.y_limits[0], self.y_limits[1])
        self._plotter.set_z_limits(self.z_limits[0], self.z_limits[1])

        if self.x_label is not None:
            self._plotter.set_x_label(self.x_label)
        if self.y_label is not None:
            self._plotter.set_y_label(self.y_label)
        if self.z_label is not None:
            self._plotter.set_z_label(self.z_label)

        self._plotter.format = "png"
        self._plotter.density = self.density

        # Run the scatter plotter
        self._plotter.run(buf)

        buf.seek(0)
        im = imageio.imread(buf)
        buf.close()
        self.add_frame(im)

        # Clear the scatter plotter
        self._plotter.clear_figure()

# -----------------------------------------------------------------
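# Usage sketch (hypothetical values): each add_point() call re-renders the
# scatter plot to an in-memory PNG and appends it as an animation frame, so
# the animation grows by one frame per point added.
#
#     anim = ScatterAnimation([0, 1], [0, 1], [0, 1])
#     anim.x_label = "x"
#     for x, y, z in [(0.1, 0.2, 0.3), (0.4, 0.5, 0.6)]:
#         anim.add_point(x, y, z)

# -----------------------------------------------------------------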
StarcoderdataPython
173220
import automox_console_sdk as automox
from automox_console_sdk.api import DevicesApi
from automox_console_sdk.api import GroupsApi
from automox_console_sdk.models import ServersIdBody, ServerGroupCreateOrUpdateRequest
from getpass import getpass
import ldap
from ldap.controls import SimplePagedResultsControl
import re
import os


def map_automox_devices(d_api):
    hostname_map, ip_map = {}, {}
    page = 0

    while True:
        devices_page = d_api.get_devices(o=org_id, limit=500, page=page)

        # All devices retrieved once no more are returned
        if len(devices_page) == 0:
            break

        for d in devices_page:
            hostname_map[d.name.lower()] = d

            # Iterate IP address
            for ip in d.ip_addrs:
                ip_map[ip] = d

        page += 1

    return hostname_map, ip_map


def get_automox_groups(g_api):
    group_list = []
    default_group_id = 0
    page = 0

    while True:
        groups_page = g_api.get_server_groups(o=org_id, limit=500, page=page)

        if len(groups_page) == 0:
            break

        for g in groups_page:
            if not g.name:
                default_group_id = g.id
            group_list.append(g)

        page += 1

    return group_list, default_group_id


def get_ou_from_dn(dn):
    exploded_dn = ldap.dn.explode_dn(dn, flags=ldap.DN_FORMAT_LDAPV2)
    parent_ou = ','.join(exploded_dn[1:])
    return parent_ou


if __name__ == '__main__':
    # Prompt for inputs
    # API Key
    api_key = os.getenv('AUTOMOX_API_KEY') or getpass("Enter your API Key: ")
    # Org ID
    org_id = int(os.getenv('AUTOMOX_ORGANIZATION_ID') or input("Enter your Organization ID: "))
    # LDAP/AD Details
    ldap_url = os.getenv('LDAP_URL') or input("Enter your LDAP Bind URL (eg ldap://domain-controller:389): ")
    ldap_user = os.getenv('LDAP_USER') or input("Enter your LDAP Bind User: ")
    ldap_password = os.getenv('LDAP_PASSWORD') or getpass("Enter your LDAP Bind Password: ")
    ldap_base = os.getenv('LDAP_BASE') or input("Enter your LDAP base for computers (eg dc=example, dc=com): ")
    # Computer query/filter
    ldap_computer_filter = os.getenv('LDAP_COMPUTER_FILTER') or \
        input("Enter your LDAP Computer Filter (default: (&(objectClass=computer))") or \
        "(&(objectClass=computer))"
    # Attribute for hostname comparison
    hn_attribute = input("Enter the LDAP/AD computer field for hostname comparison (default: name): ") or "name"
    # Attribute for FQDN Address comparison
    fqdn_attribute = input("Enter the LDAP/AD computer attribute for FQDN comparison (default: dnsHostName): ") or "dnsHostName"
    # Attributes for tagging
    tag_attributes = input("Enter a comma separated list of LDAP/AD computer attributes used for tagging devices: ")
    tag_attributes = tag_attributes.split(',')

    tag_prefix = 'AD-'
    ldap_attributes = [hn_attribute, fqdn_attribute] + tag_attributes

    counter_created_groups, counter_matched_devices, counter_unmatched_devices = 0, 0, 0

    config = automox.Configuration()
    client = automox.ApiClient(configuration=config)
    client.default_headers['Authorization'] = f"Bearer {api_key}"
    devices_api = DevicesApi(client)
    groups_api = GroupsApi(client)

    # Add devices to dict for quick lookup by ip/hostname
    device_hostname_map, device_ip_map = map_automox_devices(devices_api)

    # Get all groups
    groups, default_server_group_id = get_automox_groups(groups_api)

    # Pull computers from LDAP/Active Directory
    connect = ldap.initialize(ldap_url)
    try:
        connect.protocol_version = ldap.VERSION3
        connect.set_option(ldap.OPT_REFERRALS, 0)
        bind = connect.simple_bind_s(ldap_user, ldap_password)
    except Exception as e:
        exit(f"Failed to connect to {ldap_url}: {e}")

    page_control = SimplePagedResultsControl(True, size=1, cookie='')

    while True:
        # Page LDAP Query
        response = connect.search_ext(base=ldap_base, scope=ldap.SCOPE_SUBTREE,
                                      filterstr=ldap_computer_filter,
                                      attrlist=ldap_attributes,
                                      serverctrls=[page_control])
        rtype, ldap_devices, rmsgid, serverctrls = connect.result3(response)

        # Process devices returned by LDAP/AD
        for device_dn, d in ldap_devices:
            if device_dn is None:
                continue

            device = None

            try:
                device_hostname = d[hn_attribute][0].decode("utf-8").lower()
            except Exception:
                device_hostname = None

            try:
                device_fqdn = d.get(fqdn_attribute)[0].decode("utf-8")
            except Exception:
                device_fqdn = None

            # Check by hostname first
            if device_hostname and device_hostname in device_hostname_map:
                device = device_hostname_map.get(device_hostname, None)
            # Check for device by ip as fallback
            elif device_fqdn and device_fqdn in device_ip_map:
                device = device_ip_map.get(device_fqdn, None)

            # Device found, update group based on the computer's AD OU
            if device:
                # Pull group value based on DN of computer
                group_value = get_ou_from_dn(device_dn)
                # Trim to max group name limit
                group_value = group_value[:44]

                group = next((g for g in groups if g.name == group_value), None)

                # Gather current device tags not prefixed with AD-
                tags = set()
                for t in device.tags:
                    if not t.startswith(tag_prefix):
                        tags.add(t)

                managed_tags = set()
                for ta in tag_attributes:
                    tag_value = d.get(ta)
                    if tag_value is not None:
                        managed_tags.add(f"{tag_prefix}{ta}-{tag_value[0].decode('utf-8')}")
                tags.update(managed_tags)

                # Create group if it doesn't exist yet
                if group is None:
                    g = ServerGroupCreateOrUpdateRequest(name=group_value, refresh_interval=1440,
                                                         parent_server_group_id=default_server_group_id,
                                                         ui_color="#FFFF00")
                    try:
                        group = groups_api.create_server_group(o=org_id, body=g)
                        groups.append(group)
                        print(f"Successfully created Group ID {group.id} ({group_value})")
                        counter_created_groups += 1
                    except Exception as e:
                        print(f"Failed to create group [{group_value}]: {e}")
                        # Without a group there is nothing to assign the device to
                        continue

                server_update = ServersIdBody(server_group_id=group.id, exception=device.exception,
                                              tags=list(tags))
                try:
                    update_result = devices_api.update_device(o=org_id, id=device.id, body=server_update)
                    print(f"Successfully updated Device ID {device.id} ({device_hostname}, {device_fqdn})")
                    counter_matched_devices += 1
                except Exception as e:
                    print(f"Failed to update Device ID {device.id} ({device_hostname}, {device_fqdn}): {e}")
            else:
                # print(f"No device matching hostname [{device_hostname}] or fqdn [{device_fqdn}]")
                counter_unmatched_devices += 1

        # Should we page again
        controls = [control for control in serverctrls
                    if control.controlType == SimplePagedResultsControl.controlType]
        if not controls:
            print('The server ignores RFC 2696 control')
            break

        if not controls[0].cookie:
            break

        page_control.cookie = controls[0].cookie

    print(f"Script complete; matched devices: {counter_matched_devices}, unmatched devices: "
          f"{counter_unmatched_devices}, groups created: {counter_created_groups}")
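# For illustration (not executed by the script): get_ou_from_dn() drops the
# leading RDN and returns the parent container, which becomes the group name.
#
#     get_ou_from_dn('CN=HOST01,OU=Servers,DC=example,DC=com')
#     # -> 'OU=Servers,DC=example,DC=com'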
StarcoderdataPython
1797749
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'gui.ui' # # Created by: PyQt4 UI code generator 4.11.4 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_MainWindow(object): def setupUi(self, MainWindow): MainWindow.setObjectName(_fromUtf8("MainWindow")) MainWindow.setWindowModality(QtCore.Qt.ApplicationModal) MainWindow.resize(466, 700) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth()) MainWindow.setSizePolicy(sizePolicy) MainWindow.setMinimumSize(QtCore.QSize(466, 700)) MainWindow.setMaximumSize(QtCore.QSize(466, 700)) MainWindow.setBaseSize(QtCore.QSize(0, 0)) self.centralwidget = QtGui.QWidget(MainWindow) self.centralwidget.setObjectName(_fromUtf8("centralwidget")) self.layoutWidget = QtGui.QWidget(self.centralwidget) self.layoutWidget.setGeometry(QtCore.QRect(10, 10, 489, 710)) self.layoutWidget.setObjectName(_fromUtf8("layoutWidget")) self.verticalLayout = QtGui.QVBoxLayout(self.layoutWidget) self.verticalLayout.setSizeConstraint(QtGui.QLayout.SetFixedSize) self.verticalLayout.setSpacing(0) self.verticalLayout.setObjectName(_fromUtf8("verticalLayout")) self.gr_input = QtGui.QGroupBox(self.layoutWidget) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.gr_input.sizePolicy().hasHeightForWidth()) self.gr_input.setSizePolicy(sizePolicy) self.gr_input.setMinimumSize(QtCore.QSize(430, 0)) self.gr_input.setMaximumSize(QtCore.QSize(430, 16777215)) self.gr_input.setObjectName(_fromUtf8("gr_input")) self.verticalLayout_7 = QtGui.QVBoxLayout(self.gr_input) self.verticalLayout_7.setSpacing(0) self.verticalLayout_7.setObjectName(_fromUtf8("verticalLayout_7")) self.horizontalLayout_4 = QtGui.QHBoxLayout() self.horizontalLayout_4.setSizeConstraint(QtGui.QLayout.SetFixedSize) self.horizontalLayout_4.setSpacing(0) self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4")) self.lb_run = QtGui.QLabel(self.gr_input) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.lb_run.sizePolicy().hasHeightForWidth()) self.lb_run.setSizePolicy(sizePolicy) self.lb_run.setMinimumSize(QtCore.QSize(40, 0)) self.lb_run.setMaximumSize(QtCore.QSize(40, 24)) self.lb_run.setAlignment( QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter ) self.lb_run.setObjectName(_fromUtf8("lb_run")) self.horizontalLayout_4.addWidget(self.lb_run) self.le_run = QtGui.QLineEdit(self.gr_input) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.le_run.sizePolicy().hasHeightForWidth()) self.le_run.setSizePolicy(sizePolicy) self.le_run.setMinimumSize(QtCore.QSize(280, 0)) 
self.le_run.setMaximumSize(QtCore.QSize(16777215, 24)) self.le_run.setObjectName(_fromUtf8("le_run")) self.horizontalLayout_4.addWidget(self.le_run) self.pb_run = QtGui.QPushButton(self.gr_input) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.pb_run.sizePolicy().hasHeightForWidth()) self.pb_run.setSizePolicy(sizePolicy) self.pb_run.setMaximumSize(QtCore.QSize(16777215, 24)) self.pb_run.setObjectName(_fromUtf8("pb_run")) self.horizontalLayout_4.addWidget(self.pb_run) self.verticalLayout_7.addLayout(self.horizontalLayout_4) self.horizontalLayout_2 = QtGui.QHBoxLayout() self.horizontalLayout_2.setSizeConstraint(QtGui.QLayout.SetFixedSize) self.horizontalLayout_2.setSpacing(0) self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2")) self.lb_map = QtGui.QLabel(self.gr_input) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.lb_map.sizePolicy().hasHeightForWidth()) self.lb_map.setSizePolicy(sizePolicy) self.lb_map.setMinimumSize(QtCore.QSize(40, 0)) self.lb_map.setMaximumSize(QtCore.QSize(40, 24)) self.lb_map.setAlignment( QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter ) self.lb_map.setObjectName(_fromUtf8("lb_map")) self.horizontalLayout_2.addWidget(self.lb_map) self.le_map = QtGui.QLineEdit(self.gr_input) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.le_map.sizePolicy().hasHeightForWidth()) self.le_map.setSizePolicy(sizePolicy) self.le_map.setMinimumSize(QtCore.QSize(280, 0)) self.le_map.setMaximumSize(QtCore.QSize(16777215, 24)) self.le_map.setObjectName(_fromUtf8("le_map")) self.horizontalLayout_2.addWidget(self.le_map) self.pb_map = QtGui.QPushButton(self.gr_input) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.pb_map.sizePolicy().hasHeightForWidth()) self.pb_map.setSizePolicy(sizePolicy) self.pb_map.setMaximumSize(QtCore.QSize(16777215, 24)) self.pb_map.setObjectName(_fromUtf8("pb_map")) self.horizontalLayout_2.addWidget(self.pb_map) self.verticalLayout_7.addLayout(self.horizontalLayout_2) self.verticalLayout.addWidget(self.gr_input) self.gr_stage = QtGui.QGroupBox(self.layoutWidget) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.gr_stage.sizePolicy().hasHeightForWidth()) self.gr_stage.setSizePolicy(sizePolicy) self.gr_stage.setMinimumSize(QtCore.QSize(440, 0)) self.gr_stage.setMaximumSize(QtCore.QSize(440, 16777215)) self.gr_stage.setObjectName(_fromUtf8("gr_stage")) self.horizontalLayout_16 = QtGui.QHBoxLayout(self.gr_stage) self.horizontalLayout_16.setSizeConstraint(QtGui.QLayout.SetFixedSize) self.horizontalLayout_16.setSpacing(0) self.horizontalLayout_16.setObjectName(_fromUtf8("horizontalLayout_16")) self.tb_align = QtGui.QTabWidget(self.gr_stage) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.tb_align.sizePolicy().hasHeightForWidth()) 
self.tb_align.setSizePolicy(sizePolicy) self.tb_align.setMinimumSize(QtCore.QSize(220, 190)) self.tb_align.setMaximumSize(QtCore.QSize(220, 190)) self.tb_align.setBaseSize(QtCore.QSize(0, 0)) self.tb_align.setTabPosition(QtGui.QTabWidget.West) self.tb_align.setObjectName(_fromUtf8("tb_align")) self.tab_print = QtGui.QWidget() self.tab_print.setObjectName(_fromUtf8("tab_print")) self.lb_printpix = QtGui.QLabel(self.tab_print) self.lb_printpix.setGeometry(QtCore.QRect(0, 0, 191, 181)) self.lb_printpix.setText(_fromUtf8("")) self.lb_printpix.setPixmap( QtGui.QPixmap(_fromUtf8(":/alignment/alignprint.png")) ) self.lb_printpix.setScaledContents(False) self.lb_printpix.setAlignment(QtCore.Qt.AlignCenter) self.lb_printpix.setObjectName(_fromUtf8("lb_printpix")) self.tb_align.addTab(self.tab_print, _fromUtf8("")) self.tab_wafer = QtGui.QWidget() self.tab_wafer.setObjectName(_fromUtf8("tab_wafer")) self.lb_waferpix = QtGui.QLabel(self.tab_wafer) self.lb_waferpix.setGeometry(QtCore.QRect(0, 0, 191, 181)) self.lb_waferpix.setText(_fromUtf8("")) self.lb_waferpix.setPixmap( QtGui.QPixmap(_fromUtf8(":/alignment/alignwafer.png")) ) self.lb_waferpix.setScaledContents(False) self.lb_waferpix.setAlignment(QtCore.Qt.AlignCenter) self.lb_waferpix.setObjectName(_fromUtf8("lb_waferpix")) self.tb_align.addTab(self.tab_wafer, _fromUtf8("")) self.horizontalLayout_16.addWidget(self.tb_align) spacerItem = QtGui.QSpacerItem( 8, 20, QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Minimum ) self.horizontalLayout_16.addItem(spacerItem) self.verticalLayout_8 = QtGui.QVBoxLayout() self.verticalLayout_8.setSizeConstraint(QtGui.QLayout.SetFixedSize) self.verticalLayout_8.setSpacing(0) self.verticalLayout_8.setObjectName(_fromUtf8("verticalLayout_8")) self.lb_stagcoord = QtGui.QLabel(self.gr_stage) self.lb_stagcoord.setObjectName(_fromUtf8("lb_stagcoord")) self.verticalLayout_8.addWidget(self.lb_stagcoord) spacerItem1 = QtGui.QSpacerItem( 20, 8, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed ) self.verticalLayout_8.addItem(spacerItem1) self.tw_stage = QtGui.QTableWidget(self.gr_stage) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.tw_stage.sizePolicy().hasHeightForWidth()) self.tw_stage.setSizePolicy(sizePolicy) self.tw_stage.setMaximumSize(QtCore.QSize(180, 97)) self.tw_stage.setObjectName(_fromUtf8("tw_stage")) self.tw_stage.setColumnCount(3) self.tw_stage.setRowCount(3) item = QtGui.QTableWidgetItem() self.tw_stage.setVerticalHeaderItem(0, item) item = QtGui.QTableWidgetItem() self.tw_stage.setVerticalHeaderItem(1, item) item = QtGui.QTableWidgetItem() self.tw_stage.setVerticalHeaderItem(2, item) item = QtGui.QTableWidgetItem() self.tw_stage.setHorizontalHeaderItem(0, item) item = QtGui.QTableWidgetItem() self.tw_stage.setHorizontalHeaderItem(1, item) item = QtGui.QTableWidgetItem() self.tw_stage.setHorizontalHeaderItem(2, item) item = QtGui.QTableWidgetItem() self.tw_stage.setItem(0, 0, item) self.tw_stage.horizontalHeader().setDefaultSectionSize(54) self.tw_stage.horizontalHeader().setMinimumSectionSize(54) self.tw_stage.verticalHeader().setVisible(True) self.tw_stage.verticalHeader().setDefaultSectionSize(24) self.tw_stage.verticalHeader().setMinimumSectionSize(24) self.verticalLayout_8.addWidget(self.tw_stage) spacerItem2 = QtGui.QSpacerItem( 20, 8, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed ) self.verticalLayout_8.addItem(spacerItem2) self.horizontalLayout_15 = 
QtGui.QHBoxLayout() self.horizontalLayout_15.setSizeConstraint(QtGui.QLayout.SetFixedSize) self.horizontalLayout_15.setSpacing(0) self.horizontalLayout_15.setObjectName(_fromUtf8("horizontalLayout_15")) self.lb_stagz = QtGui.QLabel(self.gr_stage) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.lb_stagz.sizePolicy().hasHeightForWidth()) self.lb_stagz.setSizePolicy(sizePolicy) self.lb_stagz.setMinimumSize(QtCore.QSize(32, 0)) self.lb_stagz.setMaximumSize(QtCore.QSize(32, 24)) self.lb_stagz.setAlignment( QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter ) self.lb_stagz.setObjectName(_fromUtf8("lb_stagz")) self.horizontalLayout_15.addWidget(self.lb_stagz) self.le_stagz = QtGui.QLineEdit(self.gr_stage) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.le_stagz.sizePolicy().hasHeightForWidth()) self.le_stagz.setSizePolicy(sizePolicy) self.le_stagz.setMinimumSize(QtCore.QSize(54, 24)) self.le_stagz.setMaximumSize(QtCore.QSize(54, 24)) self.le_stagz.setObjectName(_fromUtf8("le_stagz")) self.horizontalLayout_15.addWidget(self.le_stagz) spacerItem3 = QtGui.QSpacerItem( 18, 20, QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Minimum ) self.horizontalLayout_15.addItem(spacerItem3) self.cb_rotonly = QtGui.QCheckBox(self.gr_stage) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.cb_rotonly.sizePolicy().hasHeightForWidth()) self.cb_rotonly.setSizePolicy(sizePolicy) self.cb_rotonly.setMinimumSize(QtCore.QSize(78, 0)) self.cb_rotonly.setMaximumSize(QtCore.QSize(78, 16777215)) self.cb_rotonly.setObjectName(_fromUtf8("cb_rotonly")) self.horizontalLayout_15.addWidget(self.cb_rotonly) self.verticalLayout_8.addLayout(self.horizontalLayout_15) spacerItem4 = QtGui.QSpacerItem( 20, 8, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed ) self.verticalLayout_8.addItem(spacerItem4) self.le_waferignore = QtGui.QLabel(self.gr_stage) self.le_waferignore.setTextFormat(QtCore.Qt.RichText) self.le_waferignore.setObjectName(_fromUtf8("le_waferignore")) self.verticalLayout_8.addWidget(self.le_waferignore) self.lb_rotonly = QtGui.QLabel(self.gr_stage) self.lb_rotonly.setTextFormat(QtCore.Qt.RichText) self.lb_rotonly.setObjectName(_fromUtf8("lb_rotonly")) self.verticalLayout_8.addWidget(self.lb_rotonly) self.horizontalLayout_16.addLayout(self.verticalLayout_8) self.verticalLayout.addWidget(self.gr_stage) self.horizontalLayout = QtGui.QHBoxLayout() self.horizontalLayout.setSizeConstraint(QtGui.QLayout.SetFixedSize) self.horizontalLayout.setSpacing(0) self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout")) self.gr_filter = QtGui.QGroupBox(self.layoutWidget) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.gr_filter.sizePolicy().hasHeightForWidth()) self.gr_filter.setSizePolicy(sizePolicy) self.gr_filter.setMinimumSize(QtCore.QSize(220, 90)) self.gr_filter.setMaximumSize(QtCore.QSize(220, 90)) self.gr_filter.setObjectName(_fromUtf8("gr_filter")) self.verticalLayout_2 = QtGui.QVBoxLayout(self.gr_filter) self.verticalLayout_2.setSizeConstraint(QtGui.QLayout.SetFixedSize) 
self.verticalLayout_2.setSpacing(0) self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2")) self.horizontalLayout_5 = QtGui.QHBoxLayout() self.horizontalLayout_5.setSpacing(0) self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5")) self.lb_keepcode = QtGui.QLabel(self.gr_filter) sizePolicy = QtGui.QSizePolicy( QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred ) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.lb_keepcode.sizePolicy().hasHeightForWidth()) self.lb_keepcode.setSizePolicy(sizePolicy) self.lb_keepcode.setMinimumSize(QtCore.QSize(60, 0)) self.lb_keepcode.setAlignment( QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter ) self.lb_keepcode.setObjectName(_fromUtf8("lb_keepcode")) self.horizontalLayout_5.addWidget(self.lb_keepcode) self.le_keepcode = QtGui.QLineEdit(self.gr_filter) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.le_keepcode.sizePolicy().hasHeightForWidth()) self.le_keepcode.setSizePolicy(sizePolicy) self.le_keepcode.setMinimumSize(QtCore.QSize(103, 24)) self.le_keepcode.setMaximumSize(QtCore.QSize(130, 24)) self.le_keepcode.setObjectName(_fromUtf8("le_keepcode")) self.horizontalLayout_5.addWidget(self.le_keepcode) self.verticalLayout_2.addLayout(self.horizontalLayout_5) self.horizontalLayout_7 = QtGui.QHBoxLayout() self.horizontalLayout_7.setSpacing(0) self.horizontalLayout_7.setObjectName(_fromUtf8("horizontalLayout_7")) self.lb_omitch = QtGui.QLabel(self.gr_filter) sizePolicy = QtGui.QSizePolicy( QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred ) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.lb_omitch.sizePolicy().hasHeightForWidth()) self.lb_omitch.setSizePolicy(sizePolicy) self.lb_omitch.setMinimumSize(QtCore.QSize(60, 0)) self.lb_omitch.setMaximumSize(QtCore.QSize(60, 16777215)) self.lb_omitch.setAlignment( QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter ) self.lb_omitch.setObjectName(_fromUtf8("lb_omitch")) self.horizontalLayout_7.addWidget(self.lb_omitch) self.le_omitch = QtGui.QLineEdit(self.gr_filter) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.le_omitch.sizePolicy().hasHeightForWidth()) self.le_omitch.setSizePolicy(sizePolicy) self.le_omitch.setMinimumSize(QtCore.QSize(130, 24)) self.le_omitch.setMaximumSize(QtCore.QSize(130, 24)) self.le_omitch.setObjectName(_fromUtf8("le_omitch")) self.horizontalLayout_7.addWidget(self.le_omitch) self.verticalLayout_2.addLayout(self.horizontalLayout_7) self.horizontalLayout.addWidget(self.gr_filter) self.gr_platecoord = QtGui.QGroupBox(self.layoutWidget) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth( self.gr_platecoord.sizePolicy().hasHeightForWidth() ) self.gr_platecoord.setSizePolicy(sizePolicy) self.gr_platecoord.setMinimumSize(QtCore.QSize(220, 90)) self.gr_platecoord.setMaximumSize(QtCore.QSize(220, 90)) self.gr_platecoord.setObjectName(_fromUtf8("gr_platecoord")) self.verticalLayout_4 = QtGui.QVBoxLayout(self.gr_platecoord) self.verticalLayout_4.setSizeConstraint(QtGui.QLayout.SetFixedSize) self.verticalLayout_4.setSpacing(0) 
self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4")) self.horizontalLayout_11 = QtGui.QHBoxLayout() self.horizontalLayout_11.setSpacing(0) self.horizontalLayout_11.setObjectName(_fromUtf8("horizontalLayout_11")) self.lb_xmin = QtGui.QLabel(self.gr_platecoord) sizePolicy = QtGui.QSizePolicy( QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred ) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.lb_xmin.sizePolicy().hasHeightForWidth()) self.lb_xmin.setSizePolicy(sizePolicy) self.lb_xmin.setMinimumSize(QtCore.QSize(42, 0)) self.lb_xmin.setMaximumSize(QtCore.QSize(42, 16777215)) self.lb_xmin.setAlignment( QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter ) self.lb_xmin.setObjectName(_fromUtf8("lb_xmin")) self.horizontalLayout_11.addWidget(self.lb_xmin) self.le_xmin = QtGui.QLineEdit(self.gr_platecoord) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.le_xmin.sizePolicy().hasHeightForWidth()) self.le_xmin.setSizePolicy(sizePolicy) self.le_xmin.setMinimumSize(QtCore.QSize(58, 24)) self.le_xmin.setMaximumSize(QtCore.QSize(58, 24)) self.le_xmin.setCursor(QtGui.QCursor(QtCore.Qt.IBeamCursor)) self.le_xmin.setObjectName(_fromUtf8("le_xmin")) self.horizontalLayout_11.addWidget(self.le_xmin) self.lb_xmax = QtGui.QLabel(self.gr_platecoord) sizePolicy = QtGui.QSizePolicy( QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred ) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.lb_xmax.sizePolicy().hasHeightForWidth()) self.lb_xmax.setSizePolicy(sizePolicy) self.lb_xmax.setMinimumSize(QtCore.QSize(42, 0)) self.lb_xmax.setMaximumSize(QtCore.QSize(42, 16777215)) self.lb_xmax.setAlignment( QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter ) self.lb_xmax.setObjectName(_fromUtf8("lb_xmax")) self.horizontalLayout_11.addWidget(self.lb_xmax) self.le_xmax = QtGui.QLineEdit(self.gr_platecoord) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.le_xmax.sizePolicy().hasHeightForWidth()) self.le_xmax.setSizePolicy(sizePolicy) self.le_xmax.setMinimumSize(QtCore.QSize(58, 24)) self.le_xmax.setMaximumSize(QtCore.QSize(58, 24)) self.le_xmax.setObjectName(_fromUtf8("le_xmax")) self.horizontalLayout_11.addWidget(self.le_xmax) self.verticalLayout_4.addLayout(self.horizontalLayout_11) self.horizontalLayout_6 = QtGui.QHBoxLayout() self.horizontalLayout_6.setSpacing(0) self.horizontalLayout_6.setObjectName(_fromUtf8("horizontalLayout_6")) self.lb_ymin = QtGui.QLabel(self.gr_platecoord) sizePolicy = QtGui.QSizePolicy( QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred ) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.lb_ymin.sizePolicy().hasHeightForWidth()) self.lb_ymin.setSizePolicy(sizePolicy) self.lb_ymin.setMinimumSize(QtCore.QSize(42, 0)) self.lb_ymin.setMaximumSize(QtCore.QSize(42, 16777215)) self.lb_ymin.setAlignment( QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter ) self.lb_ymin.setObjectName(_fromUtf8("lb_ymin")) self.horizontalLayout_6.addWidget(self.lb_ymin) self.le_ymin = QtGui.QLineEdit(self.gr_platecoord) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) 
sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.le_ymin.sizePolicy().hasHeightForWidth()) self.le_ymin.setSizePolicy(sizePolicy) self.le_ymin.setMinimumSize(QtCore.QSize(58, 24)) self.le_ymin.setMaximumSize(QtCore.QSize(58, 24)) self.le_ymin.setObjectName(_fromUtf8("le_ymin")) self.horizontalLayout_6.addWidget(self.le_ymin) self.lb_ymax = QtGui.QLabel(self.gr_platecoord) sizePolicy = QtGui.QSizePolicy( QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred ) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.lb_ymax.sizePolicy().hasHeightForWidth()) self.lb_ymax.setSizePolicy(sizePolicy) self.lb_ymax.setMinimumSize(QtCore.QSize(42, 0)) self.lb_ymax.setMaximumSize(QtCore.QSize(42, 16777215)) self.lb_ymax.setAlignment( QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter ) self.lb_ymax.setObjectName(_fromUtf8("lb_ymax")) self.horizontalLayout_6.addWidget(self.lb_ymax) self.le_ymax = QtGui.QLineEdit(self.gr_platecoord) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.le_ymax.sizePolicy().hasHeightForWidth()) self.le_ymax.setSizePolicy(sizePolicy) self.le_ymax.setMinimumSize(QtCore.QSize(58, 24)) self.le_ymax.setMaximumSize(QtCore.QSize(58, 24)) self.le_ymax.setObjectName(_fromUtf8("le_ymax")) self.horizontalLayout_6.addWidget(self.le_ymax) self.verticalLayout_4.addLayout(self.horizontalLayout_6) self.horizontalLayout.addWidget(self.gr_platecoord) self.verticalLayout.addLayout(self.horizontalLayout) self.horizontalLayout_12 = QtGui.QHBoxLayout() self.horizontalLayout_12.setSizeConstraint(QtGui.QLayout.SetFixedSize) self.horizontalLayout_12.setSpacing(0) self.horizontalLayout_12.setObjectName(_fromUtf8("horizontalLayout_12")) self.gr_skip = QtGui.QGroupBox(self.layoutWidget) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.gr_skip.sizePolicy().hasHeightForWidth()) self.gr_skip.setSizePolicy(sizePolicy) self.gr_skip.setMinimumSize(QtCore.QSize(200, 150)) self.gr_skip.setMaximumSize(QtCore.QSize(200, 150)) self.gr_skip.setObjectName(_fromUtf8("gr_skip")) self.verticalLayout_5 = QtGui.QVBoxLayout(self.gr_skip) self.verticalLayout_5.setSizeConstraint(QtGui.QLayout.SetFixedSize) self.verticalLayout_5.setSpacing(0) self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5")) self.horizontalLayout_9 = QtGui.QHBoxLayout() self.horizontalLayout_9.setSizeConstraint(QtGui.QLayout.SetFixedSize) self.horizontalLayout_9.setSpacing(0) self.horizontalLayout_9.setObjectName(_fromUtf8("horizontalLayout_9")) self.le_sampleskip = QtGui.QLineEdit(self.gr_skip) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth( self.le_sampleskip.sizePolicy().hasHeightForWidth() ) self.le_sampleskip.setSizePolicy(sizePolicy) self.le_sampleskip.setMinimumSize(QtCore.QSize(60, 24)) self.le_sampleskip.setMaximumSize(QtCore.QSize(60, 24)) self.le_sampleskip.setObjectName(_fromUtf8("le_sampleskip")) self.horizontalLayout_9.addWidget(self.le_sampleskip) self.lb_sampleskip = QtGui.QLabel(self.gr_skip) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) 
sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth( self.lb_sampleskip.sizePolicy().hasHeightForWidth() ) self.lb_sampleskip.setSizePolicy(sizePolicy) self.lb_sampleskip.setMinimumSize(QtCore.QSize(50, 0)) self.lb_sampleskip.setAlignment( QtCore.Qt.AlignLeading | QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter ) self.lb_sampleskip.setObjectName(_fromUtf8("lb_sampleskip")) self.horizontalLayout_9.addWidget(self.lb_sampleskip) self.verticalLayout_5.addLayout(self.horizontalLayout_9) self.horizontalLayout_8 = QtGui.QHBoxLayout() self.horizontalLayout_8.setSizeConstraint(QtGui.QLayout.SetFixedSize) self.horizontalLayout_8.setSpacing(0) self.horizontalLayout_8.setObjectName(_fromUtf8("horizontalLayout_8")) self.le_colskip = QtGui.QLineEdit(self.gr_skip) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.le_colskip.sizePolicy().hasHeightForWidth()) self.le_colskip.setSizePolicy(sizePolicy) self.le_colskip.setMinimumSize(QtCore.QSize(60, 24)) self.le_colskip.setMaximumSize(QtCore.QSize(60, 24)) self.le_colskip.setObjectName(_fromUtf8("le_colskip")) self.horizontalLayout_8.addWidget(self.le_colskip) self.lb_colskip = QtGui.QLabel(self.gr_skip) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.lb_colskip.sizePolicy().hasHeightForWidth()) self.lb_colskip.setSizePolicy(sizePolicy) self.lb_colskip.setMinimumSize(QtCore.QSize(50, 0)) self.lb_colskip.setAlignment( QtCore.Qt.AlignLeading | QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter ) self.lb_colskip.setObjectName(_fromUtf8("lb_colskip")) self.horizontalLayout_8.addWidget(self.lb_colskip) self.verticalLayout_5.addLayout(self.horizontalLayout_8) self.horizontalLayout_14 = QtGui.QHBoxLayout() self.horizontalLayout_14.setSizeConstraint(QtGui.QLayout.SetFixedSize) self.horizontalLayout_14.setSpacing(0) self.horizontalLayout_14.setObjectName(_fromUtf8("horizontalLayout_14")) self.le_rowskip = QtGui.QLineEdit(self.gr_skip) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.le_rowskip.sizePolicy().hasHeightForWidth()) self.le_rowskip.setSizePolicy(sizePolicy) self.le_rowskip.setMinimumSize(QtCore.QSize(60, 24)) self.le_rowskip.setMaximumSize(QtCore.QSize(60, 24)) self.le_rowskip.setObjectName(_fromUtf8("le_rowskip")) self.horizontalLayout_14.addWidget(self.le_rowskip) self.lb_rowskip = QtGui.QLabel(self.gr_skip) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.lb_rowskip.sizePolicy().hasHeightForWidth()) self.lb_rowskip.setSizePolicy(sizePolicy) self.lb_rowskip.setMinimumSize(QtCore.QSize(50, 0)) self.lb_rowskip.setAlignment( QtCore.Qt.AlignLeading | QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter ) self.lb_rowskip.setObjectName(_fromUtf8("lb_rowskip")) self.horizontalLayout_14.addWidget(self.lb_rowskip) self.verticalLayout_5.addLayout(self.horizontalLayout_14) self.horizontalLayout_10 = QtGui.QHBoxLayout() self.horizontalLayout_10.setSizeConstraint(QtGui.QLayout.SetFixedSize) self.horizontalLayout_10.setSpacing(0) self.horizontalLayout_10.setObjectName(_fromUtf8("horizontalLayout_10")) self.le_atskip = QtGui.QLineEdit(self.gr_skip) 
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.le_atskip.sizePolicy().hasHeightForWidth())
        self.le_atskip.setSizePolicy(sizePolicy)
        self.le_atskip.setMinimumSize(QtCore.QSize(60, 24))
        self.le_atskip.setMaximumSize(QtCore.QSize(60, 24))
        self.le_atskip.setObjectName(_fromUtf8("le_atskip"))
        self.horizontalLayout_10.addWidget(self.le_atskip)
        self.lb_atskip = QtGui.QLabel(self.gr_skip)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.lb_atskip.sizePolicy().hasHeightForWidth())
        self.lb_atskip.setSizePolicy(sizePolicy)
        self.lb_atskip.setMinimumSize(QtCore.QSize(50, 0))
        self.lb_atskip.setAlignment(QtCore.Qt.AlignLeading | QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter)
        self.lb_atskip.setObjectName(_fromUtf8("lb_atskip"))
        self.horizontalLayout_10.addWidget(self.lb_atskip)
        self.verticalLayout_5.addLayout(self.horizontalLayout_10)
        self.horizontalLayout_12.addWidget(self.gr_skip)
        self.gr_sample = QtGui.QGroupBox(self.layoutWidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.gr_sample.sizePolicy().hasHeightForWidth())
        self.gr_sample.setSizePolicy(sizePolicy)
        self.gr_sample.setMinimumSize(QtCore.QSize(285, 136))
        self.gr_sample.setMaximumSize(QtCore.QSize(285, 136))
        self.gr_sample.setObjectName(_fromUtf8("gr_sample"))
        self.verticalLayout_6 = QtGui.QVBoxLayout(self.gr_sample)
        self.verticalLayout_6.setSizeConstraint(QtGui.QLayout.SetFixedSize)
        self.verticalLayout_6.setSpacing(0)
        self.verticalLayout_6.setObjectName(_fromUtf8("verticalLayout_6"))
        self.verticalLayout_3 = QtGui.QVBoxLayout()
        self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
        self.te_samplelist = QtGui.QTextEdit(self.gr_sample)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.te_samplelist.sizePolicy().hasHeightForWidth())
        self.te_samplelist.setSizePolicy(sizePolicy)
        self.te_samplelist.setMinimumSize(QtCore.QSize(280, 68))
        self.te_samplelist.setMaximumSize(QtCore.QSize(280, 68))
        self.te_samplelist.setObjectName(_fromUtf8("te_samplelist"))
        self.verticalLayout_3.addWidget(self.te_samplelist)
        spacerItem5 = QtGui.QSpacerItem(20, 4, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
        self.verticalLayout_3.addItem(spacerItem5)
        self.horizontalLayout_13 = QtGui.QHBoxLayout()
        self.horizontalLayout_13.setSpacing(0)
        self.horizontalLayout_13.setObjectName(_fromUtf8("horizontalLayout_13"))
        self.lb_samplemin = QtGui.QLabel(self.gr_sample)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.lb_samplemin.sizePolicy().hasHeightForWidth())
        self.lb_samplemin.setSizePolicy(sizePolicy)
        self.lb_samplemin.setMinimumSize(QtCore.QSize(80, 0))
        self.lb_samplemin.setMaximumSize(QtCore.QSize(80, 16777215))
        self.lb_samplemin.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter)
        self.lb_samplemin.setObjectName(_fromUtf8("lb_samplemin"))
        self.horizontalLayout_13.addWidget(self.lb_samplemin)
        self.le_samplemin = QtGui.QLineEdit(self.gr_sample)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.le_samplemin.sizePolicy().hasHeightForWidth())
        self.le_samplemin.setSizePolicy(sizePolicy)
        self.le_samplemin.setMinimumSize(QtCore.QSize(60, 24))
        self.le_samplemin.setMaximumSize(QtCore.QSize(60, 24))
        self.le_samplemin.setObjectName(_fromUtf8("le_samplemin"))
        self.horizontalLayout_13.addWidget(self.le_samplemin)
        self.lb_samplemax = QtGui.QLabel(self.gr_sample)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.lb_samplemax.sizePolicy().hasHeightForWidth())
        self.lb_samplemax.setSizePolicy(sizePolicy)
        self.lb_samplemax.setMinimumSize(QtCore.QSize(80, 0))
        self.lb_samplemax.setMaximumSize(QtCore.QSize(80, 16777215))
        self.lb_samplemax.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter)
        self.lb_samplemax.setObjectName(_fromUtf8("lb_samplemax"))
        self.horizontalLayout_13.addWidget(self.lb_samplemax)
        self.le_samplemax = QtGui.QLineEdit(self.gr_sample)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.le_samplemax.sizePolicy().hasHeightForWidth())
        self.le_samplemax.setSizePolicy(sizePolicy)
        self.le_samplemax.setMinimumSize(QtCore.QSize(60, 24))
        self.le_samplemax.setMaximumSize(QtCore.QSize(60, 24))
        self.le_samplemax.setObjectName(_fromUtf8("le_samplemax"))
        self.horizontalLayout_13.addWidget(self.le_samplemax)
        self.verticalLayout_3.addLayout(self.horizontalLayout_13)
        self.verticalLayout_6.addLayout(self.verticalLayout_3)
        self.horizontalLayout_12.addWidget(self.gr_sample)
        self.verticalLayout.addLayout(self.horizontalLayout_12)
        spacerItem6 = QtGui.QSpacerItem(20, 8, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
        self.verticalLayout.addItem(spacerItem6)
        self.horizontalLayout_17 = QtGui.QHBoxLayout()
        self.horizontalLayout_17.setObjectName(_fromUtf8("horizontalLayout_17"))
        self.pb_preview = QtGui.QPushButton(self.layoutWidget)
        self.pb_preview.setObjectName(_fromUtf8("pb_preview"))
        self.horizontalLayout_17.addWidget(self.pb_preview)
        self.pb_alignsave = QtGui.QPushButton(self.layoutWidget)
        self.pb_alignsave.setObjectName(_fromUtf8("pb_alignsave"))
        self.horizontalLayout_17.addWidget(self.pb_alignsave)
        self.verticalLayout.addLayout(self.horizontalLayout_17)
        spacerItem7 = QtGui.QSpacerItem(20, 8, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
        self.verticalLayout.addItem(spacerItem7)
        self.br_outputlog = QtGui.QTextBrowser(self.layoutWidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.br_outputlog.sizePolicy().hasHeightForWidth())
        self.br_outputlog.setSizePolicy(sizePolicy)
        self.br_outputlog.setMaximumSize(QtCore.QSize(16777215, 110))
        self.br_outputlog.setObjectName(_fromUtf8("br_outputlog"))
        self.verticalLayout.addWidget(self.br_outputlog)
        MainWindow.setCentralWidget(self.centralwidget)

        self.retranslateUi(MainWindow)
        self.tb_align.setCurrentIndex(0)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        MainWindow.setWindowTitle(_translate("MainWindow", "Orbis Map Alignment", None))
        self.gr_input.setTitle(_translate("MainWindow", "Input:", None))
        self.lb_run.setText(_translate("MainWindow", "Run", None))
        self.pb_run.setText(_translate("MainWindow", "choose run", None))
        self.lb_map.setText(_translate("MainWindow", "Map", None))
        self.pb_map.setText(_translate("MainWindow", "choose map", None))
        self.gr_stage.setTitle(_translate("MainWindow", "Stage Alignment:", None))
        self.tb_align.setTabText(self.tb_align.indexOf(self.tab_print), _translate("MainWindow", "Print", None))
        self.tb_align.setTabText(self.tb_align.indexOf(self.tab_wafer), _translate("MainWindow", "Wafer", None))
        self.lb_stagcoord.setText(_translate("MainWindow", "Use stage x, y, z from Orbis Vision.", None))
        item = self.tw_stage.verticalHeaderItem(0)
        item.setText(_translate("MainWindow", "A", None))
        item = self.tw_stage.verticalHeaderItem(1)
        item.setText(_translate("MainWindow", "B", None))
        item = self.tw_stage.verticalHeaderItem(2)
        item.setText(_translate("MainWindow", "C", None))
        item = self.tw_stage.horizontalHeaderItem(0)
        item.setText(_translate("MainWindow", "Sample *", None))
        item = self.tw_stage.horizontalHeaderItem(1)
        item.setText(_translate("MainWindow", "StagX", None))
        item = self.tw_stage.horizontalHeaderItem(2)
        item.setText(_translate("MainWindow", "StagY", None))
        __sortingEnabled = self.tw_stage.isSortingEnabled()
        self.tw_stage.setSortingEnabled(False)
        self.tw_stage.setSortingEnabled(__sortingEnabled)
        self.lb_stagz.setText(_translate("MainWindow", "StagZ ", None))
        self.cb_rotonly.setText(_translate("MainWindow", "Rot. only**", None))
        self.le_waferignore.setText(_translate("MainWindow", '<html><head/><body><p><span style=" font-size:7pt;">*Sample column ignored for wafer.</span></p></body></html>', None))
        self.lb_rotonly.setText(_translate("MainWindow", '<html><head/><body><p><span style=" font-size:7pt;">**Use </span><span style=" font-size:7pt; text-decoration: overline;">BC</span><span style=" font-size:7pt;"> for rotation only, no skew.</span></p></body></html>', None))
        self.gr_filter.setTitle(_translate("MainWindow", "Filters:", None))
        self.lb_keepcode.setText(_translate("MainWindow", "keep codes ", None))
        self.lb_omitch.setText(_translate("MainWindow", "omit Ch's ", None))
        self.gr_platecoord.setTitle(_translate("MainWindow", "Platemap Coordinate Limits:", None))
        self.lb_xmin.setText(_translate("MainWindow", "x_min ", None))
        self.lb_xmax.setText(_translate("MainWindow", "x_max ", None))
        self.lb_ymin.setText(_translate("MainWindow", "y_min ", None))
        self.lb_ymax.setText(_translate("MainWindow", "y_max ", None))
        self.gr_skip.setTitle(_translate("MainWindow", "Skip Every:", None))
        self.lb_sampleskip.setText(_translate("MainWindow", " samples", None))
        self.lb_colskip.setText(_translate("MainWindow", " columns", None))
        self.lb_rowskip.setText(_translate("MainWindow", " rows", None))
        self.lb_atskip.setText(_translate("MainWindow", " at.%", None))
        self.gr_sample.setTitle(_translate("MainWindow", "Override Sample List:", None))
        self.lb_samplemin.setText(_translate("MainWindow", "sample_min ", None))
        self.lb_samplemax.setText(_translate("MainWindow", "sample_max ", None))
        self.pb_preview.setText(_translate("MainWindow", "Preview output (rotation/skew/points)", None))
        self.pb_alignsave.setText(_translate("MainWindow", "Generate stage and sample list", None))
        self.br_outputlog.setHtml(_translate("MainWindow",
            '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0//EN" "http://www.w3.org/TR/REC-html40/strict.dtd">\n'
            '<html><head><meta name="qrichtext" content="1" /><style type="text/css">\n'
            "p, li { white-space: pre-wrap; }\n"
            "</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
            '<p style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" font-size:8pt;">Ready.</span></p></body></html>',
            None))

import alignprint_rc
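
# --- Illustrative usage sketch (not part of the pyuic output) ---
# A generated Ui_MainWindow is normally attached to a live QMainWindow roughly
# as follows; the application scaffolding here is assumed, not taken from this
# file.
#
#   import sys
#   app = QtGui.QApplication(sys.argv)
#   window = QtGui.QMainWindow()
#   ui = Ui_MainWindow()
#   ui.setupUi(window)
#   window.show()
#   sys.exit(app.exec_())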
StarcoderdataPython
3317239
from frictionless import describe_schema # General def test_describe_schema(): schema = describe_schema("data/leading-zeros.csv") assert schema == {"fields": [{"name": "value", "type": "integer"}]}
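
# A hedged companion check (same sample file, standard frictionless Schema
# API): the inferred field names are also exposed via the field_names
# property.
def test_describe_schema_field_names():
    schema = describe_schema("data/leading-zeros.csv")
    assert schema.field_names == ["value"]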
StarcoderdataPython
118551
<gh_stars>0
# import goods packages
from goods.good import Good


class Cleaning(Good):
    def __init__(self):
        pass
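
if __name__ == "__main__":
    # Hedged smoke test: Cleaning takes no constructor arguments because its
    # __init__ is a no-op override of Good's initializer.
    item = Cleaning()
    assert isinstance(item, Good)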
StarcoderdataPython
162450
<filename>lib/gstreamer/vaapi/vpp.py ### ### Copyright (C) 2018-2021 Intel Corporation ### ### SPDX-License-Identifier: BSD-3-Clause ### import os import slash from ....lib.gstreamer.vppbase import BaseVppTest from ....lib.gstreamer.util import have_gst_element from ....lib.gstreamer.vaapi.util import map_best_hw_format, mapformat, mapformatu from ....lib.common import get_media, mapRange, mapRangeWithDefault @slash.requires(*have_gst_element("vaapi")) @slash.requires(*have_gst_element("vaapipostproc")) class VppTest(BaseVppTest): def before(self): super().before() os.environ["GST_VAAPI_DRM_DEVICE"] = get_media().render_device vars(self).update(gstvpp = "vaapipostproc") def map_best_hw_format(self, format, hwformats): return map_best_hw_format(format, hwformats) def map_format(self, format): return mapformat(format) def map_formatu(self, format): return mapformatu(format) def gen_vpp_opts(self): opts = "" procamp = dict( brightness = [ -1.0, 0.0, 1.0], contrast = [ 0.0, 1.0, 2.0], hue = [-180.0, 0.0, 180.0], saturation = [ 0.0, 1.0, 2.0], ) if self.vpp_op in procamp: self.mlevel = mapRangeWithDefault( self.level, [0.0, 50.0, 100.0], procamp[self.vpp_op] ) opts += " {vpp_op}={mlevel}" elif self.vpp_op in ["denoise"]: self.mlevel = mapRange(self.level, [0, 100], [0.0, 1.0]) opts += " denoise={mlevel}" elif self.vpp_op in ["sharpen"]: self.mlevel = mapRange(self.level, [0, 100], [-1.0, 1.0]) opts += " sharpen={mlevel}" elif self.vpp_op in ["deinterlace"]: opts += " deinterlace-mode=1 deinterlace-method={mmethod}" elif self.vpp_op in ["transpose"]: opts += " video-direction={direction}" elif self.vpp_op in ["crop"]: opts += " crop-left={left} crop-right={right} crop-top={top} crop-bottom={bottom}" return opts
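
# Illustrative sketch (not used by the class above): a standalone version of
# the piecewise mapping that gen_vpp_opts relies on. The real helper is
# mapRangeWithDefault from ....lib.common; linear interpolation on each side
# of the midpoint default is an assumption here, and this function name is
# hypothetical.
def _map_range_with_default_sketch(value, src, dst):
    lo, mid, hi = src
    dlo, dmid, dhi = dst
    if value <= mid:
        return dlo + (value - lo) * (dmid - dlo) / (mid - lo)
    return dmid + (value - mid) * (dhi - dmid) / (hi - mid)

# Example: a UI brightness level of 75 maps to 0.5 under these assumptions,
# i.e. the element would be configured as "vaapipostproc brightness=0.5".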
StarcoderdataPython
1741382
<gh_stars>1-10 """ Implements the TypeDefaultBounds NamedTuple, which holds information about an argument. """ from typing import NamedTuple, Any from . import bounds class TypeDefaultBounds(NamedTuple): """ NamedTuple representing the name, type, default value, and bounds of an argument. """ #: the name of the argument arg_name: str #: the type of the argument type_: type = None #: flag to indicate if there is a default value for the argument has_default: bool = False #: the value of the default argument default_value: Any = None #: an instance of the Bound class, representing the bounds of the argument bound_obj: bounds.Bounds = None
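
# Usage sketch (hedged): construct like any NamedTuple. bound_obj is left at
# its None default here because the Bounds constructor is defined elsewhere in
# this package, and the relative import above means this module is not meant
# to be run directly.
#
#   example = TypeDefaultBounds(arg_name="threshold", type_=float,
#                               has_default=True, default_value=0.5)
#   example.has_default  # -> True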
StarcoderdataPython
3282878
<reponame>ericchou1/network-devops-kafka-up-and-running<filename>chapter5/ch5_azure_publisher_clean.py
# Example from https://docs.microsoft.com/en-us/azure/event-hubs/event-hubs-python-get-started-send
import asyncio

from azure.eventhub.aio import EventHubProducerClient
from azure.eventhub import EventData


async def run():
    # Create a producer client to send messages to the event hub.
    # Specify a connection string to your event hubs namespace and
    # the event hub name.
    producer = EventHubProducerClient.from_connection_string(
        conn_str=(
            "Endpoint=sb://eventhub-test.servicebus.windows.net/;"
            "SharedAccessKeyName=manage-event-hub;"
            "SharedAccessKey=<key>;"
            "EntityPath=test-event"
        ),
        eventhub_name="test-event",
    )
    async with producer:
        # Create a batch.
        event_data_batch = await producer.create_batch()

        # Add events to the batch.
        event_data_batch.add(EventData('First event '))
        event_data_batch.add(EventData('Second event'))
        event_data_batch.add(EventData('Third event'))

        # Send the batch of events to the event hub.
        await producer.send_batch(event_data_batch)

loop = asyncio.get_event_loop()
loop.run_until_complete(run())
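
# Hedged note: on Python 3.7+, the two event-loop lines above can be replaced
# with the equivalent one-liner below, which creates and closes the loop
# automatically.
# asyncio.run(run())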
StarcoderdataPython