# ===== zhiweio/data-engineer-scripts :: src/databricks-sql-warehouse-unload/databricks-sql-warehouse-unload.py (GPL-3.0) =====
import concurrent
import functools
import itertools
import os
import shlex
import subprocess
import tempfile
import time
from collections import deque
from concurrent.futures import (
ThreadPoolExecutor,
ProcessPoolExecutor,
FIRST_COMPLETED,
wait,
)
from datetime import datetime
from enum import Enum
from io import BytesIO
from multiprocessing import cpu_count
from pathlib import Path
from typing import List, Tuple, Union
import pyarrow as pa
import pyarrow.parquet as pq
import redo
import requests
from databricks.sdk import WorkspaceClient
from databricks.sdk.errors import DatabricksError, NotFound
from databricks.sdk.service.sql import (
Disposition,
Format,
StatementState,
)
from loguru import logger
from requests.adapters import HTTPAdapter
from requests.exceptions import RequestException, ReadTimeout
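# Unload Databricks SQL Warehouse query results in parallel: the statement runs with
# EXTERNAL_LINKS disposition, result chunks are downloaded as CSV or Arrow stream -> Parquet,
# and the output can optionally be synced to S3 via the AWS CLI.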
LOG = logger
KB = 1024
MB = KB * KB
v_cores = cpu_count()
class ExpiredFileLinkError(RequestException):
    """Raised when a pre-signed result file link has expired (HTTP 403)."""
class Suffix(str, Enum):
csv = ".csv"
parquet = ".parquet"
arrow_stream = ".arrow_stream"
json = ".json"
@redo.retriable(
attempts=5,
sleeptime=10,
max_sleeptime=300,
sleepscale=1,
retry_exceptions=(ReadTimeout,),
)
def _download_as_csv(file_url, savefile, http_session, timeout=30):
"""Query task writes data in 20 MB chunks using uncompressed CSV format
ref: https://www.databricks.com/blog/2021/08/11/how-we-achieved-high-bandwidth-connectivity-with-bi-tools.html
"""
try:
response = http_session.get(file_url, timeout=timeout)
if response.status_code == 403:
raise ExpiredFileLinkError(f"File link {file_url} expired")
if response.status_code != 200:
raise RequestException(
f"File link download error, {response.status_code}\t{response.content}"
)
with open(savefile, "wb") as bos:
bos.write(response.content)
except ReadTimeout as e:
LOG.warning(f"Download result {file_url} timeout, retrying...")
raise ReadTimeout(e)
except Exception as e:
LOG.error(f"Download result {file_url} error, {e}")
if "Read timed out" in str(e):
raise ReadTimeout(e)
raise e
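# Materialize a (possibly LZ4-compressed) Arrow IPC stream into a single Parquet file.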
def convert_to_parquet(arrow_stream: bytes, savefile: str):
input_stream = BytesIO(arrow_stream)
with pa.ipc.open_stream(input_stream) as reader:
with pq.ParquetWriter(savefile, reader.schema) as writer:
table = pa.Table.from_batches(reader, reader.schema)
writer.write_table(table)
@redo.retriable(
attempts=5,
sleeptime=10,
max_sleeptime=300,
sleepscale=1,
retry_exceptions=(ReadTimeout,),
)
def _download_as_parquet(file_url, savefile, http_session, timeout=15):
"""Query task writes data in 2 MB chunks using LZ4 compressed Arrow streaming format"""
try:
response = http_session.get(file_url, timeout=timeout)
if response.status_code == 403:
raise ExpiredFileLinkError(f"File link {file_url} expired")
if response.status_code != 200:
raise RequestException(
f"File link download error, {response.status_code}\t{response.content}"
)
except ReadTimeout as e:
LOG.warning(f"Download result {file_url} timeout, retrying...")
raise ReadTimeout(e)
except Exception as e:
LOG.error(f"Download result {file_url} error, {e}")
if "Read timed out" in str(e):
raise ReadTimeout(e)
raise e
basename, _ = os.path.splitext(savefile)
savefile = basename + Suffix.parquet.value
convert_to_parquet(arrow_stream=response.content, savefile=savefile)
@redo.retriable(
attempts=5,
sleeptime=15,
max_sleeptime=300,
sleepscale=1,
retry_exceptions=(ExpiredFileLinkError,),
)
def download_external_files(
chunk_id: int,
statement_id,
output_path: str,
suffix: str,
chunk_size=100 * MB,
wc: WorkspaceClient = None,
databricks_host=None,
databricks_token=None,
http_session=None,
):
if wc is None:
if not databricks_token or not databricks_host:
raise ValueError(f"Cannot create client for Workspace")
wc = WorkspaceClient(host=databricks_host, token=databricks_token)
if http_session is None:
http_session = requests.session()
LOG.info(f"Get result chunk by index {chunk_id}")
statement = wc.statement_execution.get_statement_result_chunk_n(
statement_id, chunk_id
)
for link in statement.external_links:
file_url = link.external_link
LOG.info(
f"Downloading chunk {chunk_id}, {file_url!r} expire at {link.expiration}"
)
filename = f"results_{statement_id}_{chunk_id}_{datetime.now().strftime('%Y%m%d%H%M%S%f')}"
savefile = os.path.join(output_path, filename)
if suffix == Suffix.csv:
savefile += Suffix.csv.value
download = _download_as_csv
elif suffix == Suffix.arrow_stream:
savefile += Suffix.parquet.value
download = _download_as_parquet
# elif suffix == Suffix.json:
# savefile += Suffix.json.value
else:
raise ValueError(f"Do not support download as {suffix}")
        # if a download times out, assume the external link expired; raising
        # ExpiredFileLinkError makes the outer retry re-fetch fresh links
try:
download(file_url, savefile, http_session)
except ReadTimeout as e:
raise ExpiredFileLinkError(str(e))
LOG.info(f"Downloaded sql result data into {savefile} from link {file_url}")
return True
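# Worker entry point for one process: mounts a session with a larger connection pool,
# then downloads its batch of chunk ids concurrently on a thread pool.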
def threading_download(
chunk_ids: Union[List[int], Tuple[int]],
statement_id,
output_path: str,
chunk_size=100 * MB,
wc: WorkspaceClient = None,
databricks_host=None,
databricks_token=None,
**kwargs,
):
if wc is None:
if not databricks_token or not databricks_host:
raise ValueError(f"Cannot create client for Workspace")
wc = WorkspaceClient(host=databricks_host, token=databricks_token)
pid = os.getpid()
LOG.info(f"Started download process {pid} for chunk {chunk_ids}")
session = requests.session()
adapter = HTTPAdapter(pool_connections=100, pool_maxsize=100)
session.mount("https://", adapter)
session.mount("http://", adapter)
download = functools.partial(
download_external_files,
statement_id=statement_id,
output_path=output_path,
suffix=kwargs.get("suffix", ".csv"),
chunk_size=chunk_size,
wc=wc,
http_session=session,
)
max_workers = len(chunk_ids)
with ThreadPoolExecutor(max_workers) as t_executor:
futures = {
t_executor.submit(download, chunk_id): chunk_id for chunk_id in chunk_ids
}
LOG.info(f"Waiting for download process {pid} to finish...")
for future in concurrent.futures.as_completed(futures):
chunk_id = futures[future]
try:
                ok = future.result()
except Exception as e:
LOG.error(f"Download chunk index {chunk_id} failed: {e}, pid: {pid}")
raise e
else:
LOG.info(f"Download chunk index {chunk_id} ok: {ok}, pid: {pid}")
LOG.info(f"Download process {pid} finished")
return True
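# Split an iterable into lists of at most chunk_size items; the last chunk may be shorter.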
def chunked(iterable, chunk_size):
it = iter(iterable)
while True:
chunk = list(itertools.islice(it, chunk_size))
if not chunk:
return
yield chunk
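# Two-level fan-out: a process pool in which each worker runs threading_download over a
# batch of chunk ids; tq holds pending batches and rq maps in-flight futures to their batch.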
def parallel_download(
wc: WorkspaceClient,
statement_id,
chunk_ids: List[int],
output_path: str,
chunk_size=100 * MB,
processes: int = v_cores,
threads: int = 8,
wait_time=60,
**kwargs,
):
chunks = [tuple(chunk_ids) for chunk_ids in chunked(chunk_ids, threads)]
max_workers = min(processes, len(chunks))
if max_workers <= 0:
max_workers = v_cores
rq = dict() # non-process-safe queue
tq = deque()
tq.extend(chunks)
download = functools.partial(
threading_download,
statement_id=statement_id,
output_path=output_path,
chunk_size=chunk_size,
databricks_host=wc.config.host,
databricks_token=wc.config.token,
suffix=kwargs.get("suffix", ".csv"),
)
with ProcessPoolExecutor(max_workers=max_workers) as p_executor:
while len(tq) > 0:
if len(rq) < max_workers:
chunk_ids = tq.popleft()
future = p_executor.submit(download, chunk_ids=chunk_ids)
rq[future] = chunk_ids
LOG.info(f"Batch downloading task submitted for chunk {chunk_ids}")
else:
done, not_done = wait(
rq, timeout=wait_time, return_when=FIRST_COMPLETED
)
if not done:
LOG.info(
f"Waiting for {wait_time}s for batch downloading task to finish..."
)
continue
for future in done:
chunk_ids = rq[future]
try:
                        ok = future.result()
except Exception as e:
LOG.error(
f"Batch downloading task generated an exception {e}, chunk: {chunk_ids}"
)
raise e
else:
del rq[future]
LOG.info(
f"Batch downloading task completed and removed from queue, chunk: {chunk_ids}"
)
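# Execute the statement with EXTERNAL_LINKS disposition, poll until it succeeds, then
# download every result chunk (in parallel by default) and optionally upload to S3.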
def unload(
query: str,
output_path: str,
wc: WorkspaceClient,
warehouse_id,
catalog=None,
schema=None,
result_format: Format = Format.ARROW_STREAM,
parallel_downloading: bool = True,
parallel_processes: int = v_cores,
parallel_threads: int = 16,
parallel_wait_time: int = 60,
):
if result_format not in (Format.ARROW_STREAM, Format.CSV):
raise ValueError(
f"{result_format} is not supported as the format of the databricks sql result data "
)
if parallel_threads == 1 and parallel_processes == 1:
parallel_downloading = False
if output_path.startswith("s3://"):
local_path = tempfile.mkdtemp(prefix="databricks_sql_warehouse_results")
is_upload_s3 = True
else:
local_path = output_path
Path(local_path).mkdir(parents=True, exist_ok=True)
is_upload_s3 = False
LOG.info(
f"Unloading {query!r} from sql warehouse {warehouse_id!r} and catalog {catalog}.{schema or ''} into {output_path}"
)
statement = wc.statement_execution.execute_statement(
warehouse_id=warehouse_id,
catalog=catalog,
schema=schema,
statement=query,
disposition=Disposition.EXTERNAL_LINKS,
format=result_format,
)
LOG.info(f"Statement execution: {statement.as_dict()}")
while statement.status.state in (StatementState.RUNNING, StatementState.PENDING):
LOG.info(f"Wait for statement to complete, 5s...")
time.sleep(5)
statement = wc.statement_execution.get_statement(statement.statement_id)
LOG.info(f"Refreshing statement info: {statement.as_dict()}")
if statement.status.state != StatementState.SUCCEEDED:
raise DatabricksError(f"Execute statement error, {statement.as_dict()}")
result_suffix = "." + result_format.value.lower()
if not statement.manifest.chunks:
LOG.info(f"No records found")
return
chunk_ids = [chunk.chunk_index for chunk in statement.manifest.chunks]
if parallel_downloading:
parallel_download(
wc,
statement.statement_id,
chunk_ids,
output_path=local_path,
suffix=result_suffix,
processes=parallel_processes,
threads=parallel_threads,
wait_time=parallel_wait_time,
)
else:
for chunk_id in chunk_ids:
download_external_files(
chunk_id,
statement.statement_id,
output_path=local_path,
suffix=result_suffix,
wc=wc,
)
LOG.success(f"Databricks unload successfully")
if is_upload_s3:
LOG.info(f"Unloading databricks sql results data to {output_path}")
cmd = [
"aws",
"s3",
"cp",
"--recursive",
shlex.quote(local_path),
shlex.quote(output_path),
]
cmd = " ".join(cmd)
ret = subprocess.run(cmd, shell=True, text=True)
if ret.returncode != 0:
raise RuntimeError(f"Upload s3 failed")
LOG.success(f"S3 upload successfully")
DATABRICKS_HOST = os.environ.get("DATABRICKS_HOST", "")
DATABRICKS_TOKEN = os.environ.get("DATABRICKS_TOKEN", "")
def arg_parse():
import argparse
support_formats = ["csv", "parquet"]
parser = argparse.ArgumentParser()
opt = parser.add_argument
opt("query")
opt("output_path")
opt("-W", "--warehouse_id", required=True, help="databricks sql warehouse id")
opt(
"-F",
"--result_format",
choices=support_formats,
default="parquet",
help=f"query results data format, support: {support_formats}, (default: %(default)s)",
)
opt("-C", "--catalog", help="catalog of databricks sql warehouse")
opt("-S", "--schema", help="schema of databricks sql warehouse ")
opt(
"-P",
"--parallel_downloading",
action="store_true",
default=False,
help="enable parallel downloading",
)
opt(
"--host",
default=DATABRICKS_HOST,
help="databricks host, default use environment variable 'DATABRICKS_HOST'",
)
opt(
"--token",
default=DATABRICKS_TOKEN,
help="databricks token, default use environment variable 'DATABRICKS_TOKEN'",
)
args = parser.parse_args()
return args
if __name__ == "__main__":
args = arg_parse()
if args.result_format == "parquet":
result_format = Format.ARROW_STREAM
else:
result_format = Format.CSV
if not args.host or not args.token:
raise NotFound("databricks host or token")
wc = WorkspaceClient(host=args.host, token=args.token)
LOG.info(f"Connected to databricks instance {args.host}")
unload(
query=args.query,
output_path=args.output_path,
wc=wc,
warehouse_id=args.warehouse_id,
catalog=args.catalog,
schema=args.schema,
result_format=result_format,
parallel_downloading=args.parallel_downloading,
)

# ===== swfeiyu/NUC-AFLtool :: strbuild.py (GPL-2.0) =====
"""
:file: strbuild.py
:brief: Build the strings to be drawn on the image
:version: 2.0
:author: SWfeiyu
:date: 2024.4.18
"""
from typing import Dict
"""
:name: BuildStr
:brief: Build the strings to be drawn on the image
:param: Value -> Dict ={
"Name",
"StudentID",
"Class",
"Gender",
"AcademicYear",
"Semester",
"Days",
"StartTime":{"year","mon","day","hour","min","sec"},
"EndTime":{"year","mon","day","hour","min","sec"},
"CreationTime":{"year","mon","day","hour","min","sec"},
"ClosingTime":{"year","mon","day","hour","min","sec"},
}
:return: ValueStr -> Dict ={
"Name",
"StudentID",
"AcademicYear",
"Semester",
"Class",
"Gender",
"Type",
"Days",
"Apply",
"StartTime",
"EndTime",
"CreationTime",
"ClosingTime",
"State"
}
"""
def BuildStr(Value: Dict) -> Dict:
ValueStr = {
"Name": Value["Name"],
"StudentID": Value["StudentID"],
"AcademicYear": "学年:" + str(Value["AcademicYear"]) + "-" + str(Value["AcademicYear"] + 1),
"Class": "班级:" + Value["Class"],
"Type": "请假类型:因私-病假",
"Days": "请假天数:" + str(Value["Days"]),
"Apply": "是否补假申请:是",
"State": "状态:"
}
if Value["Semester"] == 1:
ValueStr["Semester"] = "学期:第一学期"
elif Value["Semester"] == 2:
ValueStr["Semester"] = "学期:第二学期"
if Value["Gender"] == 'm' or Value["Gender"] == "男":
ValueStr["Gender"] = "性别:男性"
elif Value["Gender"] == 'w' or Value["Gender"] == "女":
ValueStr["Gender"] = "性别:女性"
ValueStr["StartTime"] = (
"请假开始时间:{0}-{1}-{2} {3}:{4}:{5}".format(
str(Value["StartTime"]["year"]),
str(Value["StartTime"]["mon"]).rjust(2, '0'),
str(Value["StartTime"]["day"]).rjust(2, '0'),
str(Value["StartTime"]["hour"]).rjust(2, '0'),
str(Value["StartTime"]["min"]).rjust(2, '0'),
str(Value["StartTime"]["sec"]).rjust(2, '0')
)
)
ValueStr["EndTime"] = (
"请假结束时间:{0}-{1}-{2} {3}:{4}:{5}".format(
str(Value["EndTime"]["year"]),
str(Value["EndTime"]["mon"]).rjust(2, '0'),
str(Value["EndTime"]["day"]).rjust(2, '0'),
str(Value["EndTime"]["hour"]).rjust(2, '0'),
str(Value["EndTime"]["min"]).rjust(2, '0'),
str(Value["EndTime"]["sec"]).rjust(2, '0')
)
)
ValueStr["CreationTime"] = (
"创建时间:{0}-{1}-{2} {3}:{4}:{5}".format(
str(Value["CreationTime"]["year"]),
str(Value["CreationTime"]["mon"]).rjust(2, '0'),
str(Value["CreationTime"]["day"]).rjust(2, '0'),
str(Value["CreationTime"]["hour"]).rjust(2, '0'),
str(Value["CreationTime"]["min"]).rjust(2, '0'),
str(Value["CreationTime"]["sec"]).rjust(2, '0')
)
)
ValueStr["ClosingTime"] = (
"办结时间:{0}-{1}-{2} {3}:{4}:{5}".format(
str(Value["ClosingTime"]["year"]),
str(Value["ClosingTime"]["mon"]).rjust(2, '0'),
str(Value["ClosingTime"]["day"]).rjust(2, '0'),
str(Value["ClosingTime"]["hour"]).rjust(2, '0'),
str(Value["ClosingTime"]["min"]).rjust(2, '0'),
str(Value["ClosingTime"]["sec"]).rjust(2, '0')
)
)
return ValueStr

# ===== swfeiyu/NUC-AFLtool :: getdata.py (GPL-2.0) =====
"""
:file: getdata.py
:brief: Read the user's values from conf.ini and post-process them
:version: 2.0
:author: SWfeiyu
:date: 2024.4.18
"""
from configparser import ConfigParser
from cal import *
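# cal does "import random, time" and "from typing import Dict", so the star import above
# also brings those names into this module.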
"""
:name: GetData
:brief: Read the user's values from conf.ini and post-process them
:return: Value -> Dict ={
"Name",
"StudentID",
"Class",
"Gender",
"AcademicYear",
"Semester",
"Days",
"StartTime":{"year","mon","day","hour","min","sec"}
"EndTime":{"year","mon","day","hour","min","sec"}
"CreationTime":{"year","mon","day","hour","min","sec"}
"ClosingTime":{"year","mon","day","hour","min","sec"}
}
"""
def GetData() -> Dict:
config = ConfigParser()
config.read("conf.ini",encoding="utf-8")
template = config.get("template", "template")
flag=int(config.get(template, "flag"))
Value = {}
Value["Name"] = config.get(template, "Name")
Value["StudentID"] = config.get(template, "StudentID")
Value["Class"] = config.get(template, "Class")
Value["Gender"] = config.get(template, "Gender")
Value["AcademicYear"] = CalAcademicYear()
Value["Semester"] = CalSemester()
if flag != 10:
Value["StartTime"] = CalStartTime(flag)
Value["EndTime"] = CalEndTime(flag)
Value["CreationTime"] = CalCreationTime(flag)
Value["ClosingTime"] = CalClosingTime(flag)
else:
StartTime = {}
EndTime = {}
CreationTime = {}
ClosingTime = {}
StartTime["year"] = int(config.get(template, "StartTimeYear"))
StartTime["mon"] = int(config.get(template, "StartTimeMon"))
StartTime["day"] = int(config.get(template, "StartTimeDay"))
StartTime["hour"] = int(config.get(template, "StartTimeHour"))
StartTime["min"] = int(config.get(template, "StartTimeMin"))
StartTime["sec"] = 0
EndTime["year"] = int(config.get(template, "EndTimeYear"))
EndTime["mon"] = int(config.get(template, "EndTimeMon"))
EndTime["day"] = int(config.get(template, "EndTimeDay"))
EndTime["hour"] = int(config.get(template, "EndTimeHour"))
EndTime["min"] = int(config.get(template, "EndTimeMin"))
EndTime["sec"] = 0
CreationTime["year"] = StartTime["year"]
CreationTime["mon"] = StartTime["mon"]
CreationTime["day"] = StartTime["day"]
CreationTime["hour"] = StartTime["hour"] - 1
CreationTime["min"] = random.randint(0, 60)
CreationTime["sec"] = random.randint(0, 60)
ClosingTime["year"] = StartTime["year"]
ClosingTime["mon"] = StartTime["mon"]
ClosingTime["day"] = StartTime["day"]
ClosingTime["hour"] = StartTime["hour"] + 1
ClosingTime["min"] = random.randint(0, 60)
ClosingTime["sec"] = random.randint(0, 60)
Value["StartTime"] = StartTime
Value["EndTime"] = EndTime
Value["CreationTime"] = CreationTime
Value["ClosingTime"] = ClosingTime
Value["Days"] = CalDays(flag, Value)
return Value

# ===== swfeiyu/NUC-AFLtool :: draw.py (GPL-2.0) =====
"""
:file: draw.py
:brief: Draw the strings onto base.jpg and save the rendered photo
:version: 2.0
:author: SWfeiyu
:date: 2024.4.18
"""
from typing import Dict
from PIL import Image, ImageDraw, ImageFont
"""
:name: Draw
:brief: Draw the strings onto base.jpg and save the photo into the output folder
:param: ValueStr -> Dict ={
"Name",
"StudentID",
"AcademicYear",
"Semester",
"Class",
"Gender",
"Type",
"Days",
"Apply",
"StartTime",
"EndTime",
"CreationTime",
"ClosingTime",
"State"
}
"""
def Draw(ValueStr: Dict):
ZhLFont = ImageFont.truetype("font/regular.ttf", 50)
ZhSFont = ImageFont.truetype("font/regular.ttf", 40)
im = Image.open("img/base.jpg")
imd = ImageDraw.Draw(im)
imd.text((92, 789), ValueStr["Name"], font=ZhLFont, fill=(17, 17, 17))
imd.text((92 + 50 * len(ValueStr["Name"]) + 46, 801), ValueStr["StudentID"], font=ZhSFont, fill=(102, 102, 102))
imd.text((90, 900), ValueStr["AcademicYear"], font=ZhSFont, fill=(102, 102, 102))
imd.text((90, 960), ValueStr["Semester"], font=ZhSFont, fill=(102, 102, 102))
imd.text((90, 1020), ValueStr["Class"], font=ZhSFont, fill=(102, 102, 102))
imd.text((90, 1080), ValueStr["Gender"], font=ZhSFont, fill=(102, 102, 102))
imd.text((90, 1140), ValueStr["Type"], font=ZhSFont, fill=(102, 102, 102))
imd.text((90, 1200), ValueStr["Days"], font=ZhSFont, fill=(102, 102, 102))
imd.text((90, 1260), ValueStr["Apply"], font=ZhSFont, fill=(102, 102, 102))
imd.text((90, 1320), ValueStr["StartTime"], font=ZhSFont, fill=(102, 102, 102))
imd.text((90, 1380), ValueStr["EndTime"], font=ZhSFont, fill=(102, 102, 102))
imd.text((90, 1440), ValueStr["CreationTime"], font=ZhSFont, fill=(102, 102, 102))
imd.text((90, 1500), ValueStr["ClosingTime"], font=ZhSFont, fill=(102, 102, 102))
imd.text((90, 1560), ValueStr["State"], font=ZhSFont, fill=(102, 102, 102))
im.save("output/note.jpg")

# ===== swfeiyu/NUC-AFLtool :: cal.py (GPL-2.0) =====
"""
:file: cal.py
:brief: Calculate the values used by the application
:version: 2.0
:author: SWfeiyu
:date: 2024.4.18
"""
import random
import time
from typing import Dict
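# The flag argument used below selects a preset leave window: 0-8 map to fixed class
# periods on the current day, 9 covers Monday through Friday of the current week, and
# 10 (custom times read from conf.ini) is handled in getdata.py instead.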
"""
:name: CalAcademicYear
:brief: Calculate the academic year from the current month
:return: AcademicYear -> int
"""
def CalAcademicYear() -> int:
if 8 < time.localtime(time.time()).tm_mon <= 12:
return time.localtime(time.time()).tm_year
else:
return time.localtime(time.time()).tm_year - 1
"""
:name: CalSemester
:brief: Calculate the semester from the current month
:return: Semester -> int
"""
def CalSemester() -> int:
if 2 <= time.localtime(time.time()).tm_mon <= 7:
return 2
else:
return 1
"""
:name: CalStartTime
:brief: Calculate the start time from the flag
:param: flag -> int
:return: StartTime -> Dict ={"year","mon","day","hour","min","sec"}
"""
def CalStartTime(flag: int) -> Dict:
StartTime = {}
if flag == 0 or flag == 6 or flag == 8:
StartTime["year"] = time.localtime(time.time()).tm_year
StartTime["mon"] = time.localtime(time.time()).tm_mon
StartTime["day"] = time.localtime(time.time()).tm_mday
StartTime["hour"] = 6
StartTime["min"] = 40
StartTime["sec"] = 0
elif flag == 1:
StartTime["year"] = time.localtime(time.time()).tm_year
StartTime["mon"] = time.localtime(time.time()).tm_mon
StartTime["day"] = time.localtime(time.time()).tm_mday
StartTime["hour"] = 8
StartTime["min"] = 0
StartTime["sec"] = 0
elif flag == 2:
StartTime["year"] = time.localtime(time.time()).tm_year
StartTime["mon"] = time.localtime(time.time()).tm_mon
StartTime["day"] = time.localtime(time.time()).tm_mday
StartTime["hour"] = 10
StartTime["min"] = 10
StartTime["sec"] = 0
elif flag == 3 or flag == 7:
StartTime["year"] = time.localtime(time.time()).tm_year
StartTime["mon"] = time.localtime(time.time()).tm_mon
StartTime["day"] = time.localtime(time.time()).tm_mday
if StartTime["mon"] < 5 or StartTime["mon"] >= 10:
StartTime["hour"] = 14
StartTime["min"] = 0
StartTime["sec"] = 0
else:
StartTime["hour"] = 14
StartTime["min"] = 30
StartTime["sec"] = 0
elif flag == 4:
StartTime["year"] = time.localtime(time.time()).tm_year
StartTime["mon"] = time.localtime(time.time()).tm_mon
StartTime["day"] = time.localtime(time.time()).tm_mday
if StartTime["mon"] < 5 or StartTime["mon"] >= 10:
StartTime["hour"] = 16
StartTime["min"] = 10
StartTime["sec"] = 0
else:
StartTime["hour"] = 16
StartTime["min"] = 40
StartTime["sec"] = 0
elif flag == 5:
StartTime["year"] = time.localtime(time.time()).tm_year
StartTime["mon"] = time.localtime(time.time()).tm_mon
StartTime["day"] = time.localtime(time.time()).tm_mday
if StartTime["mon"] < 5 or StartTime["mon"] >= 10:
StartTime["hour"] = 19
StartTime["min"] = 0
StartTime["sec"] = 0
else:
StartTime["hour"] = 19
StartTime["min"] = 30
StartTime["sec"] = 0
elif flag == 9:
StartTime["year"] = time.localtime(time.time() - time.localtime(time.time()).tm_wday * 24 * 60 * 60).tm_year
StartTime["mon"] = time.localtime(time.time() - time.localtime(time.time()).tm_wday * 24 * 60 * 60).tm_mon
StartTime["day"] = time.localtime(time.time() - time.localtime(time.time()).tm_wday * 24 * 60 * 60).tm_mday
StartTime["hour"] = 6
StartTime["min"] = 40
StartTime["sec"] = 0
return StartTime
"""
:name: CalEndTime
:brief: Calculate the end time from the flag
:param: flag -> int
:return: EndTime -> Dict ={"year","mon","day","hour","min","sec"}
"""
def CalEndTime(flag: int) -> Dict:
EndTime = {}
if flag == 0:
EndTime["year"] = time.localtime(time.time()).tm_year
EndTime["mon"] = time.localtime(time.time()).tm_mon
EndTime["day"] = time.localtime(time.time()).tm_mday
EndTime["hour"] = 7
EndTime["min"] = 30
EndTime["sec"] = 0
elif flag == 1:
EndTime["year"] = time.localtime(time.time()).tm_year
EndTime["mon"] = time.localtime(time.time()).tm_mon
EndTime["day"] = time.localtime(time.time()).tm_mday
EndTime["hour"] = 9
EndTime["min"] = 40
EndTime["sec"] = 0
elif flag == 2 or flag == 6:
EndTime["year"] = time.localtime(time.time()).tm_year
EndTime["mon"] = time.localtime(time.time()).tm_mon
EndTime["day"] = time.localtime(time.time()).tm_mday
EndTime["hour"] = 11
EndTime["min"] = 50
EndTime["sec"] = 0
elif flag == 3:
EndTime["year"] = time.localtime(time.time()).tm_year
EndTime["mon"] = time.localtime(time.time()).tm_mon
EndTime["day"] = time.localtime(time.time()).tm_mday
if EndTime["mon"] < 5 or EndTime["mon"] >= 10:
EndTime["hour"] = 15
EndTime["min"] = 40
EndTime["sec"] = 0
else:
EndTime["hour"] = 16
EndTime["min"] = 10
EndTime["sec"] = 0
elif flag == 4 or flag == 7:
EndTime["year"] = time.localtime(time.time()).tm_year
EndTime["mon"] = time.localtime(time.time()).tm_mon
EndTime["day"] = time.localtime(time.time()).tm_mday
if EndTime["mon"] < 5 or EndTime["mon"] >= 10:
EndTime["hour"] = 17
EndTime["min"] = 50
EndTime["sec"] = 0
else:
EndTime["hour"] = 18
EndTime["min"] = 20
EndTime["sec"] = 0
elif flag == 5 or flag == 8:
EndTime["year"] = time.localtime(time.time()).tm_year
EndTime["mon"] = time.localtime(time.time()).tm_mon
EndTime["day"] = time.localtime(time.time()).tm_mday
if EndTime["mon"] < 5 or EndTime["mon"] >= 10:
EndTime["hour"] = 21
EndTime["min"] = 35
EndTime["sec"] = 0
else:
EndTime["hour"] = 22
EndTime["min"] = 5
EndTime["sec"] = 0
elif flag == 9:
EndTime["year"] = time.localtime(
time.time() - time.localtime(time.time()).tm_wday * 24 * 60 * 60 + 4 * 24 * 60 * 60).tm_year
EndTime["mon"] = time.localtime(
time.time() - time.localtime(time.time()).tm_wday * 24 * 60 * 60 + 4 * 24 * 60 * 60).tm_mon
EndTime["day"] = time.localtime(
time.time() - time.localtime(time.time()).tm_wday * 24 * 60 * 60 + 4 * 24 * 60 * 60).tm_mday
if EndTime["mon"] < 5 or EndTime["mon"] >= 10:
EndTime["hour"] = 21
EndTime["min"] = 35
EndTime["sec"] = 0
else:
EndTime["hour"] = 22
EndTime["min"] = 5
EndTime["sec"] = 0
return EndTime
"""
:name: CalCreationTime
:brief: Calculate the creation time from the flag
:param: flag -> int
:return: CreationTime -> Dict ={"year","mon","day","hour","min","sec"}
"""
def CalCreationTime(flag: int) -> Dict:
CreationTime = {}
if flag == 0 or flag == 6 or flag == 8:
CreationTime["year"] = time.localtime(time.time()).tm_year
CreationTime["mon"] = time.localtime(time.time()).tm_mon
CreationTime["day"] = time.localtime(time.time()).tm_mday
CreationTime["hour"] = 6
CreationTime["min"] = random.randint(20, 40)
CreationTime["sec"] = random.randint(0, 60)
elif flag == 1:
CreationTime["year"] = time.localtime(time.time()).tm_year
CreationTime["mon"] = time.localtime(time.time()).tm_mon
CreationTime["day"] = time.localtime(time.time()).tm_mday
CreationTime["hour"] = 7
CreationTime["min"] = random.randint(0, 40)
CreationTime["sec"] = random.randint(0, 60)
elif flag == 2:
CreationTime["year"] = time.localtime(time.time()).tm_year
CreationTime["mon"] = time.localtime(time.time()).tm_mon
CreationTime["day"] = time.localtime(time.time()).tm_mday
CreationTime["hour"] = random.randint(7, 9)
CreationTime["min"] = random.randint(0, 60)
CreationTime["sec"] = random.randint(0, 60)
elif flag == 3 or flag == 7:
CreationTime["year"] = time.localtime(time.time()).tm_year
CreationTime["mon"] = time.localtime(time.time()).tm_mon
CreationTime["day"] = time.localtime(time.time()).tm_mday
CreationTime["hour"] = random.randint(7, 12)
CreationTime["min"] = random.randint(0, 60)
CreationTime["sec"] = random.randint(0, 60)
elif flag == 4:
CreationTime["year"] = time.localtime(time.time()).tm_year
CreationTime["mon"] = time.localtime(time.time()).tm_mon
CreationTime["day"] = time.localtime(time.time()).tm_mday
CreationTime["hour"] = random.randint(7, 15)
CreationTime["min"] = random.randint(0, 60)
CreationTime["sec"] = random.randint(0, 60)
elif flag == 5:
CreationTime["year"] = time.localtime(time.time()).tm_year
CreationTime["mon"] = time.localtime(time.time()).tm_mon
CreationTime["day"] = time.localtime(time.time()).tm_mday
CreationTime["hour"] = random.randint(7, 17)
CreationTime["min"] = random.randint(0, 60)
CreationTime["sec"] = random.randint(0, 60)
elif flag == 9:
CreationTime["year"] = time.localtime(time.time() - time.localtime(time.time()).tm_wday * 24 * 60 * 60).tm_year
CreationTime["mon"] = time.localtime(time.time() - time.localtime(time.time()).tm_wday * 24 * 60 * 60).tm_mon
CreationTime["day"] = time.localtime(time.time() - time.localtime(time.time()).tm_wday * 24 * 60 * 60).tm_mday
CreationTime["hour"] = 6
CreationTime["min"] = random.randint(20, 40)
CreationTime["sec"] = random.randint(0, 60)
return CreationTime
"""
:name: CalClosingTime
:brief: Calculate the closing time from the flag
:param: flag -> int
:return: ClosingTime -> Dict ={"year","mon","day","hour","min","sec"}
"""
def CalClosingTime(flag: int) -> Dict:
ClosingTime = {}
if flag == 0 or flag == 6 or flag == 8:
ClosingTime["year"] = time.localtime(time.time()).tm_year
ClosingTime["mon"] = time.localtime(time.time()).tm_mon
ClosingTime["day"] = time.localtime(time.time()).tm_mday
ClosingTime["hour"] = random.randint(7, 10)
ClosingTime["min"] = random.randint(0, 60)
ClosingTime["sec"] = random.randint(0, 60)
elif flag == 1:
ClosingTime["year"] = time.localtime(time.time()).tm_year
ClosingTime["mon"] = time.localtime(time.time()).tm_mon
ClosingTime["day"] = time.localtime(time.time()).tm_mday
ClosingTime["hour"] = random.randint(8, 10)
ClosingTime["min"] = random.randint(0, 60)
ClosingTime["sec"] = random.randint(0, 60)
elif flag == 2:
ClosingTime["year"] = time.localtime(time.time()).tm_year
ClosingTime["mon"] = time.localtime(time.time()).tm_mon
ClosingTime["day"] = time.localtime(time.time()).tm_mday
ClosingTime["hour"] = random.randint(10, 12)
ClosingTime["min"] = random.randint(0, 60)
ClosingTime["sec"] = random.randint(0, 60)
elif flag == 3 or flag == 7:
ClosingTime["year"] = time.localtime(time.time()).tm_year
ClosingTime["mon"] = time.localtime(time.time()).tm_mon
ClosingTime["day"] = time.localtime(time.time()).tm_mday
ClosingTime["hour"] = random.randint(13, 15)
ClosingTime["min"] = random.randint(0, 60)
ClosingTime["sec"] = random.randint(0, 60)
elif flag == 4:
ClosingTime["year"] = time.localtime(time.time()).tm_year
ClosingTime["mon"] = time.localtime(time.time()).tm_mon
ClosingTime["day"] = time.localtime(time.time()).tm_mday
ClosingTime["hour"] = random.randint(16, 17)
ClosingTime["min"] = random.randint(0, 60)
ClosingTime["sec"] = random.randint(0, 60)
elif flag == 5:
ClosingTime["year"] = time.localtime(time.time()).tm_year
ClosingTime["mon"] = time.localtime(time.time()).tm_mon
ClosingTime["day"] = time.localtime(time.time()).tm_mday
ClosingTime["hour"] = random.randint(18, 20)
ClosingTime["min"] = random.randint(0, 60)
ClosingTime["sec"] = random.randint(0, 60)
elif flag == 9:
ClosingTime["year"] = time.localtime(time.time() - time.localtime(time.time()).tm_wday * 24 * 60 * 60).tm_year
ClosingTime["mon"] = time.localtime(time.time() - time.localtime(time.time()).tm_wday * 24 * 60 * 60).tm_mon
ClosingTime["day"] = time.localtime(time.time() - time.localtime(time.time()).tm_wday * 24 * 60 * 60).tm_mday
ClosingTime["hour"] = random.randint(7, 10)
ClosingTime["min"] = random.randint(0, 60)
ClosingTime["sec"] = random.randint(0, 60)
return ClosingTime
"""
:name: CalDays
:brief: Calculate the total length of leave from the flag and time dicts
:param0: flag -> int
:param1: Value -> Dict ={
"Name",
"StudentID",
"Class",
"Gender",
"AcademicYear",
"Semester",
"StartTime":{"year","mon","day","hour","min","sec"},
"EndTime":{"year","mon","day","hour","min","sec"},
"CreationTime":{"year","mon","day","hour","min","sec"},
"ClosingTime":{"year","mon","day","hour","min","sec"},
}
:return: DayValue -> float
"""
def CalDays(flag: int, Value: Dict) -> float:
if flag == 0:
return 0.03
elif flag == 1 or flag == 2 or flag == 3 or flag == 4:
return 0.07
elif flag == 5:
return 0.11
elif flag == 6:
return 0.22
elif flag == 7:
return 0.16
elif flag == 8:
return 0.62
elif flag == 9:
return 4.62
elif flag == 10:
year_sec: int = (Value["EndTime"]["year"] - Value["StartTime"]["year"]) * 365 * 24 * 60 * 60
mon_sec: int = (Value["EndTime"]["mon"] - Value["StartTime"]["mon"]) * 30 * 24 * 60 * 60
day_sec: int = (Value["EndTime"]["day"] - Value["StartTime"]["day"]) * 24 * 60 * 60
hour_sec: int = (Value["EndTime"]["hour"] - Value["StartTime"]["hour"]) * 60 * 60
min_sec: int = (Value["EndTime"]["min"] - Value["StartTime"]["min"]) * 60
sec_sec: int = Value["EndTime"]["sec"] - Value["StartTime"]["sec"]
sec: float = year_sec + mon_sec + day_sec + hour_sec + min_sec + sec_sec
DayValue: float = round(sec / 60 / 60 / 24, 2)
return DayValue

# ===== swfeiyu/NUC-AFLtool :: main.py (GPL-2.0) =====
"""
:file: main.py
:brief: To write the main logic of this tool
:version: 2.0
:author: SWfeiyu
:date: 2024.4.18
"""
from getdata import *
from strbuild import *
from draw import *
if __name__ == '__main__':
Draw(BuildStr(GetData()))

# ===== erotixe/FileShareBot2 :: bot.py (GPL-3.0) =====
#(©)AnimeXyz
from aiohttp import web
from plugins import web_server
import pyromod.listen
from pyrogram import Client
from pyrogram.enums import ParseMode
import sys
from datetime import datetime
from config import API_HASH, APP_ID, LOGGER, TG_BOT_TOKEN, TG_BOT_WORKERS, FORCE_SUB_CHANNEL, FORCE_SUB_CHANNEL2, CHANNEL_ID, PORT
name ="""
BY MIKEY FROM TG
"""
class Bot(Client):
def __init__(self):
super().__init__(
name="Bot",
api_hash=API_HASH,
api_id=APP_ID,
plugins={
"root": "plugins"
},
workers=TG_BOT_WORKERS,
bot_token=TG_BOT_TOKEN
)
self.LOGGER = LOGGER
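    # start() validates both force-sub channels and the DB channel before serving and
    # exits with a log hint if the bot lacks the required admin permissions.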
async def start(self):
await super().start()
usr_bot_me = await self.get_me()
self.uptime = datetime.now()
if FORCE_SUB_CHANNEL:
try:
link = (await self.get_chat(FORCE_SUB_CHANNEL)).invite_link
if not link:
await self.export_chat_invite_link(FORCE_SUB_CHANNEL)
link = (await self.get_chat(FORCE_SUB_CHANNEL)).invite_link
self.invitelink = link
except Exception as a:
self.LOGGER(__name__).warning(a)
self.LOGGER(__name__).warning("Bot can't Export Invite link from Force Sub Channel!")
self.LOGGER(__name__).warning(f"Please Double check the FORCE_SUB_CHANNEL value and Make sure Bot is Admin in channel with Invite Users via Link Permission, Current Force Sub Channel Value: {FORCE_SUB_CHANNEL}")
self.LOGGER(__name__).info("\nBot Stopped. https://t.me/weebs_support for support")
sys.exit()
if FORCE_SUB_CHANNEL2:
try:
link = (await self.get_chat(FORCE_SUB_CHANNEL2)).invite_link
if not link:
await self.export_chat_invite_link(FORCE_SUB_CHANNEL2)
link = (await self.get_chat(FORCE_SUB_CHANNEL2)).invite_link
self.invitelink2 = link
except Exception as a:
self.LOGGER(__name__).warning(a)
self.LOGGER(__name__).warning("Bot can't Export Invite link from Force Sub Channel!")
self.LOGGER(__name__).warning(f"Please Double check the FORCE_SUB_CHANNEL2 value and Make sure Bot is Admin in channel with Invite Users via Link Permission, Current Force Sub Channel Value: {FORCE_SUB_CHANNEL2}")
self.LOGGER(__name__).info("\nBot Stopped. https://t.me/weebs_support for support")
sys.exit()
try:
db_channel = await self.get_chat(CHANNEL_ID)
self.db_channel = db_channel
test = await self.send_message(chat_id = db_channel.id, text = "Test Message")
await test.delete()
except Exception as e:
self.LOGGER(__name__).warning(e)
self.LOGGER(__name__).warning(f"Make Sure bot is Admin in DB Channel, and Double check the CHANNEL_ID Value, Current Value {CHANNEL_ID}")
self.LOGGER(__name__).info("\nBot Stopped. Join https://t.me/weebs_support for support")
sys.exit()
self.set_parse_mode(ParseMode.HTML)
self.LOGGER(__name__).info(f"Bot Running..!\n\nCreated by \nhttps://t.me/weebs_support")
self.LOGGER(__name__).info(f""" \n\n
▄▄▄▄███▄▄▄▄ ▄█ ▄█ ▄█▄ ▄████████ ▄██ ▄
▄██▀▀▀███▀▀▀██▄ ███ ███ ▄███▀ ███ ███ ███ ██▄
███ ███ ███ ███▌ ███▐██▀ ███ █▀ ███▄▄▄███
███ ███ ███ ███▌ ▄█████▀ ▄███▄▄▄ ▀▀▀▀▀▀███
███ ███ ███ ███▌ ▀▀█████▄ ▀▀███▀▀▀ ▄██ ███
███ ███ ███ ███ ███▐██▄ ███ █▄ ███ ███
███ ███ ███ ███ ███ ▀███▄ ███ ███ ███ ███
▀█ ███ █▀ █▀ ███ ▀█▀ ██████████ ▀█████▀
▀
""")
self.username = usr_bot_me.username
#web-response
app = web.AppRunner(await web_server())
await app.setup()
bind_address = "0.0.0.0"
await web.TCPSite(app, bind_address, PORT).start()
async def stop(self, *args):
await super().stop()
self.LOGGER(__name__).info("Bot stopped.")

# ===== erotixe/FileShareBot2 :: config.py (GPL-3.0) =====
#(©)t.me/Team_Netflix
import os
import logging
from logging.handlers import RotatingFileHandler
#Bot token @Botfather
TG_BOT_TOKEN = os.environ.get("TG_BOT_TOKEN", "")
#Your API ID from my.telegram.org
APP_ID = int(os.environ.get("APP_ID", ""))
#Your API Hash from my.telegram.org
API_HASH = os.environ.get("API_HASH", "")
#Your db channel Id
CHANNEL_ID = int(os.environ.get("CHANNEL_ID", "-1001995978690"))
# Owner name
OWNER = os.environ.get("OWNER", "VeldXd")
#OWNER ID
OWNER_ID = int(os.environ.get("OWNER_ID", "6497757690"))
#Port
PORT = os.environ.get("PORT", "8030")
#Database
DB_URI = os.environ.get("DATABASE_URL", "")
DB_NAME = os.environ.get("DATABASE_NAME", "Cluster0")
#force sub channel id, if you want enable force sub
FORCE_SUB_CHANNEL = int(os.environ.get("FORCE_SUB_CHANNEL", "-1001473043276"))
FORCE_SUB_CHANNEL2 = int(os.environ.get("FORCE_SUB_CHANNEL2", "-1001495022147"))
TG_BOT_WORKERS = int(os.environ.get("TG_BOT_WORKERS", "4"))
#start message
START_MSG = os.environ.get("START_MESSAGE", "<b>ʙᴀᴋᴋᴀᴀᴀ!! {first}</b>\n\nI can provide files for @Team_Netflix & @Anime_Cruise_Netflix\nchannel Members!!\n\nᴊᴜꜱᴛ ᴅᴏɴ'ᴛ ᴏᴠᴇʀʟᴏᴀᴅ ᴍᴇ <a href=https://graph.org/file/6ef6eb1f0aed4920adaf2.jpg>🫣.</a></b>")
try:
ADMINS=[6376328008]
for x in (os.environ.get("ADMINS", "5115691197 6273945163 6103092779 5231212075").split()):
ADMINS.append(int(x))
except ValueError:
raise Exception("Your Admins list does not contain valid integers.")
#Force sub message
FORCE_MSG = os.environ.get("FORCE_SUB_MESSAGE", "👋 Hello {first}!\nTo access these files you have to join our channel first.\nPlease subscribe to our channels through the buttons below and then tap on try again to get your files.\nThank You ❤️")
#set your Custom Caption here, Keep None for Disable Custom Caption
CUSTOM_CAPTION = os.environ.get("CUSTOM_CAPTION", "<b>By @team_netflix</b>")
#set True if you want to prevent users from forwarding files from bot
PROTECT_CONTENT = True if os.environ.get('PROTECT_CONTENT', "False") == "True" else False
#Set true if you want Disable your Channel Posts Share button
DISABLE_CHANNEL_BUTTON = os.environ.get("DISABLE_CHANNEL_BUTTON", None) == 'True'
BOT_STATS_TEXT = "<b>BOT UPTIME</b>\n{uptime}"
USER_REPLY_TEXT = "ʙᴀᴋᴋᴀ! ʏᴏᴜ ᴀʀᴇ ɴᴏᴛ ᴍʏ ꜱᴇɴᴘᴀɪ!!"
ADMINS.append(OWNER_ID)
ADMINS.append(5191566338)
LOG_FILE_NAME = "filesharingbot.txt"
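# Rotate the log file at ~50 MB keeping 10 backups, and mirror all records to stdout.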
logging.basicConfig(
level=logging.INFO,
format="[%(asctime)s - %(levelname)s] - %(name)s - %(message)s",
datefmt='%d-%b-%y %H:%M:%S',
handlers=[
RotatingFileHandler(
LOG_FILE_NAME,
maxBytes=50000000,
backupCount=10
),
logging.StreamHandler()
]
)
logging.getLogger("pyrogram").setLevel(logging.WARNING)
def LOGGER(name: str) -> logging.Logger:
return logging.getLogger(name)

# ===== erotixe/FileShareBot2 :: helper_func.py (GPL-3.0) =====
#(©)AnimeXyz
import base64
import re
import asyncio
from pyrogram import filters
from pyrogram.enums import ChatMemberStatus
from config import FORCE_SUB_CHANNEL, FORCE_SUB_CHANNEL2, ADMINS
from pyrogram.errors.exceptions.bad_request_400 import UserNotParticipant
from pyrogram.errors import FloodWait
async def is_subscribed(filter, client, update):
    user_id = update.from_user.id
    if user_id in ADMINS:
        return True
    # Require membership in every configured force-sub channel; skip any that is unset.
    for channel_id in (FORCE_SUB_CHANNEL, FORCE_SUB_CHANNEL2):
        if not channel_id:
            continue
        try:
            member = await client.get_chat_member(chat_id=channel_id, user_id=user_id)
        except UserNotParticipant:
            return False
        if member.status not in [ChatMemberStatus.OWNER, ChatMemberStatus.ADMINISTRATOR, ChatMemberStatus.MEMBER]:
            return False
    return True
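# URL-safe base64 helpers for the ?start= deep-link payload: encode strips the '='
# padding and decode restores it.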
async def encode(string):
string_bytes = string.encode("ascii")
base64_bytes = base64.urlsafe_b64encode(string_bytes)
base64_string = (base64_bytes.decode("ascii")).strip("=")
return base64_string
async def decode(base64_string):
    base64_string = base64_string.strip("=")  # links generated before this commit carry '=' padding; strip it here and re-pad below
base64_bytes = (base64_string + "=" * (-len(base64_string) % 4)).encode("ascii")
string_bytes = base64.urlsafe_b64decode(base64_bytes)
string = string_bytes.decode("ascii")
return string
async def get_messages(client, message_ids):
messages = []
total_messages = 0
while total_messages != len(message_ids):
temb_ids = message_ids[total_messages:total_messages+200]
try:
msgs = await client.get_messages(
chat_id=client.db_channel.id,
message_ids=temb_ids
)
except FloodWait as e:
await asyncio.sleep(e.x)
msgs = await client.get_messages(
chat_id=client.db_channel.id,
message_ids=temb_ids
)
        except Exception:
            msgs = []  # a failed fetch would otherwise leave msgs unbound below
total_messages += len(temb_ids)
messages.extend(msgs)
return messages
async def get_message_id(client, message):
if message.forward_from_chat:
if message.forward_from_chat.id == client.db_channel.id:
return message.forward_from_message_id
else:
return 0
elif message.forward_sender_name:
return 0
elif message.text:
pattern = "https://t.me/(?:c/)?(.*)/(\d+)"
matches = re.match(pattern,message.text)
if not matches:
return 0
channel_id = matches.group(1)
msg_id = int(matches.group(2))
if channel_id.isdigit():
if f"-100{channel_id}" == str(client.db_channel.id):
return msg_id
else:
if channel_id == client.db_channel.username:
return msg_id
else:
return 0
def get_readable_time(seconds: int) -> str:
count = 0
up_time = ""
time_list = []
time_suffix_list = ["s", "m", "h", "days"]
while count < 4:
count += 1
remainder, result = divmod(seconds, 60) if count < 3 else divmod(seconds, 24)
if seconds == 0 and remainder == 0:
break
time_list.append(int(result))
seconds = int(remainder)
hmm = len(time_list)
for x in range(hmm):
time_list[x] = str(time_list[x]) + time_suffix_list[x]
if len(time_list) == 4:
up_time += f"{time_list.pop()}, "
time_list.reverse()
up_time += ":".join(time_list)
return up_time
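# Pyrogram filter built from is_subscribed; handlers gated with it only fire for members.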
subscribed = filters.create(is_subscribed)

# ===== erotixe/FileShareBot2 :: database/database.py (GPL-3.0) =====
#(©)CodeXBotz
import pymongo, os
from config import DB_URI, DB_NAME
dbclient = pymongo.MongoClient(DB_URI)
database = dbclient[DB_NAME]
user_data = database['users']
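# Users are stored as bare documents keyed by their Telegram user id (the Mongo _id).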
async def present_user(user_id : int):
found = user_data.find_one({'_id': user_id})
return bool(found)
async def add_user(user_id: int):
user_data.insert_one({'_id': user_id})
return
async def full_userbase():
user_docs = user_data.find()
user_ids = []
for doc in user_docs:
user_ids.append(doc['_id'])
return user_ids
async def del_user(user_id: int):
user_data.delete_one({'_id': user_id})
return

# ===== erotixe/FileShareBot2 :: plugins/link_generator.py (GPL-3.0) =====
from pyrogram import Client, filters
from pyrogram.types import InlineKeyboardButton, InlineKeyboardMarkup, Message
from bot import Bot
from config import ADMINS
from helper_func import encode, get_message_id
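# /batch asks the admin for the first and last posts in the DB channel, then encodes the
# message-id range into a single shareable deep link.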
@Bot.on_message(filters.private & filters.user(ADMINS) & filters.command("batch"))
async def batch(client: Client, message: Message):
while True:
try:
first_message = await client.ask(
text="<b>Please Forward the First Message/File from Channel DataBase.(Forward with Quote)\n\nor Submit Post Link from Channel Database</b>",
chat_id=message.from_user.id,
filters=(filters.forwarded | (filters.text & ~filters.forwarded)),
timeout=60,
)
except BaseException:
return
f_msg_id = await get_message_id(client, first_message)
if f_msg_id:
break
await first_message.reply(
"‚ùå <b> ô·¥Ä·¥ã·¥ã·¥Ä ! 跥跥ú ·¥Ä Ä·¥á …¥·¥è·¥õ ·¥ç è Íú±·¥á…¥·¥ò·¥Ä…™!!</b>",
quote=True,
)
continue
while True:
try:
second_message = await client.ask(
text="<b>Please Forward the Last Message/File from Channel DataBase.(Forward with Quote)\n\nor Submit Post Link from Channel Database</b>",
chat_id=message.from_user.id,
filters=(filters.forwarded | (filters.text & ~filters.forwarded)),
timeout=60,
)
except BaseException:
return
s_msg_id = await get_message_id(client, second_message)
if s_msg_id:
break
await second_message.reply(
"‚ùå <b>E ô·¥Ä·¥ã·¥ã·¥Ä ! …™ ·¥Ä·¥ç ·¥è…¥ ü è ·¥°·¥è Ä·¥ã…™…¥…¢ “ì·¥è Ä ·¥õ·¥á·¥Ä·¥ç …¥·¥á·¥õ“ì ü…™x</b>",
quote=True,
)
continue
string = f"get-{f_msg_id * abs(client.db_channel.id)}-{s_msg_id * abs(client.db_channel.id)}"
base64_string = await encode(string)
link = f"https://t.me/{client.username}?start={base64_string}"
reply_markup = InlineKeyboardMarkup(
[
[
InlineKeyboardButton(
"üîÅ s ú·¥Ä Ä·¥á ü…™…¥·¥ã", url=f"https://telegram.me/share/url?url={link}"
)
]
]
)
await second_message.reply_text(
f"<b>File Sharing Link Successfully Created:</b>\n\n{link}",
quote=True,
reply_markup=reply_markup,
)
@Bot.on_message(filters.private & filters.user(ADMINS) & filters.command("genlink"))
async def link_generator(client: Client, message: Message):
while True:
try:
channel_message = await client.ask(
text="<b>Please Forward the First Message/File from Channel DataBase.(Forward with Quote)\n\nor Submit Post Link from Channel Database</b>",
chat_id=message.from_user.id,
filters=(filters.forwarded | (filters.text & ~filters.forwarded)),
timeout=60,
)
except BaseException:
return
msg_id = await get_message_id(client, channel_message)
if msg_id:
break
await channel_message.reply(
"‚ùå <b> ô·¥Ä·¥ã·¥ã·¥Ä ! …™ ·¥Ä·¥ç ·¥è…¥ ü è ·¥°·¥è Ä·¥ã…™…¥…¢ “ì·¥è Ä ·¥õ·¥á·¥Ä·¥ç …¥·¥á·¥õ“ì ü…™x</b>",
quote=True,
)
continue
base64_string = await encode(f"get-{msg_id * abs(client.db_channel.id)}")
link = f"https://t.me/{client.username}?start={base64_string}"
reply_markup = InlineKeyboardMarkup(
[
[
InlineKeyboardButton(
"üîÅ s ú·¥Ä Ä·¥á ü…™…¥·¥ã", url=f"https://telegram.me/share/url?url={link}"
)
]
]
)
await channel_message.reply_text(
f"<b>File Sharing Link Successfully Created:</b>\n\n{link}",
quote=True,
reply_markup=reply_markup,
)

# ===== erotixe/FileShareBot2 :: plugins/start.py (GPL-3.0) =====
#(©)Codeflix_Bots
import os
import asyncio
from pyrogram import Client, filters, __version__
from pyrogram.enums import ParseMode
from pyrogram.types import Message, InlineKeyboardMarkup, InlineKeyboardButton, CallbackQuery
from pyrogram.errors import FloodWait, UserIsBlocked, InputUserDeactivated
from bot import Bot
from config import ADMINS, OWNER_ID, FORCE_MSG, START_MSG, CUSTOM_CAPTION, DISABLE_CHANNEL_BUTTON, PROTECT_CONTENT
from helper_func import subscribed, encode, decode, get_messages
from database.database import add_user, del_user, full_userbase, present_user
"""add time im seconds for waitingwaiting before delete
1min=60, 2min=60√ó2=120, 5min=60√ó5=300"""
SECONDS = int(os.getenv("SECONDS", "120"))
@Bot.on_message(filters.command('start') & filters.private & subscribed)
async def start_command(client: Client, message: Message):
id = message.from_user.id
if not await present_user(id):
try:
await add_user(id)
except:
pass
text = message.text
if len(text)>7:
try:
base64_string = text.split(" ", 1)[1]
except:
return
string = await decode(base64_string)
argument = string.split("-")
if len(argument) == 3:
try:
start = int(int(argument[1]) / abs(client.db_channel.id))
end = int(int(argument[2]) / abs(client.db_channel.id))
except:
return
if start <= end:
ids = range(start,end+1)
else:
ids = []
i = start
while True:
ids.append(i)
i -= 1
if i < end:
break
elif len(argument) == 2:
try:
ids = [int(int(argument[1]) / abs(client.db_channel.id))]
except:
return
temp_msg = await message.reply("Wait A Second...")
try:
messages = await get_messages(client, ids)
except:
await message.reply_text("Something went wrong..!")
return
await temp_msg.delete()
snt_msgs = []
for msg in messages:
            if CUSTOM_CAPTION and msg.document:
caption = CUSTOM_CAPTION.format(previouscaption = "" if not msg.caption else msg.caption.html, filename = msg.document.file_name)
else:
caption = "" if not msg.caption else msg.caption.html
if DISABLE_CHANNEL_BUTTON:
reply_markup = msg.reply_markup
else:
reply_markup = None
try:
snt_msg = await msg.copy(chat_id=message.from_user.id, caption = caption, parse_mode = ParseMode.HTML, reply_markup = reply_markup, protect_content=PROTECT_CONTENT)
await asyncio.sleep(0.5)
snt_msgs.append(snt_msg)
except FloodWait as e:
await asyncio.sleep(e.x)
snt_msg = await msg.copy(chat_id=message.from_user.id, caption = caption, parse_mode = ParseMode.HTML, reply_markup = reply_markup, protect_content=PROTECT_CONTENT)
snt_msgs.append(snt_msg)
except:
pass
await message.reply_text("‚ĺÔ∏èF…™ ü·¥á ·¥°…™ ü ü ·¥Ä·¥ú·¥õ·¥è ·¥Ö·¥á ü·¥á·¥õ·¥á …™…¥ 5 ·¥ç…™…¥·¥ú·¥õ·¥ásüò±\n_F·¥è Ä·¥°·¥Ä Ä·¥Ö …™·¥õ ·¥õ·¥è s·¥Ä·¥†·¥á·¥Ö ·¥ç·¥áss·¥Ä…¢·¥ás ·¥è Ä ·¥Ä…¥ è·¥° ú·¥á Ä·¥á ô·¥á“ì·¥è Ä·¥á ·¥Ö·¥è·¥°…¥ ü·¥è·¥Ä·¥Ö…™…¥…¢.")
await asyncio.sleep(SECONDS)
for snt_msg in snt_msgs:
try:
await snt_msg.delete()
except:
pass
return
else:
reply_markup = InlineKeyboardMarkup(
[
[
                    InlineKeyboardButton('ᴍᴏᴠɪᴇ ɢʀᴏᴜᴘ', url=f"https://telegram.me/movie7xchat"),
                    InlineKeyboardButton('ʟᴜᴄʏ', url=f"https://t.me/Lucy_Filter_bot")
                ],[
                    InlineKeyboardButton('ᴊᴏɪɴ ᴍʏ ᴜᴘᴅᴀᴛᴇs ᴄʜᴀɴɴᴇʟ', url=f"https://telegram.me/codeflix_bots")
                ],[
                    InlineKeyboardButton("ᴍᴏʀᴇ ɪɴғᴏ", callback_data = "about"),
                    InlineKeyboardButton("ᴄʟᴏsᴇ", callback_data = "close")
]
]
)
await message.reply_text(
text = START_MSG.format(
first = message.from_user.first_name,
last = message.from_user.last_name,
username = None if not message.from_user.username else '@' + message.from_user.username,
mention = message.from_user.mention,
id = message.from_user.id
),
reply_markup = reply_markup,
disable_web_page_preview = True,
quote = True
)
return
#=====================================================================================##
WAIT_MSG = """"<b>Processing ....</b>"""
REPLY_ERROR = """<code>Use this command as a reply to any telegram message with out any spaces.</code>"""
#=====================================================================================##
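# Fallback /start handler: registered after start_command, so it only fires when the
# "subscribed" filter rejects the user, i.e. they have not joined the force-sub channels.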
@Bot.on_message(filters.command('start') & filters.private)
async def not_joined(client: Client, message: Message):
buttons = [
[
InlineKeyboardButton(text="·¥ä·¥è…™…¥ ·¥Ñ ú·¥Ä…¥…¥·¥á ü", url=client.invitelink),
InlineKeyboardButton(text="·¥ä·¥è…™…¥ ·¥Ñ ú·¥Ä…¥…¥·¥á ü", url=client.invitelink2),
]
]
try:
buttons.append(
[
InlineKeyboardButton(
                    text = 'ᴛʀʏ ᴀɢᴀɪɴ',
url = f"https://t.me/{client.username}?start={message.command[1]}"
)
]
)
except IndexError:
pass
await message.reply(
text = FORCE_MSG.format(
first = message.from_user.first_name,
last = message.from_user.last_name,
username = None if not message.from_user.username else '@' + message.from_user.username,
mention = message.from_user.mention,
id = message.from_user.id
),
reply_markup = InlineKeyboardMarkup(buttons),
quote = True,
disable_web_page_preview = True
)
@Bot.on_message(filters.command('users') & filters.private & filters.user(ADMINS))
async def get_users(client: Bot, message: Message):
msg = await client.send_message(chat_id=message.chat.id, text=WAIT_MSG)
users = await full_userbase()
await msg.edit(f"{len(users)} users are using this bot")
@Bot.on_message(filters.private & filters.command('broadcast') & filters.user(ADMINS))
async def send_text(client: Bot, message: Message):
if message.reply_to_message:
query = await full_userbase()
broadcast_msg = message.reply_to_message
total = 0
successful = 0
blocked = 0
deleted = 0
unsuccessful = 0
pls_wait = await message.reply("<i>Broadcast ho rha till then FUCK OFF </i>")
for chat_id in query:
try:
await broadcast_msg.copy(chat_id)
successful += 1
except FloodWait as e:
await asyncio.sleep(e.x)
await broadcast_msg.copy(chat_id)
successful += 1
except UserIsBlocked:
await del_user(chat_id)
blocked += 1
except InputUserDeactivated:
await del_user(chat_id)
deleted += 1
except:
unsuccessful += 1
pass
total += 1
status = f"""<b><u>Broadcast Completed</u>
Total Users: <code>{total}</code>
Successful: <code>{successful}</code>
Blocked Users: <code>{blocked}</code>
Deleted Accounts: <code>{deleted}</code>
Unsuccessful: <code>{unsuccessful}</code></b>"""
return await pls_wait.edit(status)
else:
msg = await message.reply(REPLY_ERROR)
await asyncio.sleep(8)
await msg.delete()

# ===== erotixe/FileShareBot2 :: plugins/useless.py (GPL-3.0) =====
from bot import Bot
from pyrogram.types import Message
from pyrogram import filters
from config import ADMINS, BOT_STATS_TEXT, USER_REPLY_TEXT
from datetime import datetime
from helper_func import get_readable_time
@Bot.on_message(filters.command('stats') & filters.user(ADMINS))
async def stats(bot: Bot, message: Message):
now = datetime.now()
delta = now - bot.uptime
time = get_readable_time(delta.seconds)
await message.reply(BOT_STATS_TEXT.format(uptime=time))
@Bot.on_message(filters.private)
async def useless(_,message: Message):
if USER_REPLY_TEXT:
await message.reply(USER_REPLY_TEXT)

# ===== erotixe/FileShareBot2 :: plugins/cbb.py (GPL-3.0) =====
#(©)Codexbotz
from pyrogram import __version__
from bot import Bot
from config import OWNER_ID
from pyrogram.types import Message, InlineKeyboardMarkup, InlineKeyboardButton, CallbackQuery
@Bot.on_callback_query()
async def cb_handler(client: Bot, query: CallbackQuery):
data = query.data
if data == "about":
await query.message.edit_text(
text = f"<b>\n‚óã ·¥Ñ Ä·¥á·¥Ä·¥õ·¥è Ä : <a href='https://t.me/veldxd'>–º…™–∫—î è</a>\n‚óã ü·¥Ä…¥…¢·¥ú·¥Ä…¢·¥á : <code>Eng Sub & Dub</code>\n‚óã Main Channel : <a href=https://t.me/team_netflix>Team Netflix</a>\n‚óã Anime Channel : <a href=https://t.me/anime_cruise_netflix> Anime cruise</a>\n</b>",
disable_web_page_preview = True,
reply_markup = InlineKeyboardMarkup(
[
[
InlineKeyboardButton("üîí Close", callback_data = "close")
]
]
)
)
elif data == "close":
await query.message.delete()
try:
await query.message.reply_to_message.delete()
except:
pass

# ===== erotixe/FileShareBot2 :: plugins/__init__.py (GPL-3.0) =====
#(©)Codexbotz
#@iryme
from aiohttp import web
from .route import routes
async def web_server():
web_app = web.Application(client_max_size=30000000)
web_app.add_routes(routes)
return web_app
| 210 | Python | .py | 8 | 22.875 | 55 | 0.753846 | erotixe/FileShareBot2 | 8 | 6 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,115 | channel_post.py | erotixe_FileShareBot2/plugins/channel_post.py | #(©)Codexbotz
import asyncio
from pyrogram import filters, Client
from pyrogram.types import Message, InlineKeyboardMarkup, InlineKeyboardButton
from pyrogram.errors import FloodWait
from bot import Bot
from config import ADMINS, CHANNEL_ID, DISABLE_CHANNEL_BUTTON
from helper_func import encode
@Bot.on_message(filters.private & filters.user(ADMINS) & ~filters.command(['start','users','broadcast','batch','genlink','stats']))
async def channel_post(client: Client, message: Message):
reply_text = await message.reply_text("Please Wait...!", quote = True)
try:
post_message = await message.copy(chat_id = client.db_channel.id, disable_notification=True)
except FloodWait as e:
await asyncio.sleep(e.x)
post_message = await message.copy(chat_id = client.db_channel.id, disable_notification=True)
except Exception as e:
print(e)
await reply_text.edit_text("Something went Wrong..!")
return
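    # Build the shareable deep link: the message id is multiplied by the channel id, then base64-encoded into the bot's ?start= payload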
converted_id = post_message.id * abs(client.db_channel.id)
string = f"get-{converted_id}"
base64_string = await encode(string)
link = f"https://t.me/{client.username}?start={base64_string}"
    reply_markup = InlineKeyboardMarkup([[InlineKeyboardButton("🔁 Share URL", url=f'https://telegram.me/share/url?url={link}')]])
await reply_text.edit(f"<b>Here is your link</b>\n\n{link}", reply_markup=reply_markup, disable_web_page_preview = True)
if not DISABLE_CHANNEL_BUTTON:
await post_message.edit_reply_markup(reply_markup)
@Bot.on_message(filters.channel & filters.incoming & filters.chat(CHANNEL_ID))
async def new_post(client: Client, message: Message):
if DISABLE_CHANNEL_BUTTON:
return
converted_id = message.id * abs(client.db_channel.id)
string = f"get-{converted_id}"
base64_string = await encode(string)
link = f"https://t.me/{client.username}?start={base64_string}"
    reply_markup = InlineKeyboardMarkup([[InlineKeyboardButton("🔁 Share URL", url=f'https://telegram.me/share/url?url={link}')]])
try:
await message.edit_reply_markup(reply_markup)
except Exception as e:
print(e)
| 2,154 | Python | .py | 42 | 46.166667 | 132 | 0.718022 | erotixe/FileShareBot2 | 8 | 6 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,116 | route.py | erotixe_FileShareBot2/plugins/route.py | #(©)Codexbotz
#rymme
from aiohttp import web
routes = web.RouteTableDef()
@routes.get("/", allow_head=True)
async def root_route_handler(request):
return web.json_response("ᴘᴏᴡᴇʀᴇᴅ ʙʏ : ᴛᴇᴀᴍ ɴᴇᴛғʟɪx")
| 244 | Python | .py | 7 | 27.714286 | 57 | 0.757576 | erotixe/FileShareBot2 | 8 | 6 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,117 | builddebateteam.py | tevslin_debate_team/builddebateteam.py | # -*- coding: utf-8 -*-
"""
Created on Mon Jan 29 15:31:00 2024
@author: tevslin
Uses AgentBuilder in autogen to build a debate team.
Modify building_task below to change how the team is built.
"""
from dotenv import load_dotenv
import os
import json
load_dotenv()
llm="gpt-4-1106-preview"
tdict={"model":llm,"api_key":os.getenv('OPENAI_API_KEY')}
os.environ['OAI_CONFIG_LIST']="["+json.dumps(tdict)+"]"
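# autogen reads OAI_CONFIG_LIST from the environment as a JSON list of model configs; here it is a one-element list holding the model and API key built above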
config_file_or_env = 'OAI_CONFIG_LIST' # modify path
default_llm_config = {
'temperature': 0
}
from autogen.agentchat.contrib.agent_builder import AgentBuilder
builder = AgentBuilder(config_file_or_env=config_file_or_env, builder_model='gpt-4-1106-preview', agent_model='gpt-4-1106-preview')
building_task=""""Conduct a debate between agents on a given proposition.
The proposition is given to the debaters by a debate moderator agent who is the chat manager.
The debaters speak in the order in which they are listed below after they have
been given the proposition by the moderator. Each debater speaks once
and only once.
The debaters are :
1.affirmative constructive
2.negative contructive
3.affirmative rebutalist
4.negative rebutalist
The affirmative debaters must not rebut eachother
and the rebutalists must only rebut points actually made by their opponents and may not introduce
new arguments. Debaters may not terminate the session.
There is also a judge which gives each debater a score between 0 and 5 and gives a reason for the score and does terminate the session.
The judge must be created last.
The judge terminates the debate after passing judgment,
The agents created should all stream their output.""" #this line added 2/3/24
agent_list, agent_configs = builder.build(building_task, default_llm_config, coding=False)
import autogen
def start_task(execution_task: str, agent_list: list, llm_config: dict):
config_list = autogen.config_list_from_json(config_file_or_env, filter_dict={"model": ["gpt-4-1106-preview"]})
group_chat = autogen.GroupChat(agents=agent_list, messages=[], max_round=15)
manager = autogen.GroupChatManager(
groupchat=group_chat, llm_config={"config_list": config_list, **llm_config}
)
agent_list[0].initiate_chat(manager, message=execution_task)
os.environ['AUTOGEN_USE_DOCKER']='False'
for agent in agent_list:
print(agent.name)
proposition=input("enter proposition for test debate. Empty entry to skip test. ")
if len(proposition)>0: #if test wanted, do it
start_task(
#execution_task="Debate the proposition that Vermont should devote available limited resources only to mitigating the effects of climate change rather than attempting to prevent it.",
execution_task="Debate the proposition that Apple is better than Microsoft.",
agent_list=agent_list,
llm_config=default_llm_config
)
file_name=input('Enter name for debate team file. ".json" will be appended. Empty entry to skip save.')
if len(file_name)>0: #if save wanted
saved_path = builder.save(file_name+'.json')
| 3,118 | Python | .py | 60 | 47.666667 | 192 | 0.747028 | tevslin/debate_team | 8 | 2 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,118 | debatemanager.py | tevslin_debate_team/debatemanager.py | # -*- coding: utf-8 -*-
"""
Created on Thu Feb 1 17:38:40 2024
@author: tevslin
debate management modules
"""
class debate:
def __init__(self,api_key,llm="gpt-4-1106-preview",
saved_team=r'https://raw.githubusercontent.com/tevslin/debate_team/main/debateteam.json'):
import os
import json
self.llm=llm
tdict={"model":llm,"api_key":api_key}
os.environ['OAI_CONFIG_LIST']="["+json.dumps(tdict)+"]"
self.config_file_or_env='OAI_CONFIG_LIST'
self.saved_team=saved_team
self.llm_config={'temperature': 0}
os.environ['AUTOGEN_USE_DOCKER']='False'
def load_team(self):
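        """Load a saved agent team from a local file or a URL; URLs are downloaded to a temporary file first."""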
import requests
import tempfile
import os
from urllib.parse import urlparse
from autogen.agentchat.contrib.agent_builder import AgentBuilder
parsed=urlparse(self.saved_team) #checking for url or file designation
if parsed.scheme and parsed.netloc: #if it is a url
response=requests.get(self.saved_team)
with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
tmp_file_name = tmp_file.name
# Write the content to the temporary file
tmp_file.write(response.content)
file_name=tmp_file_name
else: # if it is a file
file_name=self.saved_team
new_builder = AgentBuilder(config_file_or_env=self.config_file_or_env)
self.agent_list, self.agent_config = new_builder.load(file_name)
#self.agent_list, self.agent_config = new_builder.load(r"C:\Users\tevsl\anaconda3\envs\autobuild\debateteam.json")
if not self.saved_team==file_name: #if temp file
os.remove(tmp_file_name) #remove it
def do_debate(self,proposition):
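        """Run the debate: the loaded agents join a group chat (capped at 15 rounds) driven by a GroupChatManager."""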
import autogen
config_list = autogen.config_list_from_json(self.config_file_or_env,
filter_dict={"model": [self.llm]})
group_chat = autogen.GroupChat(agents=self.agent_list, messages=[], max_round=15)
manager = autogen.GroupChatManager(
groupchat=group_chat, llm_config={"config_list": config_list, **self.llm_config}
)
self.agent_list[0].initiate_chat(manager, message=proposition)
if __name__ == '__main__':
from dotenv import load_dotenv
import os
load_dotenv()
dm=debate(os.getenv('OPENAI_API_KEY'))
dm.load_team()
proposition=" "
    while len(proposition)>0: #loop until user terminates
        proposition=input("What proposition would you like to debate? Hit enter to terminate. ")
        if len(proposition)>0: #if not terminating
dm.do_debate(f'Debate the proposition {proposition}')
| 2,889 | Python | .py | 59 | 37.135593 | 123 | 0.625457 | tevslin/debate_team | 8 | 2 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,119 | ststreamer.py | tevslin_debate_team/ststreamer.py | # -*- coding: utf-8 -*-
"""
Created on Sun Feb 4 16:01:01 2024
@author: tevsl
"""
from io import StringIO
import streamlit as st
class ObservableStringIO(StringIO):
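    """A StringIO that mirrors everything written to it into a Streamlit
    placeholder, so the agents' console output streams onto the page live."""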
def __init__(self,*args, **kwargs):
super().__init__(*args, **kwargs)
self.change_callback = None
self.container=st.empty()
self.text=""
def write(self, s):
# Call the original write method
super().write(s)
# Notify about the change
self.text+=s
self.container.markdown(
body=self.text,
unsafe_allow_html=False,
) | 618 | Python | .py | 22 | 20.818182 | 42 | 0.573604 | tevslin/debate_team | 8 | 2 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,120 | streamlit_app.py | tevslin_debate_team/streamlit_app.py | # filename: debate_app.py
import streamlit as st
import debatemanager
import ststreamer
from contextlib import redirect_stdout
# Function to create and load the debate team
def create_debate_team(api_key):
dm = debatemanager.debate(api_key)
dm.load_team()
return dm
# Function to capture the console output
def capture_console_output(func, *args, **kwargs):
f = ststreamer.ObservableStringIO()
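    # While stdout is redirected, each print lands in the buffer and is echoed to the page by ObservableStringIO.write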
with redirect_stdout(f):
func(*args, **kwargs)
output = f.getvalue()
return output
# Initialize session state
if 'api_key' not in st.session_state:
st.session_state['api_key'] = None
if 'dm' not in st.session_state:
st.session_state['dm'] = None
# App title
st.title("AI Debate Team - Bot v Bot")
# Sidebar for API key input
if st.session_state['api_key'] is None:
with st.sidebar:
api_key = st.text_input("Enter your ChatGPT API key (Tier 1 or higher account):", type="password")
if api_key:
st.session_state['api_key'] = api_key
with st.sidebar:
st.markdown("[feature requests](https://github.com/tevslin/debate_team/discussions)", unsafe_allow_html=True)
st.markdown("[bug reports](https://github.com/tevslin/debate_team/issues)", unsafe_allow_html=True)
st.markdown("[source code](https://github.com/tevslin/debate_team)", unsafe_allow_html=True)
st.markdown("[more info](https://blog.tomevslin.com/2024/02/an-ai-debate.html)", unsafe_allow_html=True)
# If API key is provided, create and load the debate team
if st.session_state['api_key'] and st.session_state['dm'] is None:
with st.spinner("Creating debate team..."):
st.session_state['dm'] = create_debate_team(st.session_state['api_key'])
# Once the debate team is created, ask for a debate proposition
if st.session_state['dm']:
proposition = st.text_input("Enter a debate proposition:")
if proposition:
full_proposition = f"Debate the proposition that {proposition}"
with st.spinner(f"Debating the proposition: {proposition}. Please be patient."):
# Redirect console output and perform the debate
output = capture_console_output(st.session_state['dm'].do_debate, full_proposition)
| 2,261 | Python | .py | 47 | 42.319149 | 114 | 0.69863 | tevslin/debate_team | 8 | 2 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,121 | rengine-tool.py | glownd_rengine-tool/rengine-tool.py | import requests
from urllib3.exceptions import InsecureRequestWarning
import argparse
from getpass import getpass
#Import custom classes
from Classes.re_authorize import REAuthorize
from Classes.re_target import RETarget
from Classes.re_scan import REScan
from Classes.re_engine import REEngine
from Classes.re_organization import REOrganization
from Classes.re_project import REProject
from Classes.re_user import REUser
#Supress HTTPS warnings
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
parent_parser = argparse.ArgumentParser(add_help=False)
main_parser = argparse.ArgumentParser()
option_subparsers = main_parser.add_subparsers(title="options",
dest="options")
main_parser.add_argument("-oj", action="store_true",help="JSON output")
#Top Level Commands
auth_parser = option_subparsers.add_parser("authorize", help="",parents=[parent_parser])
target_parser = option_subparsers.add_parser("target", help="",parents=[parent_parser])
organization_parser = option_subparsers.add_parser("organization", help="",parents=[parent_parser])
project_parser = option_subparsers.add_parser("project", help="",parents=[parent_parser])
scan_parser = option_subparsers.add_parser("scan", help="",parents=[parent_parser])
engine_parser = option_subparsers.add_parser("engine", help="",parents=[parent_parser])
user_parser = option_subparsers.add_parser("user", help="",parents=[parent_parser])
#User Actions
user_action_subparser = user_parser.add_subparsers(title="user_action",dest="user_action_command")
user_add_parser = user_action_subparser.add_parser("add", help="Add User", parents=[parent_parser])
#Target Actions
target_action_subparser = target_parser.add_subparsers(title="target_action",dest="target_action_command")
target_add_parser = target_action_subparser.add_parser("add", help="Add target", parents=[parent_parser])
target_delete_parser = target_action_subparser.add_parser("delete", help="Remove target", parents=[parent_parser])
target_list_parser = target_action_subparser.add_parser("list", help="List targets", parents=[parent_parser])
target_listvulns_parser = target_action_subparser.add_parser("list-vulns", help="List target vulnerabilities", parents=[parent_parser])
target_listips_parser = target_action_subparser.add_parser("list-ips", help="List target IPs", parents=[parent_parser])
target_listtech_parser = target_action_subparser.add_parser("list-tech", help="List target technologies", parents=[parent_parser])
target_listports_parser = target_action_subparser.add_parser("list-ports", help="List target ports", parents=[parent_parser])
target_listeps_parser = target_action_subparser.add_parser("list-eps", help="List target endpoints", parents=[parent_parser])
#Organization Actions
organization_action_subparser = organization_parser.add_subparsers(title="organization_action",dest="organization_action_command")
organization_add_parser = organization_action_subparser.add_parser("add", help="Add organization", parents=[parent_parser])
organization_remove_parser = organization_action_subparser.add_parser("remove", help="Remove organization", parents=[parent_parser])
organization_list_parser = organization_action_subparser.add_parser("list", help="List organizations", parents=[parent_parser])
organization_targets_parser = organization_action_subparser.add_parser("list-targets", help="List organization targets", parents=[parent_parser])
#Project Actions
# project_action_subparser = project_parser.add_subparsers(title="project_action",dest="project_action_command")
# project_add_parser = project_action_subparser.add_parser("add", help="Add project", parents=[parent_parser])
# project_remove_parser = project_action_subparser.add_parser("remove", help="Remove project", parents=[parent_parser])
# project_list_parser = project_action_subparser.add_parser("list", help="List projects", parents=[parent_parser])
#Scan Actions
scan_action_subparser = scan_parser.add_subparsers(title="scan_action",dest="scan_action_command")
scan_list_parser = scan_action_subparser.add_parser("list", help="List scans", parents=[parent_parser])
scan_start_parser = scan_action_subparser.add_parser("start", help="Start scan", parents=[parent_parser])
scan_stop_parser = scan_action_subparser.add_parser("stop", help="Stop scan", parents=[parent_parser])
scan_delete_parser = scan_action_subparser.add_parser("delete", help="Delete scan", parents=[parent_parser])
scan_status_parser = scan_action_subparser.add_parser("status", help="Get the status of scans", parents=[parent_parser])
scan_listips_parser = scan_action_subparser.add_parser("list-ips", help="Get IP Addresses from scan", parents=[parent_parser])
scan_listeps_parser = scan_action_subparser.add_parser("list-eps", help="Get Endpoints from scan", parents=[parent_parser])
scan_listtech_parser = scan_action_subparser.add_parser("list-tech", help="List found technoligies in scan", parents=[parent_parser])
scan_listports_parser = scan_action_subparser.add_parser("list-ports", help="List found ports in scan", parents=[parent_parser])
scan_listvulns_parser = scan_action_subparser.add_parser("list-vulns", help="List scan vulnerabilities", parents=[parent_parser])
scan_listscanlogs_parser = scan_action_subparser.add_parser("list-scanlogs", help="List a scans logs", parents=[parent_parser])
#Engine Actions
engine_action_subparser = engine_parser.add_subparsers(title="engine_action",dest="engine_action_command")
engine_list_parser = engine_action_subparser.add_parser("list", help="List engines", parents=[parent_parser])
#Set up authorization parser
auth_parser.add_argument("-b", metavar="--base-url", action="store",help="URL (ie: https://localhost/)", default="https://localhost/")
auth_parser.add_argument("-u", metavar="--user", action="store",help="ReNgine Username")
auth_parser.add_argument("-p", metavar="--password", action="store",help="ReNgine Password")
auth_parser.add_argument("-d", action="store_true",help="Deletes your session. You should always do this once finished with the tool")
#Target Parsers Setup
##Setup Target Add Parser
target_add_parser.add_argument("-s", metavar="--slug", action="store",help="ReNgine Project Name / Slug", required=True)
target_add_parser.add_argument("-t", metavar="--target", action="store",help="Target", required=True)
target_add_parser.add_argument("-d", metavar="--desc", action="store",help="Target Description", default="")
target_add_parser.add_argument("-h1", metavar="--team", action="store",help="H1 Team Handle")
#Target
##list -- Nothing to do here
##remove
target_delete_parser.add_argument("-ti", metavar="--target-id", action="store",help="Target", required=True)
##listvulns
target_listvulns_parser.add_argument("-ti", metavar="--target-id", action="store",help="Target", required=True)
##listIPs
target_listips_parser.add_argument("-ti", metavar="--target-id", action="store",help="Target", required=True)
##listTech
target_listtech_parser.add_argument("-ti", metavar="--target-id", action="store",help="Target", required=True)
##listPorts
target_listports_parser.add_argument("-ti", metavar="--target-id", action="store",help="Target", required=True)
##listEndPoints
target_listeps_parser.add_argument("-ti", metavar="--target-id", action="store",help="Target", required=True)
target_listeps_parser.add_argument("-pn", metavar="--slug", action="store",help="Project name / slug", required=True)
#Scan
##delete
scan_delete_group = scan_delete_parser.add_mutually_exclusive_group(required=True)
scan_delete_group.add_argument("-si", metavar="--scan-id", action="store",help="Scan ID")
scan_delete_group.add_argument("-ssi", metavar="--subscan-id", action="store",help="Sub-scan ID")
##list
scan_list_parser.add_argument("-pn", metavar="--slug", action="store",help="Project name / slug")
##start
scan_start_parser.add_argument("-pn", metavar="--slug", action="store",help="Project name / slug", required=True)
scan_start_parser.add_argument("-ti", metavar="--target-id", action="store",help="Target ID", required=True)
scan_start_parser.add_argument("-ei", metavar="--engine-id", action="store",help="Engine ID", required=True)
##stop
scan_stop_group = scan_stop_parser.add_mutually_exclusive_group(required=True)
scan_stop_group.add_argument("-si", metavar="--scan-id", action="store",help="Scan ID")
scan_stop_group.add_argument("-ssi", metavar="--subscan-id", action="store",help="Sub-scan ID")
##status
scan_status_parser.add_argument("-pn", metavar="--slug", action="store",help="Project name / slug", required=True)
##listvulns
scan_listvulns_parser.add_argument("-si", metavar="--scan-id", action="store",help="Scan ID", required=True)
##listIPs
scan_listips_parser.add_argument("-si", metavar="--scan-id", action="store",help="Scan ID", required=True)
##listScanLogs
scan_listscanlogs_parser.add_argument("-si", metavar="--scan-id", action="store",help="Scan ID", required=True)
scan_listscanlogs_parser.add_argument("-wo", action="store_true",help="Print command output", required=True)
##listTechnology
scan_listtech_parser.add_argument("-si", metavar="--scan-id", action="store",help="Scan ID", required=True)
##ListPorts
scan_listports_parser.add_argument("-si", metavar="--scan-id", action="store",help="Scan ID", required=True)
##listEndpoints
scan_listeps_parser.add_argument("-si", metavar="--scan-id", action="store",help="Scan ID", required=True)
#Organization
##list
organization_targets_parser.add_argument("-oi", metavar="--organization-id", action="store",help="Organization ID", required=True)
##add
organization_add_parser.add_argument("-on", metavar="--organization-name", action="store",help="Organization Name", required=True)
organization_add_parser.add_argument("-d", metavar="--organization-description", action="store",help="Organization Description", required=True)
organization_add_parser.add_argument("-pn", metavar="--slug", action="store",help="Project Name / Slug", required=True)
organization_add_parser.add_argument("-ti", metavar="--target-ids", action="store",help="Target IDs (seperate multiple with commas)", required=True)
##remove
organization_remove_parser.add_argument("-oi", metavar="--organization-id", action="store",help="Organization ID", required=True)
#User
user_add_parser.add_argument("-u", metavar="--username", action="store",help="New user's name")
user_add_parser.add_argument("-p", metavar="--password", action="store",help="New user's password")
user_add_parser.add_argument("-pn", metavar="--slug", action="store",help="ReNgine Project Name / Slug", required=True)
user_add_parser.add_argument("-r", metavar="--role", action="store",help="New user's role", required=True)
args = main_parser.parse_args()
s: requests.Session
#Authorize
if(args.options == 'authorize'):
REAuthorize(args)
else:
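    # Every other command reuses the pickled session saved by a previous 'authorize' run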
s = REAuthorize.getSession()
match args.options.lower():
case 'target':
RETarget(args, s)
case 'scan':
REScan(args, s)
case 'engine':
REEngine(args, s)
case 'organization':
REOrganization(args, s)
case 'project':
REProject(args, s)
case 'user':
REUser(args, s)
        case _:
            # Nothing to do
            pass
2,287,122 | re_target.py | glownd_rengine-tool/Classes/re_target.py | import argparse
import requests
from urllib3.exceptions import InsecureRequestWarning
import json
from tabulate import tabulate
class RETarget():
def __init__(self, args:argparse.Namespace, s:requests.Session):
if(args.target_action_command):
match args.target_action_command.lower():
case 'add':
self.addTarget(args, s)
case 'list':
self.listTargets(args, s)
case 'list-vulns':
self.listVulns(args, s)
case 'list-ips':
self.listIPs(args, s)
case 'list-tech':
self.listTech(args, s)
case 'list-ports':
self.listPorts(args, s)
case 'list-eps':
self.listEndpoints(args, s)
case 'delete':
self.deleteTarget(args, s)
                case _:
print("What are we doing?")
else:
print("No action given, use -h to view actions")
@staticmethod
def addTarget(args, s):
baseUrl = s.cookies['hostname']
addTargetUrl = baseUrl + 'api/add/target/'
csrf_token = s.cookies['csrftoken']
        attr = {'description': args.d, 'domain_name': args.t, 'slug': args.s, 'h1_team_handle': args.h1}
headers = {'Referer': addTargetUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
r = s.post(addTargetUrl, json=attr, headers=headers, verify=False)
#Show Success/Failure
if "Domain successfully added" in r.text:
print("Successfully added: " + args.t)
else:
print("Error adding: " + args.t)
@staticmethod
def listTargets(args, s):
baseUrl = s.cookies['hostname']
listTargetsUrl = baseUrl + 'api/listTargets/'
csrf_token = s.cookies['csrftoken']
headers = {'Referer': listTargetsUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
r = s.get(listTargetsUrl, headers=headers, verify=False)
j = r.json()
#If JSON output
if(args.oj):
print(json.dumps(j, indent=2))
        #Lets do some formatting for non-json output
else:
data = []
for i in j['results']:
id = i['id']
name = i['name']
org = i['organization']
mrs = i['most_recent_scan']
slug = str(i['project']['id']) + ' | ' + i['project']['slug']
ssd = i['start_scan_date']
data.append([id, name, slug, org, ssd, mrs])
print (tabulate(data, headers=["ID", "Name", "Slug", "Org", "Scan Started", "Recent Scan"]))
@staticmethod
def listVulns(args, s):
baseUrl = s.cookies['hostname']
listVulnsUrl = baseUrl + 'api/listVulnerability/'
csrf_token = s.cookies['csrftoken']
headers = {'Referer': listVulnsUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
attr = {'target_id': args.ti}
r = s.get(listVulnsUrl, json=attr, headers=headers, verify=False)
j = r.json()
#If JSON output
if(args.oj):
print(json.dumps(j, indent=2))
        #Lets do some formatting for non-json output
else:
data = []
for i in j['results']:
id = i['id']
name = i['name']
severity = i['severity']
description = i['description']
cvss_score = i['cvss_score']
open_status = i['open_status']
subdomain_name = i['subdomain']['name']
#Loop through CVEs
cves = ""
for cve in i['cve_ids']:
cves = cves + cve['name'] + ', '
data.append([id, name, severity, cvss_score, open_status, description, subdomain_name, cves])
print (tabulate(data, headers=["ID", "Name", "Severity", "CVSS", "Open", "Description", "Subdomain", "CVEs"]))
@staticmethod
def listIPs(args, s):
baseUrl = s.cookies['hostname']
listIPsUrl = baseUrl + 'api/queryIps/'
csrf_token = s.cookies['csrftoken']
headers = {'Referer': listIPsUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
params = {'target_id': args.ti}
r = s.get(listIPsUrl, params=params, headers=headers, verify=False)
j = r.json()
#If JSON output
if(args.oj):
print(json.dumps(j, indent=2))
        #Lets do some formatting for non-json output
else:
data = []
for i in j['ips']:
id = i['id']
address = i['address']
is_cdn = i['is_cdn']
version = i['version']
is_private = i['is_private']
reverse_pointer = i['reverse_pointer']
geo_iso = i['geo_iso']
#TODO: Need to get a scan with ports & ip_subscan_ids to see the output
                #TODO: Loop through ports / ip_subscan_ids
data.append([id, address, is_cdn, version, is_private, reverse_pointer, geo_iso])
print (tabulate(data, headers=["ID", "Address", "IsCDN", "Version", "IsPrivate", "Reverse Pointer", "GeoISO"]))
@staticmethod
def listTech(args, s):
baseUrl = s.cookies['hostname']
listTechUrl = baseUrl + 'api/queryTechnologies/'
csrf_token = s.cookies['csrftoken']
headers = {'Referer': listTechUrl,'Content-type': 'application/x-www-form-urlencoded', 'X-CSRFToken': csrf_token}
attr = {'target_id': args.ti}
r = s.get(listTechUrl, params=attr, headers=headers, verify=False)
j = r.json()
#If JSON output
if(args.oj):
print(json.dumps(j, indent=2))
        #Lets do some formatting for non-json output
else:
data = []
for i in j['technologies']:
name = i['name']
data.append([name])
print (tabulate(data, headers=["Name"]))
@staticmethod
def listEndpoints(args, s):
baseUrl = s.cookies['hostname']
listEndpointsUrl = baseUrl + 'api/listEndpoints/'
csrf_token = s.cookies['csrftoken']
headers = {'Referer': listEndpointsUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
attr = {'target_id': args.ti, 'project': args.pn}
r = s.get(listEndpointsUrl, params=attr, headers=headers, verify=False)
j = r.json()
#If JSON output
if(args.oj):
print(json.dumps(j, indent=2))
        #Lets do some formatting for non-json output
else:
data = []
for i in j['results']:
url = i['http_url']
title = i['page_title']
status = i['http_status']
webserver = i['webserver']
data.append([url, title, status, webserver])
print (tabulate(data, headers=["URL", "Title", "Status", "Webserver"]))
@staticmethod
def listPorts(args, s):
baseUrl = s.cookies['hostname']
listPortsUrl = baseUrl + 'api/queryPorts/'
csrf_token = s.cookies['csrftoken']
headers = {'Referer': listPortsUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
attr = {'target_id': args.ti}
r = s.get(listPortsUrl, params=attr, headers=headers, verify=False)
j = r.json()
#If JSON output
if(args.oj):
print(json.dumps(j, indent=2))
        #Lets do some formatting for non-json output
else:
data = []
for i in j['ports']:
number = i['number']
service = i['service_name']
description = i['description']
uncommon = i['is_uncommon']
data.append([number, service, description, uncommon])
print (tabulate(data, headers=["Port", "Service", "Desc", "Uncommon"]))
@staticmethod
def deleteTarget(args, s):
baseUrl = s.cookies['hostname']
deleteTargetUrl = baseUrl + 'target/delete/target/' + args.ti
csrf_token = s.cookies['csrftoken']
data = {"csrfmiddlewaretoken": csrf_token}
headers = {'Referer': deleteTargetUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
r = s.post(deleteTargetUrl, json=data, headers=headers, verify=False)
try:
j = r.json()
if(j["status"] == "true"):
print("SUCCESS")
else:
print(r.text)
except:
print("ERROR: " + r.text) | 8,796 | Python | .py | 200 | 31.94 | 123 | 0.542448 | glownd/rengine-tool | 8 | 1 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,123 | re_project.py | glownd_rengine-tool/Classes/re_project.py | import argparse
import requests
from urllib3.exceptions import InsecureRequestWarning
import json
from tabulate import tabulate
class REProject():
def __init__(self, args:argparse.Namespace, s:requests.Session):
if(args.project_action_command):
match args.project_action_command.lower():
case 'list':
self.listProjects(args, s)
                case _:
print("What are we doing?")
else:
print("No action given, use -h to view actions")
@staticmethod
def listProjects(args, s):
pass | 613 | Python | .py | 18 | 25.166667 | 68 | 0.630769 | glownd/rengine-tool | 8 | 1 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,124 | re_authorize.py | glownd_rengine-tool/Classes/re_authorize.py | import argparse
import pathlib
import requests
from urllib3.exceptions import InsecureRequestWarning
from getpass import getpass
import pickle
class StrToBytes:
def __init__(self, fileobj):
self.fileobj = fileobj
def read(self, size):
return self.fileobj.read(size).encode()
def readline(self, size=-1):
return self.fileobj.readline(size).encode()
class REAuthorize():
def __init__(self, args:argparse.Namespace):
if args.d == True:
self.deleteSession()
else:
#Set username & password
username = self.getUsername(args.u)
password = self.getPassword(args.p)
#Set URLs
baseUrl = self.cleanBaseUrl(args.b)
#Start session and authorize
self.authorize(baseUrl, username, password)
@staticmethod
def authorize(baseUrl, username, password):
try:
loginUrl = baseUrl + 'login/'
s = requests.Session()
s.cookies.set("hostname", baseUrl, domain="local.local")
r1 = s.get(loginUrl, verify=False)
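            # The login page GET sets a csrftoken cookie; it must be sent back with the POST for Django's CSRF check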
csrf_token = r1.cookies['csrftoken']
r2 = s.post(loginUrl, data=dict(username=username,password=password,csrfmiddlewaretoken=csrf_token,next='/'), headers=dict(Referer=loginUrl), verify=False)
#Lets make sure everything went okay, and save the session if it has
if('Invalid username or password.' in r2.text):
print('Invalid username or password!')
elif(r2.status_code == 200):
print("AUTHORIZED - Saving session into .rengineSession file")
#Save session
with open('.rengineSession', 'wb') as f:
pickle.dump(s, f)
print("SAVED")
else:
                print('ERROR AUTHORIZING - Check your username/password and base URL. Status Code: ' + str(r2.status_code))
except Exception as error:
print('ERROR!')
print(error)
@staticmethod
def deleteSession():
        pathlib.Path('.rengineSession').unlink()
print('Deleted session -- good on you for great security practices!')
@staticmethod
def getUsername(username):
if not username:
username = input("Enter username: ")
return username
@staticmethod
def getPassword(password):
if not password:
password = getpass("Enter password: ")
return password
@staticmethod
def cleanBaseUrl(url):
#Add a forward slash at the end of the base URL if it isn't there to save some users a headache
if url[-1] != "/":
url = url + "/"
return url
@staticmethod
def getSession() -> requests.session:
with open('.rengineSession', 'rb') as f:
session = pickle.load(f)
return session
| 2,947 | Python | .py | 73 | 29.945205 | 167 | 0.608881 | glownd/rengine-tool | 8 | 1 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,125 | re_organization.py | glownd_rengine-tool/Classes/re_organization.py | import argparse
import requests
from urllib3.exceptions import InsecureRequestWarning
import json
from tabulate import tabulate
class REOrganization():
def __init__(self, args:argparse.Namespace, s:requests.Session):
if(args.organization_action_command):
match args.organization_action_command.lower():
case 'list':
self.listOrganizations(args, s)
case 'list-targets':
self.listOrganizationTargets(args, s)
case 'add':
self.addOrganization(args, s)
case 'remove':
self.removeOrganization(args, s)
                case _:
print("What are we doing?")
else:
print("No action given, use -h to view actions")
@staticmethod
def listOrganizations(args, s):
baseUrl = s.cookies['hostname']
listOrganizationsUrl = baseUrl + 'api/listOrganizations/'
csrf_token = s.cookies['csrftoken']
headers = {'Referer': listOrganizationsUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
r = s.get(listOrganizationsUrl, headers=headers, verify=False)
j = r.json()
#If JSON output
if(args.oj):
print(json.dumps(j, indent=2))
        #Lets do some formatting for non-json output
else:
data = []
for i in j['organizations']:
id = i['id']
name = i['name']
description = i['description']
pid = i['project']
dids = i['domains']
data.append([id, name, description, pid, dids])
print (tabulate(data, headers=["ID", "Name", "Description", "Project ID", "Domain IDs"]))
@staticmethod
def listOrganizationTargets(args, s):
baseUrl = s.cookies['hostname']
listOrganizationsUrl = baseUrl + 'api/queryTargetsInOrganization/'
csrf_token = s.cookies['csrftoken']
params = {"organization_id": args.oi}
headers = {'Referer': listOrganizationsUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
r = s.get(listOrganizationsUrl, params=params, headers=headers, verify=False)
j = r.json()
#If JSON output
if(args.oj):
print(json.dumps(j, indent=2))
        #Lets do some formatting for non-json output
else:
data = []
for i in j['organization']:
id = i['id']
name = i['name']
description = i['description']
pid = i['project']
dids = i['domains']
domains = []
for d in j['domains']:
domains.append(d['name'])
data.append([id, name, description, pid, dids, domains])
print (tabulate(data, headers=["ID", "Name", "Description", "Project ID", "Domain IDs", "Domains"]))
@staticmethod
def addOrganization(args, s):
baseUrl = s.cookies['hostname']
        addOrganizationUrl = baseUrl + 'target/' + args.pn + '/add/organization'
csrf_token = s.cookies['csrftoken']
data = {"name": args.on, "description": args.d}
domains = []
if ',' in args.ti:
            for target_id in args.ti.split(','):
                domains.append(int(target_id))
data["domains"] = domains
else:
data["domains"] = args.ti
headers = {'Referer': addOrganizationUrl,'Content-type': 'application/x-www-form-urlencoded', 'X-CSRFToken': csrf_token}
r = s.post(addOrganizationUrl, data=data, headers=headers, verify=False)
if(r.status_code == 200):
print("Looks successful!")
else:
print("ERROR: " + r.status_code)
@staticmethod
def removeOrganization(args, s):
baseUrl = s.cookies['hostname']
        removeOrganizationUrl = baseUrl + 'target/delete/organization/' + str(args.oi)
csrf_token = s.cookies['csrftoken']
headers = {'Referer': removeOrganizationUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
r = s.post(removeOrganizationUrl, headers=headers, verify=False)
if(r.status_code == 200):
print("Looks successful!")
else:
print("ERROR: " + r.status_code) | 4,413 | Python | .py | 99 | 32.919192 | 128 | 0.574218 | glownd/rengine-tool | 8 | 1 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,126 | re_user.py | glownd_rengine-tool/Classes/re_user.py | import argparse
import requests
from urllib3.exceptions import InsecureRequestWarning
from getpass import getpass
from tabulate import tabulate
class REUser():
def __init__(self, args:argparse.Namespace, s:requests.Session):
if(args.user_action_command):
match args.user_action_command.lower():
case 'add':
self.addUser(args, s)
                case _:
print("What are we doing?")
else:
print("No action given, use -h to view actions")
@staticmethod
def addUser(args, s):
#Set URLs
baseUrl = s.cookies['hostname']
addUserUrl = baseUrl + args.pn + '/admin_interface/update?mode=create'
username = REUser.getUsername(args.u)
password = REUser.getPassword(args.p)
#Add new rengine user
csrf_token = s.cookies['csrftoken']
data = {"username": username,"role": args.r, "password": password}
headers = {'Referer': addUserUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
r = s.post(addUserUrl, json=data, headers=headers, verify=False)
if(r.status_code == 200):
print("SUCCESS!")
else:
print('ERROR: ' + r.text)
@staticmethod
def getUsername(username):
if not username:
username = input("Enter username: ")
return username
@staticmethod
def getPassword(password):
if not password:
password = getpass("Enter password: ")
return password | 1,592 | Python | .py | 41 | 29.170732 | 103 | 0.6125 | glownd/rengine-tool | 8 | 1 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,127 | re_scan.py | glownd_rengine-tool/Classes/re_scan.py | import argparse
import requests
from urllib3.exceptions import InsecureRequestWarning
import json
from tabulate import tabulate
import datetime
class REScan():
def __init__(self, args:argparse.Namespace, s:requests.Session):
if(args.scan_action_command):
match args.scan_action_command.lower():
case 'list':
self.listScans(args, s)
case 'status':
self.listScanStatus(args, s)
case 'list-vulns':
self.listVulns(args, s)
case 'list-ips':
self.listIPs(args, s)
case 'list-tech':
self.listTech(args, s)
case 'list-ports':
self.listPorts(args, s)
case 'list-eps':
self.listEndpoints(args, s)
case 'list-scanlogs':
self.listScanLogs(args, s)
case 'start':
self.startScan(args, s)
case 'stop':
self.stopScan(args, s)
case 'delete':
self.deleteScan(args,s)
                case _:
print("What are we doing?")
else:
print("No action given, use -h to view actions")
@staticmethod
def listScans(args, s):
baseUrl = s.cookies['hostname']
listScansUrl = baseUrl + 'api/listScanHistory/'
csrf_token = s.cookies['csrftoken']
headers = {'Referer': listScansUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
r = s.get(listScansUrl, headers=headers, verify=False)
j = r.json()
#If JSON output
if(args.oj):
print(json.dumps(j, indent=2))
        #Lets do some formatting for non-json output
else:
data = []
for scan in j:
id = scan['id']
progress = scan['current_progress']
start_date = '' if scan['start_scan_date'] is None else datetime.datetime.strptime(scan['start_scan_date'], "%Y-%m-%dT%H:%M:%S.%fZ").strftime("%Y-%m-%d %H:%M:%S")
stop_date = '' if scan['stop_scan_date'] is None else datetime.datetime.strptime(scan['stop_scan_date'], "%Y-%m-%dT%H:%M:%S.%fZ").strftime("%Y-%m-%d %H:%M:%S")
domain = scan['domain']['name']
scan_type = scan['scan_type']['engine_name']
data.append([id, domain, start_date, stop_date, progress, scan_type])
print (tabulate(data, headers=["ID", "Domain", "Start", "Stop", "Progress", "Type"]))
@staticmethod
def listScanStatus(args, s):
baseUrl = s.cookies['hostname']
listScanStatusUrl = baseUrl + 'api/scan_status/'
csrf_token = s.cookies['csrftoken']
params = {'project': args.pn}
headers = {'Referer': listScanStatusUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
r = s.get(listScanStatusUrl, params=params, headers=headers, verify=False)
j = r.json()
#If JSON output
if(args.oj):
print(json.dumps(j, indent=2))
        #Lets do some formatting for non-json output
else:
data = REScan.formatScanStatus(j)
print (tabulate(data, headers=["Status", "ID", "Domain", "Start", "Stop", "Progress", "Type"]))
    @staticmethod
    def formatScanStatus(j):
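        """Flatten the pending/scanning/completed scan groups into rows for tabulate."""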
data = []
types = ['pending','scanning','completed']
for t in types:
for scan in j['scans'][t]:
status = t.upper()
id = scan['id']
progress = scan['current_progress']
start_date = '' if scan['start_scan_date'] is None else datetime.datetime.strptime(scan['start_scan_date'], "%Y-%m-%dT%H:%M:%S.%fZ").strftime("%Y-%m-%d %H:%M:%S")
stop_date = '' if scan['stop_scan_date'] is None else datetime.datetime.strptime(scan['stop_scan_date'], "%Y-%m-%dT%H:%M:%S.%fZ").strftime("%Y-%m-%d %H:%M:%S")
domain = scan['domain']['name']
scan_type = scan['scan_type']['engine_name']
data.append([status, id, domain, start_date, stop_date, progress, scan_type])
return data
@staticmethod
def listIPs(args, s):
baseUrl = s.cookies['hostname']
listIPsUrl = baseUrl + 'api/queryIps/'
csrf_token = s.cookies['csrftoken']
headers = {'Referer': listIPsUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
params = {'scan_id': args.si}
r = s.get(listIPsUrl, params=params, headers=headers, verify=False)
j = r.json()
#If JSON output
if(args.oj):
print(json.dumps(j, indent=2))
        #Lets do some formatting for non-json output
else:
data = []
for i in j['ips']:
id = i['id']
address = i['address']
is_cdn = i['is_cdn']
version = i['version']
is_private = i['is_private']
reverse_pointer = i['reverse_pointer']
geo_iso = i['geo_iso']
#TODO: Need to get a scan with ports & ip_subscan_ids to see the output
                #TODO: Loop through ports / ip_subscan_ids
data.append([id, address, is_cdn, version, is_private, reverse_pointer, geo_iso])
print (tabulate(data, headers=["ID", "Address", "IsCDN", "Version", "IsPrivate", "Reverse Pointer", "GeoISO"]))
@staticmethod
def listEndpoints(args, s):
baseUrl = s.cookies['hostname']
listEndpointsUrl = baseUrl + 'api/queryEndpoints/'
csrf_token = s.cookies['csrftoken']
headers = {'Referer': listEndpointsUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
attr = {'scan_id': args.si}
r = s.get(listEndpointsUrl, params=attr, headers=headers, verify=False)
j = r.json()
#If JSON output
if(args.oj):
print(json.dumps(j, indent=2))
        #Lets do some formatting for non-json output
else:
data = []
for i in j['endpoints']:
url = i['http_url']
title = i['page_title']
status = i['http_status']
webserver = i['webserver']
data.append([url, title, status, webserver])
print (tabulate(data, headers=["URL", "Title", "Status", "Webserver"]))
@staticmethod
def listTech(args, s):
baseUrl = s.cookies['hostname']
listTechUrl = baseUrl + 'api/queryTechnologies/'
csrf_token = s.cookies['csrftoken']
headers = {'Referer': listTechUrl,'Content-type': 'application/x-www-form-urlencoded', 'X-CSRFToken': csrf_token}
attr = {'scan_id': args.si}
r = s.get(listTechUrl, params=attr, headers=headers, verify=False)
j = r.json()
#If JSON output
if(args.oj):
print(json.dumps(j, indent=2))
        #Lets do some formatting for non-json output
else:
data = []
for i in j['technologies']:
name = i['name']
data.append([name])
print (tabulate(data, headers=["Name"]))
@staticmethod
def listPorts(args, s):
baseUrl = s.cookies['hostname']
listPortsUrl = baseUrl + 'api/queryPorts/'
csrf_token = s.cookies['csrftoken']
headers = {'Referer': listPortsUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
attr = {'scan_id': args.si}
r = s.get(listPortsUrl, params=attr, headers=headers, verify=False)
j = r.json()
#If JSON output
if(args.oj):
print(json.dumps(j, indent=2))
        #Lets do some formatting for non-json output
else:
data = []
for i in j['ports']:
number = i['number']
service = i['service_name']
description = i['description']
uncommon = i['is_uncommon']
data.append([number, service, description, uncommon])
print (tabulate(data, headers=["Port", "Service", "Desc", "Uncommon"]))
@staticmethod
def listVulns(args, s):
baseUrl = s.cookies['hostname']
listVulnsUrl = baseUrl + 'api/listVulnerability/'
csrf_token = s.cookies['csrftoken']
headers = {'Referer': listVulnsUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
attr = {'scan_history': args.si}
r = s.get(listVulnsUrl, params=attr, headers=headers, verify=False)
j = r.json()
#If JSON output
if(args.oj):
print(json.dumps(j, indent=2))
        #Lets do some formatting for non-json output
else:
data = []
for i in j['results']:
id = i['id']
name = i['name']
severity = i['severity']
description = i['description']
cvss_score = i['cvss_score']
open_status = i['open_status']
subdomain_name = i['subdomain']['name']
#Loop through CVEs
cves = ""
for cve in i['cve_ids']:
cves = cves + cve['name'] + ', '
data.append([id, name, severity, cvss_score, open_status, description, subdomain_name, cves])
print (tabulate(data, headers=["ID", "Name", "Severity", "CVSS", "Open", "Description", "Subdomain", "CVEs"]))
@staticmethod
def listScanLogs(args, s):
baseUrl = s.cookies['hostname']
listScanLogsUrl = baseUrl + 'api/listScanLogs/'
csrf_token = s.cookies['csrftoken']
headers = {'Referer': listScanLogsUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
attr = {'scan_id': args.si}
r = s.get(listScanLogsUrl, params=attr, headers=headers, verify=False)
j = r.json()
#If JSON output
if(args.oj):
print(json.dumps(j, indent=2))
        #Lets do some formatting for non-json output
else:
data = []
for i in j['results']:
time = '' if i['time'] is None else datetime.datetime.strptime(i['time'], "%Y-%m-%dT%H:%M:%S.%fZ").strftime("%Y-%m-%d %H:%M:%S")
command = i['command']
return_code = i['return_code']
output = '' if i['output'] is None else i['output']
if(args.wo):
print('Time:\t' + time + '\nCommand:\t' + command + '\nReturn Code:\t' + str(return_code) + '\nOutput:\t' + output + '\n')
else:
print('Time:\t' + time + '\nCommand:\t' + command + '\nReturn Code:\t' + str(return_code) + '\n')
#TODO: This should eventually be modified to accept more options
@staticmethod
def startScan(args, s):
#Set URLs
baseUrl = s.cookies['hostname']
startScanUrl = baseUrl + 'scan/' + args.pn + '/start/' + args.ti
#Start scan on target
csrf_token = s.cookies['csrftoken']
data = '?csrfmiddlewaretoken=' + csrf_token + '&scan_mode=' + args.ei + '&importSubdomainTextArea=&outOfScopeSubdomainTextarea=&filterPath='
headers = {'Referer': startScanUrl,'Content-type': 'application/x-www-form-urlencoded', 'X-CSRFToken': csrf_token}
r = s.post(startScanUrl, data=data, headers=headers, verify=False)
if("Scan history" in r.text):
print("SUCCESS")
else:
print("FAILURE: Something went wrong!")
@staticmethod
def stopScan(args, s):
baseUrl = s.cookies['hostname']
stopScanUrl = baseUrl + 'api/action/stop/scan/'
csrf_token = s.cookies['csrftoken']
headers = {'Referer': stopScanUrl,'Content-type': 'application/json', 'X-Csrftoken': csrf_token}
if(args.si):
data = {"scan_id": args.si}
elif(args.ssi):
data = {"subscan_id": args.ssi}
else:
"ERROR: No scan/sub-scan ID provided."
r = s.post(stopScanUrl, json=data, headers=headers, verify=False)
j = r.json()
if(j["status"] == True):
print("SUCCESS!")
else:
print("ERROR: " + j["message"])
@staticmethod
def deleteScan(args, s):
if(args.ssi):
REScan.deleteSubScan(args, s)
else:
#Set URLs
baseUrl = s.cookies['hostname']
deleteScanUrl = baseUrl + 'scan/delete/scan/' + args.si
#Start scan on target
csrf_token = s.cookies['csrftoken']
data = {"csrfmiddlewaretoken": csrf_token}
headers = {'Referer': deleteScanUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
r = s.post(deleteScanUrl, json=data, headers=headers, verify=False)
if(r.status_code == 200):
print("SUCCESS!")
else:
print('ERROR: ' + r.text)
@staticmethod
def deleteSubScan(args, s):
#Set URLs
baseUrl = s.cookies['hostname']
deleteSubScanUrl = baseUrl + 'api/action/rows/delete/'
#Start scan on target
csrf_token = s.cookies['csrftoken']
data = {"type":"subscan","rows":[args.ssi]}
headers = {'Referer': deleteSubScanUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
r = s.post(deleteSubScanUrl, json=data, headers=headers, verify=False)
j = r.json()
if(j["status"] == True):
print("SUCCESS!")
else:
print("ERROR: " + r.text) | 13,772 | Python | .py | 297 | 34.313131 | 178 | 0.548392 | glownd/rengine-tool | 8 | 1 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,128 | re_engine.py | glownd_rengine-tool/Classes/re_engine.py | import argparse
import requests
from urllib3.exceptions import InsecureRequestWarning
import json
from tabulate import tabulate
class REEngine():
def __init__(self, args:argparse.Namespace, s:requests.Session):
if(args.engine_action_command):
match args.engine_action_command.lower():
case 'list':
self.listEngines(args, s)
                case _:
print("What are we doing?")
else:
print("No action given, use -h to view actions")
@staticmethod
def listEngines(args, s):
baseUrl = s.cookies['hostname']
listEnginesUrl = baseUrl + 'api/listEngines/'
csrf_token = s.cookies['csrftoken']
headers = {'Referer': listEnginesUrl,'Content-type': 'application/json', 'X-CSRFToken': csrf_token}
r = s.get(listEnginesUrl, headers=headers, verify=False)
j = r.json()
#If JSON output
if(args.oj):
print(json.dumps(j, indent=2))
        #Lets do some formatting for non-json output
else:
data = []
for i in j['engines']:
id = i['id']
name = i['engine_name']
tasks = i['tasks']
data.append([id, name, tasks])
print (tabulate(data, headers=["ID", "Name", "Tasks"]))
| 1,366 | Python | .py | 35 | 28.457143 | 107 | 0.573933 | glownd/rengine-tool | 8 | 1 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,129 | sensor.py | markgdev_home-assistant_renogy/custom_components/renogy/sensor.py | """Platform for sensor integration."""
from __future__ import annotations
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo, CONNECTION_BLUETOOTH
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from random import randint
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
import json
from .const import DOMAIN
# address = "80:6F:B0:0F:BD:C1"
SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
"remainingCapacity": SensorEntityDescription(
key="remainingCapacity",
native_unit_of_measurement="Ah",
name="Remaining Capacity",
state_class=SensorStateClass.MEASUREMENT,
),
"totalCapacity": SensorEntityDescription(
key="totalCapacity",
native_unit_of_measurement="Ah",
name="Total Capacity",
state_class=SensorStateClass.MEASUREMENT,
),
"current": SensorEntityDescription(
key="current",
native_unit_of_measurement="A",
name="Current",
state_class=SensorStateClass.MEASUREMENT,
),
"voltage": SensorEntityDescription(
key="voltage",
native_unit_of_measurement="v",
name="Voltage",
state_class=SensorStateClass.MEASUREMENT,
),
"dischargeCurentLimit": SensorEntityDescription(
key="dischargeCurentLimit",
native_unit_of_measurement="A",
name="Discharge Curent Limit",
state_class=SensorStateClass.MEASUREMENT,
),
"chargeCurentLimit": SensorEntityDescription(
key="chargeCurentLimit",
native_unit_of_measurement="A",
name="Charge Curent Limit",
state_class=SensorStateClass.MEASUREMENT,
),
"alternatorVoltage": SensorEntityDescription(
key="alternatorVoltage",
native_unit_of_measurement="v",
name="Alternator Voltage",
state_class=SensorStateClass.MEASUREMENT,
),
"alternatorCurrent": SensorEntityDescription(
key="alternatorCurrent",
native_unit_of_measurement="A",
name="Alternator Current",
state_class=SensorStateClass.MEASUREMENT,
),
"alternatorPower": SensorEntityDescription(
key="alternatorPower",
native_unit_of_measurement="w",
name="Alternator Power",
state_class=SensorStateClass.MEASUREMENT,
),
"solarVoltage": SensorEntityDescription(
key="solarVoltage",
native_unit_of_measurement="v",
name="Solar Voltage",
state_class=SensorStateClass.MEASUREMENT,
),
"solarCurrent": SensorEntityDescription(
key="solarCurrent",
native_unit_of_measurement="A",
name="Solar Current",
state_class=SensorStateClass.MEASUREMENT,
),
"solarPower": SensorEntityDescription(
key="solarPower",
native_unit_of_measurement="w",
name="Solar Power",
state_class=SensorStateClass.MEASUREMENT,
),
"cell1Voltage": SensorEntityDescription(
key="cell1Voltage",
native_unit_of_measurement="v",
name="Cell 1 Voltage",
state_class=SensorStateClass.MEASUREMENT,
),
"cell2Voltage": SensorEntityDescription(
key="cell2Voltage",
native_unit_of_measurement="v",
name="Cell 2 Voltage",
state_class=SensorStateClass.MEASUREMENT,
),
"cell3Voltage": SensorEntityDescription(
key="cell3Voltage",
native_unit_of_measurement="v",
name="Cell 3 Voltage",
state_class=SensorStateClass.MEASUREMENT,
),
"cell4Voltage": SensorEntityDescription(
key="cell4Voltage",
native_unit_of_measurement="v",
name="Cell 4 Voltage",
state_class=SensorStateClass.MEASUREMENT,
),
"cell1Temperature": SensorEntityDescription(
key="cell1Temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
name="Cell 1 Temperature",
state_class=SensorStateClass.MEASUREMENT,
),
"cell2Temperature": SensorEntityDescription(
key="cell2Temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
name="Cell 2 Temperature",
state_class=SensorStateClass.MEASUREMENT,
),
"cell3Temperature": SensorEntityDescription(
key="cell3Temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
name="Cell 3 Temperature",
state_class=SensorStateClass.MEASUREMENT,
),
"cell4Temperature": SensorEntityDescription(
key="cell4Temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
name="Cell 4 Temperature",
state_class=SensorStateClass.MEASUREMENT,
),
}
async def async_setup_entry(hass, config_entry, async_add_entities) -> None:
"""Set up the sensor platform."""
address = config_entry.data.get("mac")
friendlyName = config_entry.data.get("friendlyName")
print(f"In sensor: {address}")
coordinator: DataUpdateCoordinator[str] = hass.data[DOMAIN][address]
entities = []
# print(SENSORS_MAPPING_TEMPLATE)
for device, values in coordinator.data.items():
for sensor in values:
# print(SENSORS_MAPPING_TEMPLATE.get(sensor))
entities.append(
RenogySensor(
coordinator,
coordinator,
values.get("address"),
SENSORS_MAPPING_TEMPLATE.get(sensor),
sensor,
friendlyName,
)
)
async_add_entities(entities)
# async_add_entities(
# [
# BatteryRemainingCapacity(coordinator, coordinator.data, 48)
# # BatteryRemainingCapacity(coordinator, coordinator.data, 49),
# ]
# )
# config_entry.async_on_unload(
# # only start after all platforms have had a chance to subscribe
# coordinator.async_start()
# )
class RenogySensor(CoordinatorEntity[DataUpdateCoordinator[str]], SensorEntity):
"""Renogy Sensor"""
def __init__(
self,
coordinator: DataUpdateCoordinator,
renogyList: dict,
deviceAddress,
entity_description,
sensorName: str,
friendlyName: str,
) -> None:
"""init."""
super().__init__(coordinator)
self._deviceaddress = deviceAddress
self._sensorname = sensorName
self._friendlyName = friendlyName
self._attr_name = self._sensorname
# f"Renogy {self._deviceaddress} {sensorName}"
self._attr_unique_id = f"{self._deviceaddress}_{sensorName}"
# print(sensorName)
if entity_description is not None:
# print(f"entity description for {sensorName}: {entity_description}")
self.entity_description = entity_description
else:
print(f"No entity description for {sensorName}")
# self._attr_device_info = DeviceInfo(
# connections={
# (
# CONNECTION_BLUETOOTH,
# self._deviceaddress,
# )
# },
# name=f"Renogy Name {self._deviceaddress}",
# manufacturer="Renogy",
# model="Renogy model",
# )
_deviceaddress = ""
_sensorname = ""
_attr_has_entity_name = True
# _attr_name = "Battery Remaining Capacity"
# _attr_native_unit_of_measurement = "Ah"
# _attr_suggested_display_precision = 2
# _attr_device_class = SensorDeviceClass.ENERGY_STORAGE
# _attr_state_class = SensorStateClass.MEASUREMENT
@property
    def state(self):
"""Return the state of the sensor."""
# print(self._deviceaddress, self._sensorname)
# print(self.coordinator.data.get(self._deviceaddress).get(self._sensorname))
return self.coordinator.data.get(self._deviceaddress).get(self._sensorname)
@property
def device_info(self) -> DeviceInfo:
"""Return the device info."""
return DeviceInfo(
identifiers={
# Serial numbers are unique identifiers within a specific domain
(DOMAIN, self._deviceaddress)
},
connections={
(
CONNECTION_BLUETOOTH,
self._deviceaddress,
)
},
name=f"Renogy {self._friendlyName} {self._deviceaddress}",
# name=f"Renogy Name {self._deviceaddress}",
manufacturer="Renogy",
model="Renogy model",
# manufacturer=self.light.manufacturername,
# model=self.light.productname,
# sw_version=self.light.swversion,
# via_device=(hue.DOMAIN, self.api.bridgeid),
)
| 9,088 | Python | .py | 248 | 28.620968 | 85 | 0.645479 | markgdev/home-assistant_renogy | 8 | 0 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,130 | config_flow.py | markgdev_home-assistant_renogy/custom_components/renogy/config_flow.py | """Config flow for renogy integration."""
from __future__ import annotations
import logging
from typing import Any
import voluptuous as vol
from homeassistant import config_entries
# from homeassistant.const import CONF_HOST
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResult
from homeassistant.exceptions import HomeAssistantError
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
# TODO adjust the data schema to the data that you need
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required("friendlyName"): str,
vol.Required("mac"): str,
vol.Required("batteries"): str,
vol.Required("controllers"): str,
# vol.Required(CONF_CONTROLLERS): str,
}
)
class PlaceholderHub:
"""Placeholder class to make tests pass.
TODO Remove this placeholder class and replace with things from your PyPI package.
"""
def __init__(self, mac: str, batteries: list) -> None:
"""Initialize."""
self.mac = mac
self.batteries = batteries
print(f"mac in init: {self.mac}, {batteries}")
async def authenticate(self, username: str, password: str) -> bool:
"""Test if we can authenticate with the host."""
return True
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]:
"""Validate the user input allows us to connect.
Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
"""
# TODO validate the data can be used to set up a connection.
# If your PyPI package is not built with async, pass your methods
# to the executor:
# await hass.async_add_executor_job(
# your_validate_func, data[CONF_USERNAME], data[CONF_PASSWORD]
# )
    batteryIds = data.get("batteries")
    if "," in batteryIds:
        batteryIds = [int(i) for i in batteryIds.split(",")]
    controllerIds = data.get("controllers")
    if "," in controllerIds:
        controllerIds = [int(i) for i in controllerIds.split(",")]
mac = data["mac"]
friendlyName = data["friendlyName"]
hub = PlaceholderHub(mac, batteryIds)
# print(f"Got mac {mac}, {batteryIds}")
# if not await hub.authenticate(data[CONF_USERNAME], data[CONF_PASSWORD]):
# raise InvalidAuth
# If you cannot connect:
# throw CannotConnect
# If the authentication is wrong:
# InvalidAuth
# Return info that you want to store in the config entry.
return {"title": friendlyName}
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for renogy."""
VERSION = 1
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
try:
info = await validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_create_entry(title=info["title"], data=user_input)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
class CannotConnect(HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(HomeAssistantError):
"""Error to indicate there is invalid auth."""
| 3,678 | Python | .py | 89 | 34.685393 | 86 | 0.664513 | markgdev/home-assistant_renogy | 8 | 0 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,131 | const.py | markgdev_home-assistant_renogy/custom_components/renogy/const.py | """Constants for the renogy integration."""
DOMAIN = "renogy"
DEFAULT_SCAN_INTERVAL = 30
# CONF_MAC = None
# CONF_BATTERIES = None
# CONF_CONTROLLERS = None
| 160 | Python | .py | 6 | 25.166667 | 43 | 0.741722 | markgdev/home-assistant_renogy | 8 | 0 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,132 | __init__.py | markgdev_home-assistant_renogy/custom_components/renogy/__init__.py | """The renogy integration."""
from __future__ import annotations
from bleak import BleakClient, BleakError
from bleak.backends.device import BLEDevice
from bleak_retry_connector import (
close_stale_connections_by_address,
establish_connection,
)
from homeassistant.components import bluetooth
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
import logging
from datetime import timedelta
from .const import DEFAULT_SCAN_INTERVAL, DOMAIN
from .renogy.device import getStats
# TODO List the platforms that you want to support.
# For your initial PR, limit it to 1 platform.
PLATFORMS: list[Platform] = [Platform.SENSOR]
_LOGGER = logging.getLogger(__name__)
# address = "80:6F:B0:0F:BD:C1"
address = None
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up renogy from a config entry."""
global address
hass.data.setdefault(DOMAIN, {})
print("starting up renogy")
# print(entry.data)
address = entry.data.get("mac")
# print(address)
batteryListStr = entry.data.get("batteries")
batteryList = []
if "," in batteryListStr:
batteryList = [int(i) for i in batteryListStr.split(",")]
else:
batteryList.append(int(batteryListStr))
controllerListStr = entry.data.get("controllers")
controllerList = []
if "," in controllerListStr:
controllerList = [int(i) for i in controllerListStr.split(",")]
else:
controllerList.append(int(controllerListStr))
assert address is not None
await close_stale_connections_by_address(address)
# print("closed stale connections")
ble_device = bluetooth.async_ble_device_from_address(hass, address)
# print("got a device")
if not ble_device:
# print("Couldn't connect to device, hopefully it's just gone out for the day...")
# return False
raise ConfigEntryNotReady(
f"Could not find Renogy device with address {address}"
)
# return None
async def _async_update_method() -> dict:
# else:
client = await establish_connection(BleakClient, ble_device, ble_device.address)
return await getStats(
client=client, batteryList=batteryList, controllerList=controllerList
)
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name=DOMAIN,
update_method=_async_update_method,
update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL),
)
await coordinator.async_config_entry_first_refresh()
hass.data[DOMAIN][address] = coordinator
# TODO 1. Create API instance
# TODO 2. Validate the API connection (and authentication)
# TODO 3. Store an API object for your platforms to access
# hass.data[DOMAIN][entry.entry_id] = MyApi(...)
# hass.states.set(f"{DOMAIN}.world", "Mark")
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
print(f"Unloading renogy {address}")
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
hass.data[DOMAIN].pop(address)
return unload_ok
| 3,470 | Python | .py | 84 | 36.071429 | 90 | 0.723081 | markgdev/home-assistant_renogy | 8 | 0 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,133 | device.py | markgdev_home-assistant_renogy/custom_components/renogy/renogy/device.py | import asyncio
from bleak import BleakClient, BleakError, BleakGATTCharacteristic
from bleak.backends.device import BLEDevice
from bleak_retry_connector import establish_connection
import time
from .Utils import bytes_to_int, int_to_bytes, crc16_modbus
import binascii
WRITE_SERVICE_UUID = "0000ffd1-0000-1000-8000-00805f9b34fb"
NOTIFY_SERVICE_UUID = "0000fff1-0000-1000-8000-00805f9b34fb"
waiting = False
waitStart = 0
waitTimeout = 10
gotReturnVal = False
returnVal = None
batteryRegisterInfo = {
"cell1Voltage": {
"description": "Cell 1 voltage",
"register": 5001,
"wordSize": 1,
"multiplier": 0.1,
},
"cell2Voltage": {
"description": "Cell 2 voltage",
"register": 5002,
"wordSize": 1,
"multiplier": 0.1,
},
"cell3Voltage": {
"description": "Cell 3 voltage",
"register": 5003,
"wordSize": 1,
"multiplier": 0.1,
},
"cell4Voltage": {
"description": "Cell 4 voltage",
"register": 5004,
"wordSize": 1,
"multiplier": 0.1,
},
"cell1Temperature": {
"description": "Cell 1 Temperature",
"register": 5018,
"wordSize": 1,
"multiplier": 0.1,
},
"cell2Temperature": {
"description": "Cell 2 Temperature",
"register": 5019,
"wordSize": 1,
"multiplier": 0.1,
},
"cell3Temperature": {
"description": "Cell 3 Temperature",
"register": 5020,
"wordSize": 1,
"multiplier": 0.1,
},
"cell4Temperature": {
"description": "Cell 4 Temperature",
"register": 5021,
"wordSize": 1,
"multiplier": 0.1,
},
"remainingCapacity": {
"description": "Remain capacity",
"register": 5044,
"wordSize": 2,
"multiplier": 0.001,
},
"totalCapacity": {
"description": "Total capacity",
"register": 5046,
"wordSize": 2,
"multiplier": 0.001,
},
"current": {
"description": "Current",
"register": 5042,
"wordSize": 1,
"multiplier": 0.01,
},
"voltage": {
"description": "Voltage",
"register": 5043,
"wordSize": 1,
"multiplier": 0.1,
},
"cycleCount": {
"description": "Cycle count",
"register": 5048,
"wordSize": 1,
"multiplier": 1,
},
"dischargeCurentLimit": {
"description": "Discharge Current Limit",
"register": 5052,
"wordSize": 1,
"multiplier": 0.01,
},
"chargeCurentLimit": {
"description": "Charge Current Limit",
"register": 5051,
"wordSize": 1,
"multiplier": 0.01,
},
}
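# Worked example (a sketch, assuming the device answers like Modbus holding
# registers): "remainingCapacity" reads 2 words starting at register 5044,
# so a raw reading of 100000 scaled by its 0.001 multiplier is 100.000 Ah.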
# batteryList = [48, 49]
# controllerList = [97]
controllerRegisterInfo = {
"alternatorVoltage": {
"description": "Alternator Voltage",
"register": 0x104,
"wordSize": 1,
"multiplier": 0.1,
},
"alternatorCurrent": {
"description": "Alternator Current",
"register": 0x105,
"wordSize": 1,
"multiplier": 0.1,
},
"alternatorPower": {
"description": "Alternator Power",
"register": 0x106,
"wordSize": 1,
"multiplier": 1,
},
"solarVoltage": {
"description": "Solar Voltage",
"register": 0x107,
"wordSize": 1,
"multiplier": 0.1,
},
"solarCurrent": {
"description": "Solar Current",
"register": 0x108,
"wordSize": 1,
"multiplier": 0.1,
},
"solarPower": {
"description": "Solar Power",
"register": 0x109,
"wordSize": 1,
"multiplier": 1,
},
}
def notification_handler(sender: BleakGATTCharacteristic, data: bytearray):
global returnVal
global gotReturnVal
# print("Wooo got something")
    id = data[0]  # address of the responding device
    mode = data[1]  # Modbus function code echoed in the reply
    length = data[2]  # number of payload bytes that follow
    start = 3
    end = 3 + length
    val = int.from_bytes(data[start:end], byteorder="big", signed=True)
# print(data[3:7].hex())
# print(id, mode, length, val/1000)
# print(data.hex())
# print(f"{sender}: {data}")
returnVal = val
# print(returnVal)
gotReturnVal = True
# exit(0)
async def getStats(
client: BleakClient, batteryList: list, controllerList: list
) -> dict:
# print("In device.py!!", client)
# print(f"Connected: {client.is_connected}")
# print(batteryList)
def create_generic_read_request(device_id, function, regAddr, readWrd):
data = None
        if regAddr is not None and readWrd is not None:
data = []
data.append(device_id)
data.append(function)
data.append(int_to_bytes(regAddr, 0))
data.append(int_to_bytes(regAddr, 1))
data.append(int_to_bytes(readWrd, 0))
data.append(int_to_bytes(readWrd, 1))
crc = crc16_modbus(bytes(data))
data.append(crc[0])
data.append(crc[1])
# logging.debug("{} {} => {}".format("create_request_payload", regAddr, data))
return data
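    # A sketch of the frame built above (Modbus RTU "read holding registers",
    # function 0x03) for the hypothetical request device 48, register 5044,
    # 2 words:
    #
    #     [48, 0x03, reg_hi, reg_lo, 0x00, 0x02, crc_a, crc_b]
    #
    # where reg_hi/reg_lo come from int_to_bytes(5044, 0/1) and crc_a/crc_b
    # are the two CRC bytes in the order crc16_modbus returns them.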
async def get_modbus_value(device_id, regAddr, wordLen, multiplier):
global returnVal
global gotReturnVal
writeData = bytes(create_generic_read_request(device_id, 3, regAddr, wordLen))
waitStart = time.time()
gotReturnVal = False
# print(f"About to send: {writeData.hex()}")
await client.write_gatt_char(WRITE_SERVICE_UUID, writeData, response=True)
        # Wait for notification_handler to set the result; bail out after
        # waitTimeout seconds so a lost notification cannot hang the update.
        while not gotReturnVal:
            if time.time() - waitStart > waitTimeout:
                raise TimeoutError(
                    f"No reply for device {device_id}, register {regAddr}")
            await asyncio.sleep(0.01)
# time.sleep(1)
# print(f"Got: {returnVal} for dev: {device_id}, reg: {regAddr}")
return "%.3f" % (returnVal * multiplier)
await client.start_notify(NOTIFY_SERVICE_UUID, notification_handler)
# print("To send:", bytes(writeData).hex())
MODEL_NBR_UUID = "00002a24-0000-1000-8000-00805f9b34fb"
model_number = await client.read_gatt_char(MODEL_NBR_UUID)
# print("Model Number: {0}".format("".join(map(chr, model_number))))
READ_UUID = "0000ffd4-0000-1000-8000-00805f9b34fb"
# READ_UUID = NOTIFY_SERVICE_UUID
# ba = await client.read_gatt_char(READ_UUID)
# print(await get_modbus_value(48, 5044, 2))
# print(await get_modbus_value(49, 5044, 2))
retList = {}
for battery in batteryList:
batteryDict = {"address": battery, "type": "battery"}
# print(f"Battery: {battery}")
for k, v in batteryRegisterInfo.items():
# print(k, v.get("register"))
modbusValue = await get_modbus_value(
battery,
v.get("register"),
v.get("wordSize"),
v.get("multiplier", 1),
)
# print(f"{k}: {modbusValue}")
batteryDict[k] = modbusValue
retList[battery] = batteryDict
for controller in controllerList:
controllerDict = {"address": controller, "type": "controller"}
# print(f"Controller: {controller}")
for k, v in controllerRegisterInfo.items():
# print(k, v.get("register"))
modbusValue = await get_modbus_value(
controller, v.get("register"), v.get("wordSize"), v.get("multiplier", 1)
)
# print(f"{k}: {modbusValue}")
controllerDict[k] = modbusValue
retList[controller] = controllerDict
# print(retList)
await client.disconnect()
return retList
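# Shape of the mapping returned above (a sketch; values are the "%.3f"
# strings produced by get_modbus_value, addresses come from the configured
# lists):
#
#     {48: {"address": 48, "type": "battery", "cell1Voltage": "3.312", ...},
#      97: {"address": 97, "type": "controller", "solarPower": "120.000", ...}}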
| 7,694 | Python | .py | 240 | 24.5375 | 108 | 0.581283 | markgdev/home-assistant_renogy | 8 | 0 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,134 | Utils.py | markgdev_home-assistant_renogy/custom_components/renogy/renogy/Utils.py | # Reads data from a list of bytes, and converts to an int
def bytes_to_int(bs, offset, length, signed=False, scale=1):
ret = 0
if len(bs) < (offset + length):
return ret
if length > 0:
byteorder = "big"
start = offset
end = offset + length
else:
byteorder = "little"
start = offset + length + 1
end = offset + 1
return round(
int.from_bytes(bs[start:end], byteorder=byteorder, signed=signed) * scale, 2
)
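# Sketch examples (big-endian, unsigned):
#
#     bytes_to_int(b"\x01\x02", 0, 2)              # -> 258
#     bytes_to_int(b"\x00\xff", 0, 2, scale=0.1)   # -> 25.5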
# Converts an integer into 2 bytes (16 bits)
# Returns either the first or second byte as an int
def int_to_bytes(i, pos=0):
if pos == 0:
return int(format(i, "016b")[:8], 2)
if pos == 1:
return int(format(i, "016b")[8:], 2)
return 0
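# Sketch: splitting register 5044 (0x13B4) into its two transmit bytes:
#
#     int_to_bytes(5044, 0)  # -> 19  (0x13, high byte)
#     int_to_bytes(5044, 1)  # -> 180 (0xB4, low byte)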
def parse_temperature(raw_value, unit):
sign = raw_value >> 7
celcius = -(raw_value - 128) if sign == 1 else raw_value
return format_temperature(celcius, unit)
def format_temperature(celcius, unit="F"):
return (celcius * 9 / 5) + 32 if unit.strip() == "F" else celcius
def filter_fields(data, fields_str):
fields = (
[x.strip() for x in fields_str.split(",")] if len(fields_str) > 0 else []
) # trim spaces
if len(fields) > 0 and set(fields).issubset(data):
return {key: data[key] for key in fields}
return data
CRC16_LOW_BYTES = (
0x00,
0xC0,
0xC1,
0x01,
0xC3,
0x03,
0x02,
0xC2,
0xC6,
0x06,
0x07,
0xC7,
0x05,
0xC5,
0xC4,
0x04,
0xCC,
0x0C,
0x0D,
0xCD,
0x0F,
0xCF,
0xCE,
0x0E,
0x0A,
0xCA,
0xCB,
0x0B,
0xC9,
0x09,
0x08,
0xC8,
0xD8,
0x18,
0x19,
0xD9,
0x1B,
0xDB,
0xDA,
0x1A,
0x1E,
0xDE,
0xDF,
0x1F,
0xDD,
0x1D,
0x1C,
0xDC,
0x14,
0xD4,
0xD5,
0x15,
0xD7,
0x17,
0x16,
0xD6,
0xD2,
0x12,
0x13,
0xD3,
0x11,
0xD1,
0xD0,
0x10,
0xF0,
0x30,
0x31,
0xF1,
0x33,
0xF3,
0xF2,
0x32,
0x36,
0xF6,
0xF7,
0x37,
0xF5,
0x35,
0x34,
0xF4,
0x3C,
0xFC,
0xFD,
0x3D,
0xFF,
0x3F,
0x3E,
0xFE,
0xFA,
0x3A,
0x3B,
0xFB,
0x39,
0xF9,
0xF8,
0x38,
0x28,
0xE8,
0xE9,
0x29,
0xEB,
0x2B,
0x2A,
0xEA,
0xEE,
0x2E,
0x2F,
0xEF,
0x2D,
0xED,
0xEC,
0x2C,
0xE4,
0x24,
0x25,
0xE5,
0x27,
0xE7,
0xE6,
0x26,
0x22,
0xE2,
0xE3,
0x23,
0xE1,
0x21,
0x20,
0xE0,
0xA0,
0x60,
0x61,
0xA1,
0x63,
0xA3,
0xA2,
0x62,
0x66,
0xA6,
0xA7,
0x67,
0xA5,
0x65,
0x64,
0xA4,
0x6C,
0xAC,
0xAD,
0x6D,
0xAF,
0x6F,
0x6E,
0xAE,
0xAA,
0x6A,
0x6B,
0xAB,
0x69,
0xA9,
0xA8,
0x68,
0x78,
0xB8,
0xB9,
0x79,
0xBB,
0x7B,
0x7A,
0xBA,
0xBE,
0x7E,
0x7F,
0xBF,
0x7D,
0xBD,
0xBC,
0x7C,
0xB4,
0x74,
0x75,
0xB5,
0x77,
0xB7,
0xB6,
0x76,
0x72,
0xB2,
0xB3,
0x73,
0xB1,
0x71,
0x70,
0xB0,
0x50,
0x90,
0x91,
0x51,
0x93,
0x53,
0x52,
0x92,
0x96,
0x56,
0x57,
0x97,
0x55,
0x95,
0x94,
0x54,
0x9C,
0x5C,
0x5D,
0x9D,
0x5F,
0x9F,
0x9E,
0x5E,
0x5A,
0x9A,
0x9B,
0x5B,
0x99,
0x59,
0x58,
0x98,
0x88,
0x48,
0x49,
0x89,
0x4B,
0x8B,
0x8A,
0x4A,
0x4E,
0x8E,
0x8F,
0x4F,
0x8D,
0x4D,
0x4C,
0x8C,
0x44,
0x84,
0x85,
0x45,
0x87,
0x47,
0x46,
0x86,
0x82,
0x42,
0x43,
0x83,
0x41,
0x81,
0x80,
0x40,
)
CRC16_HIGH_BYTES = (
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
0x01,
0xC0,
0x80,
0x41,
0x01,
0xC0,
0x80,
0x41,
0x00,
0xC1,
0x81,
0x40,
)
# Calculate CRC-16 for Modbus
def crc16_modbus(data: bytes):
crc_high = 0xFF
crc_low = 0xFF
for byte in data:
index = crc_high ^ int(byte)
crc_high = crc_low ^ CRC16_HIGH_BYTES[index]
crc_low = CRC16_LOW_BYTES[index]
return bytes([crc_high, crc_low])
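# Sketch usage: append the CRC to a Modbus RTU frame in the order returned
# (the same order device.py's create_generic_read_request relies on):
#
#     frame = bytearray([48, 0x03, 0x13, 0xB4, 0x00, 0x02])
#     frame += crc16_modbus(bytes(frame))  # adds the two CRC bytes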
| 6,787 | Python | .py | 562 | 7.042705 | 84 | 0.489855 | markgdev/home-assistant_renogy | 8 | 0 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,135 | ChatGPT.py | Adexxxx_ChatGpt-telegram-bot/with context/ChatGPT.py | from openai import OpenAI
import sqlite3
import re
import requests
TOKEN = 'YOUR_BOT_API_KEY'  # Insert your own tokens for the bot and for ChatGPT
client = OpenAI(api_key='YOUR_OPENAI_API_KEY')
def gpt(text: str, id: int, m_id: int):
try:
url = f"https://api.telegram.org/bot{TOKEN}/sendMessage?chat_id={id}&text=Генерируется ответ♻️&reply_to_message_id={m_id}" # Даём юзеру понять, что приняли его сообщение
data = requests.get(url).json()
rl = f"https://api.telegram.org/bot{TOKEN}/sendChatAction?chat_id={id}&action=typing" # Для визуала делаем действие боту, типо он печатает, как реальный человек
ata = requests.get(rl).json()
connect = sqlite3.connect('gpt_us.db') # Подключаемся к бд, чтобы достать последние два ответа бота. Они будут нужны для чат гпт
cursor = connect.cursor()
sql_query = f"""
SELECT con, con2 FROM ChatGPT WHERE id = '{id}'
"""
        con = list(cursor.execute(sql_query).fetchone())[0]  # Take the two most recent replies
        con2 = list(cursor.execute(sql_query).fetchone())[1]
        # Hand the request and the context over to ChatGPT
completion = client.chat.completions.create(
            model = 'gpt-3.5-turbo', # The model version can be changed as you see fit
            messages = [
                {"role": "system", "content" : "chipi chipi chapa chapa"}, # The content field sets the bot's "personality" (in English)
                {'role': 'user', 'content': f'{text}'}, # The request itself is passed here
                {'role': 'assistant', 'content': f'{con} {con2}'} # Pass the context: the bot's last two replies
            ],
            temperature = 0.9 # A value from 0 to 1; higher means more expressive output
)
        ppp = re.compile('[a-zA-Z]')  # Needed for the check below
        english_text = completion.choices[0].message.content  # The model's reply to the user's message
        if ppp.match(english_text):  # Check for English: if the model answered in English, it may be a glitch (the request was most likely in Russian).
            completion = client.chat.completions.create(  # If the reply really is in English, run it again, only without the context. Surprisingly, this works.
model = 'gpt-3.5-turbo',
messages = [
{"role": "system", "content" : "chipi chipi chapa chapa"},
{'role': 'user', 'content': f'{text}'}
],
temperature = 0.9
)
english_text = completion.choices[0].message.content
        # After handling the request, update the stored context replies in the
        # DB; parameterized so quotes in the model output cannot break the query
        sql_query = "UPDATE ChatGPT SET con = ?, con2 = ? WHERE id = ?"
        cursor.execute(sql_query, (english_text, con, id))
        connect.commit()  # Commit and close the DB
        connect.close()
urll = f"https://api.telegram.org/bot{TOKEN}/deleteMessage?chat_id={id}&message_id={data['result']['message_id']}" # Удаляем сообщение о принятии запроса пользователя
print(requests.get(urll).json())
mes = f"https://api.telegram.org/bot{TOKEN}/sendMessage?chat_id={id}&text={english_text}&parse_mode=Markdown&reply_to_message_id={m_id}" # Отсылаем юзеру ответ на его запрос
print(requests.get(mes).json())
    except Exception:  # In case the API request limits are exceeded
        mess = f"https://api.telegram.org/bot{TOKEN}/sendMessage?chat_id={id}&text=❌ Something went wrong. Try again in 30 seconds&parse_mode=Markdown&reply_to_message_id={m_id}"
print(requests.get(mess).json())
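# Sketch usage (hypothetical IDs): the bot's message handler calls
# gpt(message.text, message.from_user.id, message.message_id); the reply is
# delivered through the Telegram HTTP API rather than returned to the caller.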
| 4,991 | Python | .py | 56 | 60.053571 | 189 | 0.608386 | Adexxxx/ChatGpt-telegram-bot | 8 | 5 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,136 | main.py | Adexxxx_ChatGpt-telegram-bot/with context/main.py | from aiogram import Bot, Dispatcher, executor, types
from aiogram.types import Message
from ChatGPT import gpt
import sqlite3
import threading
TOKEN = 'YOUR_API_KEY'
bot = Bot(TOKEN)
dp = Dispatcher(bot)
def create_table():  # Create the DB if it does not exist yet
connect = sqlite3.connect('gpt_us.db')
cursor = connect.cursor()
sql_query = """
CREATE TABLE IF NOT EXISTS ChatGPT (
id INT,
con TEXT,
con2 TEXT
)
"""
cursor.execute(sql_query)
connect.commit()
connect.close()
@dp.message_handler(commands='start')  # Handle the /start command
async def start(message: Message):
    connect = sqlite3.connect('gpt_us.db')  # Connect to the DB
cursor = connect.cursor()
sql_query = f"""
SELECT * FROM ChatGPT WHERE id = '{int(message.chat.id)}'
"""
user = cursor.execute(sql_query).fetchone()
    if user is None:  # Check whether the user is already in the DB; add them if not
        sql_query = f"INSERT INTO ChatGPT(id, con, con2) VALUES ('{message.from_user.id}', '', '')"
        cursor.execute(sql_query)
    await message.answer("Hi! This is a chatbot based on the gpt 3.5 model 🔥\nLet's get started!")  # Greet the user
    connect.commit()  # Commit and close the DB
connect.close()
@dp.message_handler(commands='reset')  # Clears the stored dialog context in the DB
async def reset(message: Message):
connect = sqlite3.connect('gpt_us.db')
cursor = connect.cursor()
sql_query = f"UPDATE ChatGPT SET con = '', con2 = '' WHERE id = {message.from_user.id}"
cursor.execute(sql_query)
    await message.answer('✅ Dialog history cleared ✅')
connect.commit()
connect.close()
@dp.message_handler(content_types=types.ContentType.TEXT)  # Handle an incoming request
async def mes(message: types.Message):
    thread = threading.Thread(target=gpt, args=(message.text, message.from_user.id, message.message_id))  # Run the handler in a new thread
thread.start()
if __name__ == '__main__':
create_table()
executor.start_polling(dp)
| 2,570 | Python | .py | 51 | 40.823529 | 181 | 0.508779 | Adexxxx/ChatGpt-telegram-bot | 8 | 5 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,137 | ChatGPT.py | Adexxxx_ChatGpt-telegram-bot/wihout context/ChatGPT.py | from openai import OpenAI
client = OpenAI(api_key='YOUR_API_KEY')
def gpt(text):
completion = client.chat.completions.create(
        model = 'gpt-3.5-turbo', # The model can be chosen to taste
messages = [
{"role": "system", "content" : "You are a bot assistant imitating a real person."}, #Тут задаётся личность нейросети. Натсраивается по своему усмотрению (на английском языке)
{'role': 'user', 'content': f'{text}'} #Запрос от пользователя, который и обрабатывает нейросеть
],
        temperature = 0.5 # The model's degree of freedom, from 0 to 1; higher means more expressiveness and more filler
)
english_text = completion.choices[0].message.content
return english_text
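# Sketch usage: unlike the context-aware variant, the reply is returned to
# the caller, e.g.
#
#     reply = gpt("What is the capital of France?")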
| 1,046 | Python | .py | 13 | 56.307692 | 191 | 0.622166 | Adexxxx/ChatGpt-telegram-bot | 8 | 5 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,138 | main.py | Adexxxx_ChatGpt-telegram-bot/wihout context/main.py | from aiogram import Bot, Dispatcher, executor, types
from aiogram.types import Message
from ChatGPT import gpt
TOKEN = 'YOUR_TOKEN'  # Telegram bot token
bot = Bot(TOKEN)
dp = Dispatcher(bot)
@dp.message_handler(commands='start')  # Handle the /start command
async def start(message: Message):
    await message.answer("Hi! This is a chatbot based on the gpt 3.5 model 🔥\nLet's get started!")
@dp.message_handler(content_types=types.ContentType.TEXT)  # Any text message is treated as a request to ChatGPT.
async def mes(message: types.Message):
    await message.answer('Generating a reply ♻️') # Let the user know the bot is working
    await message.reply(gpt(message.text)) # type: ignore  # The request is received here, handed off for processing, and the reply is sent back
    await bot.delete_message(chat_id=message.chat.id, message_id=message.message_id + 1) # Delete the first message; purely for aesthetics
if __name__ == '__main__':  # Start polling for messages arriving at the bot.
executor.start_polling(dp)
| 1,340 | Python | .py | 16 | 78.875 | 182 | 0.503846 | Adexxxx/ChatGpt-telegram-bot | 8 | 5 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,139 | conftest.py | pantos-io_common/tests/conftest.py | import pytest
from pantos.common.protocol import get_supported_protocol_versions
@pytest.fixture(scope='session', params=get_supported_protocol_versions())
def protocol_version(request):
return request.param
| 215 | Python | .py | 5 | 40.6 | 74 | 0.830918 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,140 | test_signer.py | pantos-io_common/tests/test_signer.py | from unittest.mock import MagicMock
from unittest.mock import patch
import pytest
from pantos.common.blockchains.enums import Blockchain
from pantos.common.signer import SignerError
from pantos.common.signer import get_signer
def test_signer_init_unable_to_load_key():
with pytest.raises(SignerError):
get_signer('', '')
@patch('pantos.common.signer._signer', None)
@patch('pantos.common.signer.Crypto')
@patch('pantos.common.signer.getpass')
def test_signer_load_signer_correct_path(mocked_getpass, mocked_crypto):
get_signer('', None)
mocked_getpass.getpass.assert_called_once_with(
'Password for decrypting the pem file')
mocked_crypto.PublicKey.ECC.import_key.assert_called_once_with(
'', passphrase=mocked_getpass.getpass())
@patch('pantos.common.signer._signer', None)
@patch('pantos.common.signer.Crypto')
def test_signer_load_signer_correct_value(mocked_crypto):
get_signer('test', 'mocked_password')
mocked_crypto.Signature.eddsa.new.assert_called_once()
@patch('pantos.common.signer._signer', None)
@patch('pantos.common.signer.Crypto')
@patch('pantos.common.signer.getpass')
def test_signer_sign_message_correct(mocked_getpass, mocked_crypto):
signer = get_signer('', None)
signer.sign_message('')
mocked_crypto.Signature.eddsa.new().sign.assert_called_once()
@patch('pantos.common.signer.Crypto')
@patch('pantos.common.signer.getpass')
def test_signer_sign_message_error(mocked_getpass, mocked_crypto):
signer = get_signer('', None)
message = MagicMock()
message.encode.side_effect = Exception
with pytest.raises(SignerError):
signer.sign_message(message)
@patch('pantos.common.signer.Crypto')
@patch('pantos.common.signer.getpass')
def test_signer_verify_message_correct(mocked_getpass, mocked_crypto):
signer = get_signer('', None)
result = signer.verify_message('message', '')
assert result is True
@patch('pantos.common.signer.Crypto')
@patch('pantos.common.signer.getpass')
def test_signer_verify_message_false(mocked_getpass, mocked_crypto):
signer = get_signer('', None)
result = signer.verify_message('message', 'signature')
assert result is False
@patch('pantos.common.signer.Crypto')
@patch('pantos.common.signer.getpass')
def test_signer_verify_message_raises_exception(mocked_getpass, mocked_crypto):
signer = get_signer('', None)
message = MagicMock()
message.encode.side_effect = Exception
with pytest.raises(SignerError):
signer.verify_message(message, '')
@patch('pantos.common.signer.Crypto')
@patch('pantos.common.signer.getpass')
def test_build_message(mocked_getpass, mocked_crypto):
signer = get_signer('', None)
message = signer.build_message('-', 0, 0, [Blockchain.ETHEREUM])
assert message == '0-0-[<Blockchain.ETHEREUM: 0>]'
| 2,839 | Python | .py | 64 | 40.59375 | 79 | 0.747994 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,141 | test_servicenodes.py | pantos-io_common/tests/test_servicenodes.py | import unittest.mock
import uuid
import pytest
import requests
from pantos.common.blockchains.enums import Blockchain
from pantos.common.entities import ServiceNodeBid
from pantos.common.servicenodes import ServiceNodeClient
from pantos.common.servicenodes import ServiceNodeClientError
from pantos.common.types import BlockchainAddress
mock_transfer_request = ServiceNodeClient.SubmitTransferRequest(
'url', Blockchain(0), Blockchain(1), BlockchainAddress('sender_addr'),
BlockchainAddress('recipient_addr'), BlockchainAddress('source_token'),
BlockchainAddress('destination_token'), 77,
ServiceNodeBid(Blockchain(0), Blockchain(1), 77, 1337, 77, 'signature'), 2,
22, 'signature')
mock_service_node_request = {
'source_blockchain_id': 0,
'destination_blockchain_id': 1,
'sender_address': 'sender_addr',
'recipient_address': 'recipient_addr',
'source_token_address': 'source_token',
'destination_token_address': 'destination_token',
'amount': 77,
'bid': {
'fee': 77,
'execution_time': 1337,
'valid_until': 77,
'signature': 'signature'
},
'nonce': 2,
'valid_until': 22,
'signature': 'signature'
}
mock_bid_response = {
'fee': 100,
'execution_time': 200,
'valid_until': 300,
'signature': 'mock_signature'
}
mock_response_header = requests.utils.CaseInsensitiveDict(
{'Content-Type': 'application/json'})
mock_response_header_html = requests.utils.CaseInsensitiveDict(
{'Content-Type': 'text/html; charset=UTF-8'})
@unittest.mock.patch.object(ServiceNodeClient,
'_ServiceNodeClient__build_transfer_url')
@unittest.mock.patch('pantos.common.servicenodes.requests.post')
def test_submit_transfer_correct(mocked_post, mocked_build_transfer_url):
uuid_string = '123e4567-e89b-12d3-a456-426655440000'
mocked_post().json.return_value = {'task_id': uuid_string}
result = ServiceNodeClient().submit_transfer(mock_transfer_request)
    assert isinstance(result, uuid.UUID)
assert str(result) == uuid_string
mocked_build_transfer_url.assert_called_once_with(
mock_transfer_request.service_node_url)
mocked_post.assert_called_with(mocked_build_transfer_url(),
json=mock_service_node_request,
timeout=None)
mocked_post().raise_for_status.assert_called_once()
mocked_post().json.assert_called_with()
@unittest.mock.patch.object(ServiceNodeClient,
'_ServiceNodeClient__build_transfer_url')
@unittest.mock.patch('pantos.common.servicenodes.locals',
return_value=['response'])
@unittest.mock.patch('pantos.common.servicenodes.requests.post')
def test_submit_transfer_exception(mocked_post, mocked_locals,
mocked_build_transfer_url):
mocked_post(
).raise_for_status.side_effect = requests.exceptions.RequestException
mocked_post().json.return_value = {'message': 'specific error message'}
mocked_post().headers = mock_response_header
with pytest.raises(ServiceNodeClientError, match='specific error message'):
ServiceNodeClient().submit_transfer(mock_transfer_request)
@unittest.mock.patch.object(ServiceNodeClient,
'_ServiceNodeClient__build_transfer_url')
@unittest.mock.patch('pantos.common.servicenodes.locals',
return_value=['response'])
@unittest.mock.patch('pantos.common.servicenodes.requests.post')
def test_submit_transfer_no_response_message_exception(
mocked_post, mocked_locals, mocked_build_transfer_url):
mocked_post(
).raise_for_status.side_effect = requests.exceptions.RequestException
mocked_post().headers = mock_response_header
mocked_post().json.return_value = {}
with pytest.raises(ServiceNodeClientError):
ServiceNodeClient().submit_transfer(mock_transfer_request)
@unittest.mock.patch.object(ServiceNodeClient,
'_ServiceNodeClient__build_transfer_url')
@unittest.mock.patch('pantos.common.servicenodes.locals',
return_value=['response'])
@unittest.mock.patch('pantos.common.servicenodes.requests.post')
def test_submit_transfer_html_response_exception(mocked_post, mocked_locals,
mocked_build_transfer_url):
mocked_post(
).raise_for_status.side_effect = requests.exceptions.RequestException
mocked_post().headers = mock_response_header_html
with pytest.raises(ServiceNodeClientError):
ServiceNodeClient().submit_transfer(mock_transfer_request)
assert not mocked_post.json.called
def test_build_transfer_url_no_slash_correct():
url = 'some_url'
result = ServiceNodeClient()._ServiceNodeClient__build_transfer_url(url)
assert result == 'some_url/transfer'
def test_build_transfer_url_with_slash_correct():
url = 'some_url/'
result = ServiceNodeClient()._ServiceNodeClient__build_transfer_url(url)
assert result == 'some_url/transfer'
@unittest.mock.patch('pantos.common.servicenodes.requests.get')
def test_bids_correct(mocked_get):
url = 'mock_url'
source_blockchain = Blockchain.ETHEREUM
destination_blockchain = Blockchain.BNB_CHAIN
mocked_get().json.return_value = [mock_bid_response]
bids = ServiceNodeClient().bids(url, source_blockchain,
destination_blockchain)
assert bids[0] == ServiceNodeBid(source_blockchain, destination_blockchain,
mock_bid_response['fee'],
mock_bid_response['execution_time'],
mock_bid_response['valid_until'],
mock_bid_response['signature'])
@unittest.mock.patch('pantos.common.servicenodes.requests.get')
def test_bids_url_has_slash_correct(mocked_get):
url = 'mock_url/'
source_blockchain = Blockchain.ETHEREUM
destination_blockchain = Blockchain.BNB_CHAIN
mocked_get().json.return_value = [mock_bid_response]
bids = ServiceNodeClient().bids(url, source_blockchain,
destination_blockchain)
assert bids[0] == ServiceNodeBid(source_blockchain, destination_blockchain,
mock_bid_response['fee'],
mock_bid_response['execution_time'],
mock_bid_response['valid_until'],
mock_bid_response['signature'])
@unittest.mock.patch('pantos.common.servicenodes.requests.get')
def test_bids_service_node_client_error(mocked_get):
url = 'mock_url'
source_blockchain = Blockchain.ETHEREUM
destination_blockchain = Blockchain.BNB_CHAIN
mocked_get(
).raise_for_status.side_effect = requests.exceptions.RequestException
mocked_get().json.return_value = {'message': 'specific error message'}
mocked_get().headers = mock_response_header
with pytest.raises(ServiceNodeClientError, match='specific error message'):
ServiceNodeClient().bids(url, source_blockchain,
destination_blockchain)
@unittest.mock.patch('pantos.common.servicenodes.requests.get')
def test_bids_service_node_no_response_message_client_error(mocked_get):
url = 'mock_url'
source_blockchain = Blockchain.ETHEREUM
destination_blockchain = Blockchain.BNB_CHAIN
mocked_get(
).raise_for_status.side_effect = requests.exceptions.RequestException
mocked_get().headers = mock_response_header
mocked_get().json.return_value = {}
with pytest.raises(ServiceNodeClientError):
ServiceNodeClient().bids(url, source_blockchain,
destination_blockchain)
@unittest.mock.patch('pantos.common.servicenodes.requests.get')
def test_bids_service_node_html_response_client_error(mocked_get):
url = 'mock_url'
source_blockchain = Blockchain.ETHEREUM
destination_blockchain = Blockchain.BNB_CHAIN
mocked_get(
).raise_for_status.side_effect = requests.exceptions.RequestException
mocked_get().headers = mock_response_header_html
with pytest.raises(ServiceNodeClientError):
ServiceNodeClient().bids(url, source_blockchain,
destination_blockchain)
assert not mocked_get.json.called
def test_build_bids_url_no_slash_correct():
url = 'some_url'
result = ServiceNodeClient()._ServiceNodeClient__build_bids_url(url, 1, 4)
expected = 'some_url/bids?source_blockchain=1&destination_blockchain=4'
assert result == expected
def test_build_bids_url_with_slash_correct():
url = 'some_url/'
result = ServiceNodeClient()._ServiceNodeClient__build_bids_url(url, 1, 4)
expected = 'some_url/bids?source_blockchain=1&destination_blockchain=4'
assert result == expected
@unittest.mock.patch('pantos.common.servicenodes.ServiceNodeTransferStatus')
@unittest.mock.patch('pantos.common.servicenodes.BlockchainAddress')
@unittest.mock.patch('pantos.common.servicenodes.Blockchain')
@unittest.mock.patch('pantos.common.servicenodes.uuid')
@unittest.mock.patch('pantos.common.servicenodes.requests.get')
def test_status_correct(mocked_get, mocked_uuid, mocked_blockchain,
mocked_blockchain_address, mocked_status):
task_id = uuid.UUID('cf9ff19f-b691-46c6-8645-08d05309ea84')
result = ServiceNodeClient().status('', task_id)
mocked_get.assert_called_once_with(
'/transfer/cf9ff19f-b691-46c6-8645-08d05309ea84/status', timeout=None)
mocked_get().json.assert_called_once_with()
mocked_json_result = mocked_get(
'/transfer/cf9ff19f-b691-46c6-8645-08d05309ea84/status').json()
assert result.task_id == mocked_uuid.UUID(mocked_json_result['task_id'])
assert result.source_blockchain == mocked_blockchain(
mocked_json_result['source_blockchain_id'])
assert result.destination_blockchain == mocked_blockchain(
mocked_json_result['destination_blockchain_id'])
assert result.sender_address == mocked_blockchain_address(
mocked_json_result['sender_address'])
assert result.recipient_address == mocked_blockchain_address(
mocked_json_result['recipient_address'])
assert result.source_token_address == mocked_blockchain_address(
mocked_json_result['source_token_address'])
assert result.destination_token_address == mocked_blockchain_address(
mocked_json_result['destination_token_address'])
assert result.token_amount == mocked_json_result['amount']
assert result.status == mocked_status.from_name(
mocked_json_result['status'])
assert result.transfer_id == mocked_json_result['transfer_id']
assert result.transaction_id == mocked_json_result['transaction_id']
@unittest.mock.patch('pantos.common.servicenodes.locals',
return_value=['response'])
@unittest.mock.patch('pantos.common.servicenodes.requests.get')
def test_status_exception(mocked_get, mocked_locals):
task_id = uuid.UUID('cf9ff19f-b691-46c6-8645-08d05309ea84')
mocked_get(
).raise_for_status.side_effect = requests.exceptions.RequestException
mocked_get().json.return_value = {'message': 'specific error message'}
mocked_get().headers = mock_response_header
with pytest.raises(ServiceNodeClientError, match='specific error message'):
ServiceNodeClient().status('', task_id)
@unittest.mock.patch('pantos.common.servicenodes.locals',
return_value=['response'])
@unittest.mock.patch('pantos.common.servicenodes.requests.get')
def test_status_no_response_message_exception(mocked_get, mocked_locals):
task_id = uuid.UUID('cf9ff19f-b691-46c6-8645-08d05309ea84')
mocked_get(
).raise_for_status.side_effect = requests.exceptions.RequestException
mocked_get().headers = mock_response_header
mocked_get().json.return_value = {}
with pytest.raises(ServiceNodeClientError):
ServiceNodeClient().status('', task_id)
@unittest.mock.patch('pantos.common.servicenodes.locals',
return_value=['response'])
@unittest.mock.patch('pantos.common.servicenodes.requests.get')
def test_status_html_response_message_exception(mocked_get, mocked_locals):
task_id = uuid.UUID('cf9ff19f-b691-46c6-8645-08d05309ea84')
mocked_get(
).raise_for_status.side_effect = requests.exceptions.RequestException
mocked_get().headers = mock_response_header_html
with pytest.raises(ServiceNodeClientError):
ServiceNodeClient().status('', task_id)
assert not mocked_get.json.called
def test_build_status_url_no_slash_correct():
url = 'some_url'
task_id = 'some_task_id'
result = ServiceNodeClient()._ServiceNodeClient__build_status_url(
url, task_id)
assert result == 'some_url/transfer/some_task_id/status'
def test_build_status_url_with_slash_correct():
url = 'some_url/'
task_id = 'some_task_id'
result = ServiceNodeClient()._ServiceNodeClient__build_status_url(
url, task_id)
assert result == 'some_url/transfer/some_task_id/status'
| 13,175 | Python | .py | 255 | 43.972549 | 79 | 0.702488 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,142 | test_health.py | pantos-io_common/tests/test_health.py | import unittest.mock
import pytest
from pantos.common.blockchains.base import GENERAL_RPC_ERROR_MESSAGE
from pantos.common.blockchains.enums import Blockchain
from pantos.common.exceptions import NotInitializedError
from pantos.common.health import NodesHealth
from pantos.common.health import check_blockchain_nodes_health
NODE_RPC_DOMAIN_1 = 'domain.example.com'
NODE_RPC_DOMAIN_2 = 'domain.example2.com'
@unittest.mock.patch(
'pantos.common.health._blockchain_nodes',
{Blockchain.ETHEREUM: ([NODE_RPC_DOMAIN_1, NODE_RPC_DOMAIN_2], 10)})
@unittest.mock.patch('pantos.common.health.get_blockchain_utilities')
def test_check_blockchain_nodes_health_correct(
mocked_get_blockchain_utilities):
mocked_get_blockchain_utilities().get_unhealthy_nodes.return_value = [
(NODE_RPC_DOMAIN_1, GENERAL_RPC_ERROR_MESSAGE),
(NODE_RPC_DOMAIN_2, GENERAL_RPC_ERROR_MESSAGE)
]
expected_result = {
Blockchain.ETHEREUM: NodesHealth(
healthy_total=0, unhealthy_total=2,
unhealthy_nodes=[(NODE_RPC_DOMAIN_1, GENERAL_RPC_ERROR_MESSAGE),
(NODE_RPC_DOMAIN_2, GENERAL_RPC_ERROR_MESSAGE)])
}
result = check_blockchain_nodes_health()
assert result == expected_result
@unittest.mock.patch('pantos.common.health._blockchain_nodes', {})
def test_check_blockchain_nodes_health_uninitialized_nodes():
with pytest.raises(NotInitializedError):
check_blockchain_nodes_health()
| 1,480 | Python | .py | 31 | 42.290323 | 77 | 0.743056 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,143 | test_exceptions.py | pantos-io_common/tests/test_exceptions.py | import itertools
import pytest
from pantos.common.exceptions import BaseError
from pantos.common.exceptions import ErrorCreator
class _SuperclassError(BaseError):
pass
class _SubclassError(_SuperclassError):
pass
class _SpecializedErrorWithMessage(_SuperclassError):
_message = 'specialized error message'
def __init__(self, **kwargs):
super().__init__(self._message, **kwargs)
class _SpecializedErrorWithoutMessage(_SuperclassError):
pass
class _Superclass(ErrorCreator[_SuperclassError]):
pass
class _Subclass(_Superclass):
@classmethod
def get_error_class(cls):
return _SubclassError
@pytest.mark.parametrize('third_kwarg', [None, ('third', True)])
@pytest.mark.parametrize('second_kwarg', [None, ('second', 'some text')])
@pytest.mark.parametrize('first_kwarg', [None, ('first', 100)])
@pytest.mark.parametrize(
'specialized_error_class',
[None, _SpecializedErrorWithMessage, _SpecializedErrorWithoutMessage])
def test_error_creator_create_error_correct(specialized_error_class,
first_kwarg, second_kwarg,
third_kwarg):
set_custom_message = (specialized_error_class
is not _SpecializedErrorWithMessage)
message = ('custom error message'
if set_custom_message else specialized_error_class._message)
kwargs = {
kwarg[0]: kwarg[1]
for kwarg in [first_kwarg, second_kwarg, third_kwarg]
if kwarg is not None
}
error = _Subclass()._create_error(
message if set_custom_message else None,
specialized_error_class=specialized_error_class, **kwargs)
assert isinstance(error, Exception)
assert isinstance(error, BaseError)
assert isinstance(error, _SuperclassError)
assert isinstance(error, _SubclassError)
if specialized_error_class is not None:
assert isinstance(error, specialized_error_class)
assert not any(
isinstance(error, error_class) for error_class in
[_SpecializedErrorWithMessage, _SpecializedErrorWithoutMessage]
if error_class is not specialized_error_class)
assert message in str(error)
assert all(
str(part) in str(error)
for part in itertools.chain.from_iterable(kwargs.items()))
| 2,333 | Python | .py | 55 | 35.2 | 75 | 0.69571 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,144 | test_protocol.py | pantos-io_common/tests/test_protocol.py | import unittest.mock
import pytest
import semantic_version # type: ignore
from pantos.common.protocol import get_latest_protocol_version
from pantos.common.protocol import get_supported_protocol_versions
from pantos.common.protocol import is_supported_protocol_version
def _to_semantic_version(version):
return semantic_version.Version(version)
def _to_semantic_versions(*versions):
return {_to_semantic_version(version) for version in versions}
_SUPPORTED_PROTOCOL_VERSIONS_SMALL = _to_semantic_versions('1.0.0')
_LATEST_PROTOCOL_VERSION_SMALL = _to_semantic_version('1.0.0')
_SUPPORTED_PROTOCOL_VERSIONS_MEDIUM = _to_semantic_versions(
'1.0.0', '0.1.0', '0.1.1')
_LATEST_PROTOCOL_VERSION_MEDIUM = _to_semantic_version('1.0.0')
_SUPPORTED_PROTOCOL_VERSIONS_LARGE = _to_semantic_versions(
'1.0.0', '0.9.5', '2.0.10', '0.1.0', '2.0.9', '2.0.0', '0.1.1')
_LATEST_PROTOCOL_VERSION_LARGE = _to_semantic_version('2.0.10')
@pytest.mark.parametrize(
'supported_protocol_versions, latest_protocol_version',
[(_SUPPORTED_PROTOCOL_VERSIONS_SMALL, _LATEST_PROTOCOL_VERSION_SMALL),
(_SUPPORTED_PROTOCOL_VERSIONS_MEDIUM, _LATEST_PROTOCOL_VERSION_MEDIUM),
(_SUPPORTED_PROTOCOL_VERSIONS_LARGE, _LATEST_PROTOCOL_VERSION_LARGE)])
def test_get_latest_protocol_version_correct(supported_protocol_versions,
latest_protocol_version):
with unittest.mock.patch(
'pantos.common.protocol._SUPPORTED_PROTOCOL_VERSIONS',
supported_protocol_versions):
assert get_latest_protocol_version() == latest_protocol_version
@pytest.mark.parametrize('supported_protocol_versions', [
_SUPPORTED_PROTOCOL_VERSIONS_SMALL, _SUPPORTED_PROTOCOL_VERSIONS_MEDIUM,
_SUPPORTED_PROTOCOL_VERSIONS_LARGE
])
def test_get_supported_protocol_versions_correct(supported_protocol_versions):
with unittest.mock.patch(
'pantos.common.protocol._SUPPORTED_PROTOCOL_VERSIONS',
supported_protocol_versions):
assert get_supported_protocol_versions() == sorted(
supported_protocol_versions)
@pytest.mark.parametrize(
'supported_protocol_versions, protocol_version, is_supported',
[(_SUPPORTED_PROTOCOL_VERSIONS_SMALL, '1.0.0', True),
(_SUPPORTED_PROTOCOL_VERSIONS_SMALL, '1.1.0', False),
(_SUPPORTED_PROTOCOL_VERSIONS_MEDIUM, '0.1.0', True),
(_SUPPORTED_PROTOCOL_VERSIONS_MEDIUM, '2.0.0', False),
(_SUPPORTED_PROTOCOL_VERSIONS_LARGE, '2.0.0', True),
(_SUPPORTED_PROTOCOL_VERSIONS_LARGE, '3.0.1', False)])
def test_is_supported_protocol_version_correct(supported_protocol_versions,
protocol_version, is_supported):
with unittest.mock.patch(
'pantos.common.protocol._SUPPORTED_PROTOCOL_VERSIONS',
supported_protocol_versions):
assert is_supported_protocol_version(
_to_semantic_version(protocol_version)) is is_supported
| 2,975 | Python | .py | 54 | 48.203704 | 79 | 0.713301 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,145 | test_logging.py | pantos-io_common/tests/test_logging.py | import enum
import json
import logging
import logging.handlers
import pathlib
import sys
import tempfile
import unittest.mock
import json_log_formatter # type: ignore
import pytest
from pantos.common.blockchains.enums import Blockchain
from pantos.common.logging import _HUMAN_READABLE_LOG_FORMAT
from pantos.common.logging import LogFile
from pantos.common.logging import LogFormat
from pantos.common.logging import _DataDogJSONFormatter
from pantos.common.logging import _HumanReadableFormatter
from pantos.common.logging import initialize_logger
_LOG_MESSAGE = 'test message'
_LOG_EXTRA_KEY_1 = 'first test key'
_LOG_EXTRA_VALUE_1 = 'extra test message'
_LOG_EXTRA_KEY_2 = 'second test key'
_LOG_EXTRA_VALUE_2 = [1, 2, 3]
_LOG_EXTRA_KEY_3 = 'blockchain'
_LOG_EXTRA_VALUE_3 = Blockchain.AVALANCHE
_LOG_ERROR_MESSAGE = 'error message'
_LOG_FILE_NAME = 'test.log'
class _LogFileTest(enum.Enum):
NO_LOG_FILE = 1
LOG_FILE_EXISTING = 2
LOG_DIRECTORY_EXISTING = 3
LOG_DIRECTORY_NOT_EXISTING = 4
@pytest.fixture
def human_readable_formatter():
return _HumanReadableFormatter()
@pytest.fixture
def datadog_custom_formatter():
return _DataDogJSONFormatter()
@pytest.fixture
def root_logger():
root_logger = logging.getLogger()
root_logger.handlers.clear()
return root_logger
def test_human_readable_formatter_format_correct(root_logger,
human_readable_formatter):
log_record = root_logger.makeRecord(
'', logging.ERROR, '', 0, _LOG_MESSAGE, (), None, extra={
_LOG_EXTRA_KEY_1: _LOG_EXTRA_VALUE_1,
_LOG_EXTRA_KEY_2: _LOG_EXTRA_VALUE_2
})
formatted_log = human_readable_formatter.format(log_record)
assert _LOG_MESSAGE in formatted_log
assert _LOG_EXTRA_KEY_1 in formatted_log
assert _LOG_EXTRA_VALUE_1 in formatted_log
assert _LOG_EXTRA_KEY_2 in formatted_log
assert str(_LOG_EXTRA_VALUE_2) in formatted_log
def test_datadog_custom_formatter_format_correct(root_logger,
datadog_custom_formatter):
log_record = root_logger.makeRecord(
'', logging.ERROR, '', 0, _LOG_MESSAGE, (), None, extra={
_LOG_EXTRA_KEY_1: _LOG_EXTRA_VALUE_1,
_LOG_EXTRA_KEY_2: _LOG_EXTRA_VALUE_2,
_LOG_EXTRA_KEY_3: _LOG_EXTRA_VALUE_3,
})
formatted_log = datadog_custom_formatter.format(log_record)
json_formatted_log = json.loads(formatted_log)
assert isinstance(json_formatted_log, dict)
assert json_formatted_log[_LOG_EXTRA_KEY_1] == _LOG_EXTRA_VALUE_1
assert json_formatted_log[_LOG_EXTRA_KEY_2] == _LOG_EXTRA_VALUE_2
assert json_formatted_log[
_LOG_EXTRA_KEY_3] == _LOG_EXTRA_VALUE_3.name.capitalize()
def test_datadog_custom_formatter_format_error_correct(
root_logger, datadog_custom_formatter):
try:
raise ValueError(_LOG_ERROR_MESSAGE)
except Exception:
exc_info = sys.exc_info()
log_record = root_logger.makeRecord(
'', logging.ERROR, '', 0, _LOG_MESSAGE, (), exc_info, extra={
_LOG_EXTRA_KEY_1: _LOG_EXTRA_VALUE_1,
_LOG_EXTRA_KEY_2: _LOG_EXTRA_VALUE_2,
_LOG_EXTRA_KEY_3: _LOG_EXTRA_VALUE_3
})
formatted_log = datadog_custom_formatter.format(log_record)
json_formatted_log = json.loads(formatted_log)
assert isinstance(json_formatted_log, dict)
assert json_formatted_log[_LOG_EXTRA_KEY_1] == _LOG_EXTRA_VALUE_1
assert json_formatted_log[_LOG_EXTRA_KEY_2] == _LOG_EXTRA_VALUE_2
assert json_formatted_log[
_LOG_EXTRA_KEY_3] == _LOG_EXTRA_VALUE_3.name.capitalize()
assert json_formatted_log['message'] == _LOG_MESSAGE
@pytest.mark.parametrize('logger', [
logging.getLogger(),
logging.getLogger('test1'),
logging.getLogger('test2').getChild('test3')
])
@pytest.mark.parametrize('log_format',
[log_format for log_format in LogFormat])
@pytest.mark.parametrize('standard_output', [True, False])
@pytest.mark.parametrize('log_file_test',
[log_file_test for log_file_test in _LogFileTest])
@pytest.mark.parametrize('max_bytes', [0, 1024 * 1024, 10 * 1024 * 1024])
@pytest.mark.parametrize('backup_count', [0, 1, 10])
@pytest.mark.parametrize('debug', [True, False])
@pytest.mark.parametrize('initial_handler', [True, False])
def test_initialize_logger_correct(logger, log_format, standard_output,
log_file_test, max_bytes, backup_count,
debug, initial_handler):
log_file = _create_log_file(log_file_test, max_bytes, backup_count)
number_handlers = sum([standard_output, log_file is not None])
if initial_handler:
logger.addHandler(logging.StreamHandler())
initialize_logger(logger, log_format, standard_output, log_file, debug)
assert len(logger.handlers) == number_handlers
standard_output_handler = False
rotating_file_handler = False
for handler in logger.handlers:
_check_log_format(log_format, handler)
assert isinstance(handler, logging.StreamHandler)
if isinstance(handler, logging.handlers.RotatingFileHandler):
assert not rotating_file_handler
assert log_file is not None
assert log_file.file_path.exists()
assert pathlib.Path(handler.baseFilename) == log_file.file_path
assert handler.maxBytes == max_bytes
assert handler.backupCount == backup_count
rotating_file_handler = True
else:
assert not standard_output_handler
assert standard_output
assert handler.stream == sys.stdout
standard_output_handler = True
assert logger.level == (logging.DEBUG if debug else logging.INFO)
_delete_log_file(log_file)
@pytest.mark.parametrize('log_format',
[log_format for log_format in LogFormat])
def test_initialize_logger_log_correct(root_logger, log_format):
file_path = pathlib.Path(tempfile.mkstemp()[1])
log_file = LogFile(file_path, 0, 0)
initialize_logger(root_logger, log_format, False, log_file, False)
root_logger.log(
logging.INFO, _LOG_MESSAGE, extra={
_LOG_EXTRA_KEY_1: _LOG_EXTRA_VALUE_1,
_LOG_EXTRA_KEY_2: _LOG_EXTRA_VALUE_2
})
with file_path.open() as log_file:
log_entry = log_file.readline()
assert _LOG_MESSAGE in log_entry
assert _LOG_EXTRA_KEY_1 in log_entry
assert _LOG_EXTRA_VALUE_1 in log_entry
assert _LOG_EXTRA_KEY_2 in log_entry
assert str(_LOG_EXTRA_VALUE_2) in log_entry
file_path.unlink()
@unittest.mock.patch('pantos.common.logging.pathlib.Path.mkdir')
def test_initialize_logger_permission_error(mocked_mkdir, root_logger):
mocked_mkdir.side_effect = PermissionError
directory_path = pathlib.Path(tempfile.mkdtemp())
file_path = directory_path / 'test' / _LOG_FILE_NAME
log_file = LogFile(file_path, 0, 0)
with pytest.raises(OSError):
initialize_logger(root_logger, LogFormat.JSON, False, log_file, False)
directory_path.rmdir()
@pytest.mark.parametrize('log_format',
[log_format for log_format in LogFormat])
def test_log_format_from_name_correct(log_format):
assert LogFormat.from_name(log_format.name.lower()) is log_format
assert LogFormat.from_name(log_format.name.upper()) is log_format
def test_log_format_from_name_error():
with pytest.raises(NameError):
LogFormat.from_name('unknown_log_format')
def _check_log_format(log_format, handler):
if log_format is LogFormat.HUMAN_READABLE:
assert isinstance(handler.formatter, _HumanReadableFormatter)
assert handler.formatter._fmt == _HUMAN_READABLE_LOG_FORMAT
elif log_format is LogFormat.JSON:
assert isinstance(handler.formatter,
json_log_formatter.VerboseJSONFormatter)
else:
raise NotImplementedError
def _create_log_file(log_file_test, max_bytes, backup_count):
if log_file_test is _LogFileTest.NO_LOG_FILE:
return None
elif log_file_test is _LogFileTest.LOG_FILE_EXISTING:
file_path = pathlib.Path(tempfile.mkstemp()[1])
elif log_file_test is _LogFileTest.LOG_DIRECTORY_EXISTING:
file_path = pathlib.Path(tempfile.mkdtemp()) / _LOG_FILE_NAME
elif log_file_test is _LogFileTest.LOG_DIRECTORY_NOT_EXISTING:
file_path = pathlib.Path(tempfile.mkdtemp()) / 'test' / _LOG_FILE_NAME
else:
raise NotImplementedError
return LogFile(file_path, max_bytes, backup_count)
def _delete_log_file(log_file):
if log_file is None:
return
temp_dir_path = pathlib.Path(tempfile.gettempdir())
path = log_file.file_path
while True:
if path == temp_dir_path:
return
elif path.is_dir():
path.rmdir()
else:
path.unlink()
path = path.parent
| 8,980 | Python | .py | 203 | 37.270936 | 78 | 0.680695 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,146 | test_configuration.py | pantos-io_common/tests/test_configuration.py | import pathlib
from unittest.mock import mock_open
from unittest.mock import patch
import pytest
from pantos.common.configuration import Config
from pantos.common.configuration import ConfigError
def test_validate_one_not_present():
# Test with valid data
validation_schema = {
'private_key_path': {
'type': 'string',
'required': True,
'one_not_present': 'private_key_value'
},
'private_key_value': {
'type': 'string',
'required': True,
'one_not_present': 'private_key_path'
}
}
config_dict = {'private_key_path': 'path', 'private_key_value': ''}
config = Config('')
result = config._Config__validate(config_dict, validation_schema)
assert result is not None
# Test with invalid data
config_dict = {'private_key_path': '', 'private_key_value': ''}
with pytest.raises(ConfigError):
config._Config__validate(config_dict, validation_schema)
@patch('pathlib.Path.is_file')
@patch('builtins.open', new_callable=mock_open, read_data="loaded")
def test_validate_load_if_file(mock_open, mock_is_file):
# Test with valid data
validation_schema = {
'private_key_path': {
'type': 'string',
'coerce': 'load_if_file'
},
}
config_dict = {'private_key_path': 'not a path'}
config = Config('')
mock_is_file.return_value = True
result = config._Config__validate(config_dict, validation_schema)
assert result == {'private_key_path': 'loaded'}
# Test with invalid data
mock_is_file.return_value = False
result = config._Config__validate(config_dict, validation_schema)
assert result == {'private_key_path': 'not a path'}
def test_validate_load_if_long_string_not_file():
validation_schema = {
'private_key_path': {
'type': 'string',
'coerce': 'load_if_file'
},
}
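    # A very long value must pass through unchanged rather than being
    # probed as a file path.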
config_dict = {'private_key_path': '{ a }' * 100000}
config = Config('')
result = config._Config__validate(config_dict, validation_schema)
assert result == {'private_key_path': '{ a }' * 100000}
@patch('pathlib.Path.is_file')
def test_validate_load_os_error_escalates(mock_is_file):
validation_schema = {
'private_key_path': {
'type': 'string',
'coerce': 'load_if_file'
},
}
config_dict = {'private_key_path': '/tmp/invalid'} # nosec
config = Config('')
mock_is_file.return_value = True
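    # is_file() is mocked to True, so the coercion tries to read the
    # nonexistent file; the resulting OSError must surface as a ConfigError.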
with pytest.raises(ConfigError):
config._Config__validate(config_dict, validation_schema)
@patch('pathlib.Path.is_file')
@patch('dotenv.load_dotenv')
@patch('pyaml_env.parse_config')
@patch('builtins.open', new_callable=mock_open, read_data="data")
def test_parse_file(mock_open, mock_parse_config, mock_load_dotenv,
mock_is_file):
mock_is_file.return_value = True
mock_load_dotenv.return_value = True
mock_parse_config.return_value = {'key': 'value'}
config = Config('config.yaml')
result = config._Config__parse_file(
pathlib.Path('config.yaml')) # Accessing private function
assert result == {'key': 'value'}
mock_load_dotenv.assert_called_once()
mock_parse_config.assert_called_once()
@patch('pathlib.Path.is_file')
@patch('dotenv.load_dotenv')
@patch('pyaml_env.parse_config')
@patch('builtins.open', new_callable=mock_open, read_data="data")
def test_parse_file_error(mock_open, mock_parse_config, mock_load_dotenv,
mock_is_file):
mock_is_file.return_value = True
mock_load_dotenv.side_effect = Exception()
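    # A failing .env load must not abort parsing; the YAML config is still
    # returned.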
mock_parse_config.return_value = {'key': 'value'}
config = Config('config.yaml')
result = config._Config__parse_file(
pathlib.Path('config.yaml')) # Accessing private function
assert result == {'key': 'value'}

# File: pantos-io_common/tests/test_entities.py
import pytest
from pantos.common.entities import ServiceNodeTransferStatus
@pytest.mark.parametrize('name, status',
[('accepted', ServiceNodeTransferStatus.ACCEPTED),
('failed', ServiceNodeTransferStatus.FAILED),
('submitted', ServiceNodeTransferStatus.SUBMITTED),
('reverted', ServiceNodeTransferStatus.REVERTED),
('confirmed', ServiceNodeTransferStatus.CONFIRMED)])
def test_service_node_transfer_status_from_name_correct(name, status):
assert ServiceNodeTransferStatus.from_name(name) == status
def test_service_node_transfer_status_from_name_raises_error():
with pytest.raises(NameError):
ServiceNodeTransferStatus.from_name('confirmedz')

# File: pantos-io_common/tests/test_restapi.py
import json
import unittest.mock
import pytest
import werkzeug.exceptions
from pantos.common.blockchains.base import GENERAL_RPC_ERROR_MESSAGE
from pantos.common.blockchains.base import UnhealthyNode
from pantos.common.blockchains.enums import Blockchain
from pantos.common.exceptions import NotInitializedError
from pantos.common.health import NodesHealth
from pantos.common.restapi import NodesHealthResource
from pantos.common.restapi import bad_request
from pantos.common.restapi import conflict
from pantos.common.restapi import forbidden
from pantos.common.restapi import internal_server_error
from pantos.common.restapi import no_content_response
from pantos.common.restapi import not_acceptable
from pantos.common.restapi import ok_response
from pantos.common.restapi import resource_not_found
@pytest.fixture
def error_message():
return 'error message'
@pytest.fixture
def error_message_list():
return [
'first error message', 'second error message', 'third error message'
]
@pytest.fixture
def error_message_dict():
return {
'first_property': 'first error message',
'second_property': 'second error message',
'third_property': 'third error message'
}
@unittest.mock.patch('pantos.common.restapi.check_blockchain_nodes_health')
def test_nodes_health_resource_correct(mocked_check_blockchain_nodes_health):
mocked_check_blockchain_nodes_health.return_value = {
Blockchain.ETHEREUM: NodesHealth(1, 0, []),
Blockchain.BNB_CHAIN: NodesHealth(0, 2, [
UnhealthyNode('node1_domain', GENERAL_RPC_ERROR_MESSAGE),
UnhealthyNode('node2_domain', GENERAL_RPC_ERROR_MESSAGE)
])
}
nodes_health_resource = NodesHealthResource()
response = nodes_health_resource.get()
assert response.status_code == 200
assert json.loads(response.data) == {
'Ethereum': {
'healthy_total': 1,
'unhealthy_total': 0,
'unhealthy_nodes': []
},
'Bnb_chain': {
'healthy_total': 0,
'unhealthy_total': 2,
'unhealthy_nodes': [{
'node_domain': 'node1_domain',
"status": GENERAL_RPC_ERROR_MESSAGE
}, {
'node_domain': "node2_domain",
'status': GENERAL_RPC_ERROR_MESSAGE
}]
}
}
@unittest.mock.patch('pantos.common.restapi.check_blockchain_nodes_health')
def test_nodes_health_resource_uninitialized_nodes(
mocked_check_blockchain_nodes_health):
mocked_check_blockchain_nodes_health.side_effect = NotInitializedError('')
nodes_health_resource = NodesHealthResource()
with pytest.raises(werkzeug.exceptions.HTTPException) as exception_info:
nodes_health_resource.get()
assert isinstance(exception_info.value,
werkzeug.exceptions.InternalServerError)
assert exception_info.value.data[
'message'] == 'no blockchain nodes have been initialized yet'
@unittest.mock.patch('pantos.common.restapi.check_blockchain_nodes_health')
def test_nodes_health_resource_exception(mocked_check_blockchain_nodes_health):
mocked_check_blockchain_nodes_health.side_effect = Exception
nodes_health_resource = NodesHealthResource()
with pytest.raises(werkzeug.exceptions.HTTPException) as exception_info:
nodes_health_resource.get()
assert isinstance(exception_info.value,
werkzeug.exceptions.InternalServerError)
assert exception_info.value.data['message'] is None
def test_ok_response():
data = {
'first_property': 1,
        'second_property': 'a',
'third_property': True,
'fourth_property': 1.01
}
response = ok_response(data)
assert response.status_code == 200
assert response.mimetype == 'application/json'
assert json.loads(response.data) == data
def test_no_content_response():
response = no_content_response()
assert response.status_code == 204
assert len(response.data) == 0
def test_bad_request_str(error_message):
with pytest.raises(werkzeug.exceptions.HTTPException) as exception_info:
bad_request(error_message)
assert isinstance(exception_info.value, werkzeug.exceptions.BadRequest)
assert exception_info.value.data['message'] == error_message
def test_bad_request_list(error_message_list):
with pytest.raises(werkzeug.exceptions.HTTPException) as exception_info:
bad_request(error_message_list)
assert isinstance(exception_info.value, werkzeug.exceptions.BadRequest)
assert exception_info.value.data['message'] == error_message_list
def test_bad_request_dict(error_message_dict):
with pytest.raises(werkzeug.exceptions.HTTPException) as exception_info:
bad_request(error_message_dict)
assert isinstance(exception_info.value, werkzeug.exceptions.BadRequest)
assert exception_info.value.data['message'] == error_message_dict
def test_forbidden(error_message):
with pytest.raises(werkzeug.exceptions.HTTPException) as exception_info:
forbidden(error_message)
assert isinstance(exception_info.value, werkzeug.exceptions.Forbidden)
assert exception_info.value.data['message'] == error_message
def test_resource_not_found(error_message):
with pytest.raises(werkzeug.exceptions.HTTPException) as exception_info:
resource_not_found(error_message)
assert isinstance(exception_info.value, werkzeug.exceptions.NotFound)
assert exception_info.value.data['message'] == error_message
def test_not_acceptable_str(error_message):
with pytest.raises(werkzeug.exceptions.HTTPException) as exception_info:
not_acceptable(error_message)
assert isinstance(exception_info.value, werkzeug.exceptions.NotAcceptable)
assert exception_info.value.data['message'] == error_message
def test_not_acceptable_list(error_message_list):
with pytest.raises(werkzeug.exceptions.HTTPException) as exception_info:
not_acceptable(error_message_list)
assert isinstance(exception_info.value, werkzeug.exceptions.NotAcceptable)
assert exception_info.value.data['message'] == error_message_list
def test_not_acceptable_dict(error_message_dict):
with pytest.raises(werkzeug.exceptions.HTTPException) as exception_info:
not_acceptable(error_message_dict)
assert isinstance(exception_info.value, werkzeug.exceptions.NotAcceptable)
assert exception_info.value.data['message'] == error_message_dict
def test_conflict(error_message):
with pytest.raises(werkzeug.exceptions.HTTPException) as exception_info:
conflict(error_message)
assert isinstance(exception_info.value, werkzeug.exceptions.Conflict)
assert exception_info.value.data['message'] == error_message
@pytest.mark.parametrize('with_error_message', [True, False])
def test_internal_server_error(with_error_message, error_message):
with pytest.raises(werkzeug.exceptions.HTTPException) as exception_info:
internal_server_error(error_message if with_error_message else None)
assert isinstance(exception_info.value,
werkzeug.exceptions.InternalServerError)
assert exception_info.value.data['message'] == (
error_message if with_error_message else None)

# File: pantos-io_common/tests/blockchains/conftest.py
import atexit
import dataclasses
import pathlib
import tempfile
import uuid
import pytest
from pantos.common.blockchains.base import BlockchainUtilities
from pantos.common.blockchains.base import VersionedContractAbi
from pantos.common.blockchains.enums import Blockchain
from pantos.common.blockchains.enums import ContractAbi
_ACCOUNT_ADDRESS = '0x352F6A5abD3564d5016336e5dA91389B7C47f6dd'
_ACCOUNT_KEYSTORE = (
'{"address":"352f6a5abd3564d5016336e5da91389b7c47f6dd","crypto":{"cipher"'
':"aes-128-ctr","ciphertext":"452df17b9bb624246a66b16585c4aece1adefc30cc0'
'0bfc6db3108a771b91033","cipherparams":{"iv":"b14fdbc0984c4b8d1769ff74d1e'
'd8f79"},"kdf":"scrypt","kdfparams":{"dklen":32,"n":262144,"p":1,"r":8,"s'
'alt":"4a094c0a4f152b3908854074e12c9eca2427d10bff5686a519626d6b07a7dc77"}'
',"mac":"58c24387604f78f55cd962da62681aba710e6aa4afea9d44b52ced29e7c317bd'
'"},"id":"c69596a7-c2f6-4d37-a4fb-cefe4b3f432d","version":3}')
_ACCOUNT_KEYSTORE_PASSWORD = 'Mu(bK{!z'
_ACCOUNT_PRIVATE_KEY = \
'cf10f5c9b5229dbcc5bee72d6309192da944dc837efb703581b5e91795adfab2'
_AVERAGE_BLOCK_TIME = 14
_BLOCKCHAIN_NODE_URL = 'https://some.url'
_FALLBACK_BLOCKCHAIN_NODE_URL = 'https://some2.url'
_INACTIVE_BLOCKCHAINS = [Blockchain.SOLANA]
_REQUIRED_TRANSACTION_CONFIRMATIONS = 20
_TRANSACTION_NETWORK_ID = 1
_TRANSACTION_CONTRACT_ADDRESS = '0xB685E5B2b9fB7a3EbD196f4C0eb8B8AB6d589a12'
_TRANSACTION_FUNCTION_SELECTOR = '0xa9059c1b'
_TRANSACTION_FUNCTION_ARGS = (236421, 956782234, True)
_TRANSACTION_GAS = 90000
_TRANSACTION_MIN_ADAPTABLE_FEE_PER_GAS = int(1e8)
_TRANSACTION_MAX_TOTAL_FEE_PER_GAS = int(1e10)
_TRANSACTION_AMOUNT = int(1e16)
_TRANSACTION_NONCE = 6790134
_TRANSACTION_ADAPTABLE_FEE_INCREASE_FACTOR = 1.101
_TRANSACTION_BLOCKS_UNTIL_RESUBMISSION = 10
_TRANSACTION_ID = \
'0xf8d93e2c7052875c7bfbaeb008c1988f30666a584ed544d9246b3b0a8287bd35'
_TRANSACTION_ADAPTABLE_FEE_PER_GAS = int(1.1e8)
@dataclasses.dataclass
class Account:
address: str
private_key: str
keystore: str
keystore_password: str
keystore_path: pathlib.Path = dataclasses.field(init=False)
def __post_init__(self):
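        # Write the keystore to a temporary file and schedule its removal
        # at interpreter exit.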
self.keystore_path = pathlib.Path(tempfile.mkstemp()[1])
with self.keystore_path.open('w') as keystore_file:
keystore_file.write(self.keystore)
atexit.register(self.keystore_path.unlink)
@pytest.fixture(scope='package')
def account():
return Account(_ACCOUNT_ADDRESS, _ACCOUNT_PRIVATE_KEY, _ACCOUNT_KEYSTORE,
_ACCOUNT_KEYSTORE_PASSWORD)
@pytest.fixture(scope='package', params=Blockchain)
def blockchain(request):
return request.param
@pytest.fixture(
scope='package', params=[
blockchain for blockchain in Blockchain
if blockchain not in _INACTIVE_BLOCKCHAINS
])
def active_blockchain(request):
return request.param
@pytest.fixture(scope='package')
def average_block_time():
return _AVERAGE_BLOCK_TIME
@pytest.fixture(scope='package')
def blockchain_node_urls():
return [_BLOCKCHAIN_NODE_URL]
@pytest.fixture(scope='package')
def fallback_blockchain_node_urls():
return [_FALLBACK_BLOCKCHAIN_NODE_URL]
@pytest.fixture(scope='package')
def required_transaction_confirmations():
return _REQUIRED_TRANSACTION_CONFIRMATIONS
@pytest.fixture(scope='package')
def transaction_network_id():
return _TRANSACTION_NETWORK_ID
@pytest.fixture(scope='package')
def transaction_contract_address():
return _TRANSACTION_CONTRACT_ADDRESS
@pytest.fixture(scope='package')
def transaction_function_selector():
return _TRANSACTION_FUNCTION_SELECTOR
@pytest.fixture(scope='package')
def transaction_function_args():
return _TRANSACTION_FUNCTION_ARGS
@pytest.fixture(scope='package', params=[None, _TRANSACTION_GAS])
def transaction_gas(request):
return request.param
@pytest.fixture(scope='package')
def transaction_min_adaptable_fee_per_gas():
return _TRANSACTION_MIN_ADAPTABLE_FEE_PER_GAS
@pytest.fixture(scope='package',
params=[None, _TRANSACTION_MAX_TOTAL_FEE_PER_GAS])
def transaction_max_total_fee_per_gas(request):
return request.param
@pytest.fixture(scope='package', params=[None, _TRANSACTION_AMOUNT])
def transaction_amount(request):
return request.param
@pytest.fixture(scope='package')
def transaction_nonce():
return _TRANSACTION_NONCE
@pytest.fixture(scope='package')
def transaction_adaptable_fee_increase_factor():
return _TRANSACTION_ADAPTABLE_FEE_INCREASE_FACTOR
@pytest.fixture(scope='package')
def transaction_blocks_until_resubmission():
return _TRANSACTION_BLOCKS_UNTIL_RESUBMISSION
@pytest.fixture(scope='package')
def transaction_id():
return _TRANSACTION_ID
@pytest.fixture(scope='package')
def transaction_adaptable_fee_per_gas():
return _TRANSACTION_ADAPTABLE_FEE_PER_GAS
@pytest.fixture(scope='package', params=ContractAbi)
def versioned_contract_abi(request, protocol_version):
return VersionedContractAbi(request.param, protocol_version)
@pytest.fixture
def transaction_submission_request(transaction_contract_address,
versioned_contract_abi,
transaction_function_selector,
transaction_function_args, transaction_gas,
transaction_min_adaptable_fee_per_gas,
transaction_max_total_fee_per_gas,
transaction_amount, transaction_nonce):
return BlockchainUtilities.TransactionSubmissionRequest(
transaction_contract_address, versioned_contract_abi,
transaction_function_selector, transaction_function_args,
transaction_gas, transaction_min_adaptable_fee_per_gas,
transaction_max_total_fee_per_gas, transaction_amount,
transaction_nonce)
@pytest.fixture
def transaction_submission_response(transaction_id,
transaction_adaptable_fee_per_gas):
return BlockchainUtilities.TransactionSubmissionResponse(
transaction_id, transaction_adaptable_fee_per_gas)
@pytest.fixture
def transaction_resubmission_request(
transaction_contract_address, versioned_contract_abi,
transaction_function_selector, transaction_function_args,
transaction_gas, transaction_min_adaptable_fee_per_gas,
transaction_max_total_fee_per_gas, transaction_amount,
transaction_nonce, transaction_adaptable_fee_increase_factor):
return BlockchainUtilities.TransactionResubmissionRequest(
transaction_contract_address, versioned_contract_abi,
transaction_function_selector, transaction_function_args,
transaction_gas, transaction_min_adaptable_fee_per_gas,
transaction_max_total_fee_per_gas, transaction_amount,
transaction_nonce, transaction_adaptable_fee_increase_factor)
@pytest.fixture
def transaction_resubmission_request_dict(transaction_resubmission_request):
return transaction_resubmission_request.to_dict()
@pytest.fixture
def transaction_resubmission_response(transaction_id,
transaction_adaptable_fee_per_gas):
return BlockchainUtilities.TransactionResubmissionResponse(
transaction_id, transaction_adaptable_fee_per_gas)
@pytest.fixture
def transaction_submission_start_request(
transaction_contract_address, versioned_contract_abi,
transaction_function_selector, transaction_function_args,
transaction_gas, transaction_min_adaptable_fee_per_gas,
transaction_max_total_fee_per_gas, transaction_amount,
transaction_nonce, transaction_adaptable_fee_increase_factor,
transaction_blocks_until_resubmission):
return BlockchainUtilities.TransactionSubmissionStartRequest(
transaction_contract_address, versioned_contract_abi,
transaction_function_selector, transaction_function_args,
transaction_gas, transaction_min_adaptable_fee_per_gas,
transaction_max_total_fee_per_gas, transaction_amount,
transaction_nonce, transaction_adaptable_fee_increase_factor,
transaction_blocks_until_resubmission)
@pytest.fixture(scope='package')
def internal_transaction_id():
return uuid.uuid4()

# File: pantos-io_common/tests/blockchains/test_celo.py
import pytest
from pantos.common.blockchains.celo import CeloUtilities
from pantos.common.blockchains.celo import CeloUtilitiesError
from pantos.common.blockchains.enums import Blockchain
@pytest.fixture(scope='module')
def celo_utilities(blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time, required_transaction_confirmations,
transaction_network_id):
return CeloUtilities(blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time,
required_transaction_confirmations,
transaction_network_id)
def test_get_blockchain_correct(celo_utilities):
assert celo_utilities.get_blockchain() is Blockchain.CELO
assert CeloUtilities.get_blockchain() is Blockchain.CELO
def test_get_error_class_correct(celo_utilities):
assert celo_utilities.get_error_class() is CeloUtilitiesError
assert CeloUtilities.get_error_class() is CeloUtilitiesError

# File: pantos-io_common/tests/blockchains/test_factory.py
import unittest.mock
import pytest
from pantos.common.blockchains.avalanche import AvalancheUtilities
from pantos.common.blockchains.base import BlockchainUtilities
from pantos.common.blockchains.bnbchain import BnbChainUtilities
from pantos.common.blockchains.celo import CeloUtilities
from pantos.common.blockchains.cronos import CronosUtilities
from pantos.common.blockchains.enums import Blockchain
from pantos.common.blockchains.ethereum import EthereumUtilities
from pantos.common.blockchains.factory import _blockchain_utilities
from pantos.common.blockchains.factory import get_blockchain_utilities
from pantos.common.blockchains.factory import initialize_blockchain_utilities
from pantos.common.blockchains.polygon import PolygonUtilities
from pantos.common.blockchains.solana import SolanaUtilities
from pantos.common.blockchains.sonic import SonicUtilities
from pantos.common.exceptions import NotInitializedError
@pytest.fixture(autouse=True)
def clear_blockchain_utilities():
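    # Reset the factory's registry so every test starts uninitialized.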
_blockchain_utilities.clear()
@pytest.mark.parametrize('blockchain',
[blockchain for blockchain in Blockchain])
def test_get_blockchain_utilities_initialized(
blockchain, blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time, required_transaction_confirmations,
transaction_network_id):
blockchain_utilities_class = _get_blockchain_utilities_class(blockchain)
with unittest.mock.patch.object(blockchain_utilities_class, '__init__',
lambda *args, **kwargs: None):
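        # The no-op __init__ avoids real node connections; only the factory
        # registration and the returned class are under test.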
initialize_blockchain_utilities(blockchain, blockchain_node_urls,
fallback_blockchain_node_urls,
average_block_time,
required_transaction_confirmations,
transaction_network_id)
blockchain_utilities = get_blockchain_utilities(blockchain)
assert isinstance(blockchain_utilities, BlockchainUtilities)
assert isinstance(blockchain_utilities, blockchain_utilities_class)
@pytest.mark.parametrize('blockchain',
[blockchain for blockchain in Blockchain])
def test_get_blockchain_utilities_not_initialized(blockchain):
with pytest.raises(NotInitializedError):
get_blockchain_utilities(blockchain)
def _get_blockchain_utilities_class(blockchain):
if blockchain is Blockchain.AVALANCHE:
return AvalancheUtilities
if blockchain is Blockchain.BNB_CHAIN:
return BnbChainUtilities
if blockchain is Blockchain.CELO:
return CeloUtilities
if blockchain is Blockchain.CRONOS:
return CronosUtilities
if blockchain is Blockchain.ETHEREUM:
return EthereumUtilities
if blockchain is Blockchain.SONIC:
return SonicUtilities
if blockchain is Blockchain.POLYGON:
return PolygonUtilities
if blockchain is Blockchain.SOLANA:
return SolanaUtilities
raise NotImplementedError

# File: pantos-io_common/tests/blockchains/test_tasks.py
import unittest.mock
import uuid
import pytest
from pantos.common.blockchains.base import BlockchainUtilitiesError
from pantos.common.blockchains.base import MaxTotalFeePerGasExceededError
from pantos.common.blockchains.factory import initialize_blockchain_utilities
from pantos.common.blockchains.tasks import _transaction_resubmission_task
from pantos.common.blockchains.tasks import \
create_transaction_resubmission_task
from pantos.common.blockchains.tasks import \
get_transaction_resubmission_task_result
from pantos.common.entities import TransactionStatus
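# Stand-in for the exception raised by Celery's task.retry; the tests patch
# _transaction_resubmission_task.retry with this class so that a requested
# retry can be asserted via pytest.raises(_RetryError).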
class _RetryError(Exception):
    def __init__(self, *args, **kwargs):
        # Accept any arguments
        pass
@unittest.mock.patch(
'pantos.common.blockchains.tasks._transaction_resubmission_task')
def test_create_transaction_resubmission_task_correct(
mock_transaction_resubmission_task, blockchain, blockchain_node_urls,
fallback_blockchain_node_urls, average_block_time,
required_transaction_confirmations, transaction_network_id,
transaction_submission_start_request, transaction_submission_response):
initialize_blockchain_utilities(blockchain, blockchain_node_urls,
fallback_blockchain_node_urls,
average_block_time,
required_transaction_confirmations,
transaction_network_id)
internal_transaction_id = create_transaction_resubmission_task(
blockchain, transaction_submission_start_request,
transaction_submission_response)
assert (str(internal_transaction_id) == mock_transaction_resubmission_task.
apply_async.call_args.kwargs['task_id'])
@pytest.mark.parametrize(
'transaction_status',
[TransactionStatus.CONFIRMED, TransactionStatus.REVERTED])
@pytest.mark.parametrize('ready', [True, False])
@unittest.mock.patch('celery.result.AsyncResult')
def test_get_transaction_resubmission_task_result_correct(
mock_async_result, ready, transaction_status, transaction_id):
mock_async_result().ready.return_value = ready
mock_async_result.return_value.state = 'SUCCESS'
mock_async_result().get.return_value = (transaction_status.value,
transaction_id)
task_result = get_transaction_resubmission_task_result(uuid.uuid4())
if ready:
assert task_result[0] is transaction_status
assert task_result[1] == transaction_id
else:
assert task_result is None
@unittest.mock.patch('celery.result.AsyncResult')
def test_get_transaction_resubmission_task_result_error(mock_async_result):
    mock_async_result().ready.return_value = True
mock_async_result.return_value.state = 'FAILURE'
mock_async_result().get.side_effect = Exception
with pytest.raises(Exception):
get_transaction_resubmission_task_result(uuid.uuid4())
@pytest.mark.parametrize('transaction_status', TransactionStatus)
@unittest.mock.patch(
'pantos.common.blockchains.tasks.get_blockchain_utilities')
@unittest.mock.patch.object(_transaction_resubmission_task, 'retry',
_RetryError)
def test_transaction_resubmission_task_correct(
mock_get_blockchain_utilities, transaction_status, blockchain,
transaction_blocks_until_resubmission, transaction_id,
transaction_resubmission_request_dict,
transaction_resubmission_response):
mock_get_blockchain_utilities().read_transaction_status.return_value = \
transaction_status
mock_get_blockchain_utilities().resubmit_transaction.return_value = \
transaction_resubmission_response
if transaction_status in [
TransactionStatus.UNINCLUDED, TransactionStatus.UNCONFIRMED
]:
with pytest.raises(_RetryError):
_transaction_resubmission_task(
blockchain.value, transaction_blocks_until_resubmission,
transaction_id, transaction_resubmission_request_dict)
else:
task_result = _transaction_resubmission_task(
blockchain.value, transaction_blocks_until_resubmission,
transaction_id, transaction_resubmission_request_dict)
assert task_result[0] == transaction_status.value
assert task_result[1] == transaction_id
@unittest.mock.patch(
'pantos.common.blockchains.tasks.get_blockchain_utilities')
@unittest.mock.patch.object(_transaction_resubmission_task, 'retry',
_RetryError)
def test_transaction_resubmission_task_read_transaction_status_error(
mock_get_blockchain_utilities, blockchain,
transaction_blocks_until_resubmission, transaction_id,
transaction_resubmission_request_dict):
mock_get_blockchain_utilities().read_transaction_status.side_effect = \
BlockchainUtilitiesError
with pytest.raises(_RetryError):
_transaction_resubmission_task(blockchain.value,
transaction_blocks_until_resubmission,
transaction_id,
transaction_resubmission_request_dict)
@pytest.mark.parametrize(
'error', [MaxTotalFeePerGasExceededError, BlockchainUtilitiesError])
@unittest.mock.patch(
'pantos.common.blockchains.tasks.get_blockchain_utilities')
@unittest.mock.patch.object(_transaction_resubmission_task, 'retry',
_RetryError)
def test_transaction_resubmission_task_resubmit_transaction_error(
mock_get_blockchain_utilities, error, blockchain,
transaction_blocks_until_resubmission, transaction_id,
transaction_resubmission_request_dict):
mock_get_blockchain_utilities().read_transaction_status.return_value = \
TransactionStatus.UNINCLUDED
mock_get_blockchain_utilities().resubmit_transaction.side_effect = error
with pytest.raises(_RetryError):
_transaction_resubmission_task(blockchain.value,
transaction_blocks_until_resubmission,
transaction_id,
transaction_resubmission_request_dict)

# File: pantos-io_common/tests/blockchains/test_enums.py
import pytest
from pantos.common.blockchains.enums import Blockchain
from pantos.common.blockchains.enums import ContractAbi
@pytest.mark.parametrize(('blockchain', 'name_in_pascal_case'),
[(Blockchain.ETHEREUM, 'Ethereum'),
(Blockchain.BNB_CHAIN, 'BnbChain'),
(Blockchain.AVALANCHE, 'Avalanche')])
def test_blockchain_name_in_pascal_case_correct(blockchain,
name_in_pascal_case):
assert blockchain.name_in_pascal_case == name_in_pascal_case
@pytest.mark.parametrize('blockchain',
[blockchain for blockchain in Blockchain])
def test_blockchain_from_name_correct(blockchain):
assert Blockchain.from_name(blockchain.name.lower()) is blockchain
assert Blockchain.from_name(blockchain.name.upper()) is blockchain
def test_blockchain_from_name_error():
with pytest.raises(NameError):
Blockchain.from_name('unknown_blockchain')
@pytest.mark.parametrize(
('contract_abi', 'blockchain', 'file_name'),
[(ContractAbi.PANTOS_HUB, Blockchain.ETHEREUM, 'ethereum_pantos_hub.abi'),
(ContractAbi.STANDARD_TOKEN, Blockchain.BNB_CHAIN,
'bnb_chain_standard_token.abi'),
(ContractAbi.PANTOS_FORWARDER, Blockchain.CELO,
'celo_pantos_forwarder.abi'),
(ContractAbi.PANTOS_TOKEN, Blockchain.AVALANCHE,
'avalanche_pantos_token.abi')])
def test_contract_abi_get_file_name_correct(contract_abi, blockchain,
file_name):
assert contract_abi.get_file_name(blockchain) == file_name

# File: pantos-io_common/tests/blockchains/test_ethereum.py
import importlib.resources
import unittest.mock
import hexbytes
import pytest
import web3
import web3.exceptions
from pantos.common.blockchains.base import GENERAL_RPC_ERROR_MESSAGE
from pantos.common.blockchains.base import NodeConnections
from pantos.common.blockchains.base import ResultsNotMatchingError
from pantos.common.blockchains.base import SingleNodeConnectionError
from pantos.common.blockchains.base import TransactionNonceTooLowError
from pantos.common.blockchains.base import TransactionUnderpricedError
from pantos.common.blockchains.base import UnhealthyNode
from pantos.common.blockchains.enums import Blockchain
from pantos.common.blockchains.ethereum import _NO_ARCHIVE_NODE_LOG_MESSAGE
from pantos.common.blockchains.ethereum import \
_NO_ARCHIVE_NODE_RPC_ERROR_MESSAGE
from pantos.common.blockchains.ethereum import _TRANSACTION_METHOD_NAMES
from pantos.common.blockchains.ethereum import EthereumUtilities
from pantos.common.blockchains.ethereum import EthereumUtilitiesError
from pantos.common.entities import TransactionStatus
_CONTRACT_ABI_PACKAGE = 'tests.blockchains.contracts'
"""Package that contains the contract ABI files."""
_ERC20_CONTRACT_ABI = 'ethereum_erc20.abi'
"""File name of the ERC20 token contract ABI."""
_ERC20_CONTRACT_BYTECODE = 'ethereum_erc20.bytecode'
"""File name of the ERC20 token contract bytecode."""
@pytest.fixture(scope='module')
def w3():
return web3.Web3(web3.EthereumTesterProvider())
@pytest.fixture(scope='module')
def node_connections(w3):
node_connections = NodeConnections[web3.Web3](_TRANSACTION_METHOD_NAMES)
node_connections.add_node_connection(w3)
return node_connections
@pytest.fixture(scope='module')
@unittest.mock.patch.object(EthereumUtilities, 'create_node_connections')
def ethereum_utilities(mock_create_node_connections, blockchain_node_urls,
fallback_blockchain_node_urls, average_block_time,
required_transaction_confirmations,
transaction_network_id, account, node_connections):
ethereum_utilities = EthereumUtilities(
blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time, required_transaction_confirmations,
transaction_network_id,
default_private_key=(account.keystore, account.keystore_password),
celery_tasks_enabled=True)
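    # Route all node connections through the shared
    # EthereumTesterProvider-backed instance.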
mock_create_node_connections.return_value = node_connections
ethereum_utilities.create_node_connections = mock_create_node_connections
return ethereum_utilities
@pytest.fixture
def deployed_erc20(w3, node_connections):
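    # Deploy the bundled ERC20 test contract from its ABI and bytecode and
    # return a contract handle bound to the deployed address.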
default_account = w3.eth.accounts[0]
with importlib.resources.open_text(
_CONTRACT_ABI_PACKAGE, _ERC20_CONTRACT_BYTECODE) as bytecode_file:
bytecode = bytecode_file.read()
with importlib.resources.open_text(_CONTRACT_ABI_PACKAGE,
_ERC20_CONTRACT_ABI) as abi_file:
erc20_abi = abi_file.read()
erc20_contract = node_connections.eth.contract(abi=erc20_abi,
bytecode=bytecode)
tx_hash = erc20_contract.constructor(1000, 'TOK', 2, 'TOK').transact(
{'from': default_account})
tx_receipt = w3.eth.wait_for_transaction_receipt(tx_hash, 180)
return erc20_contract(tx_receipt.contractAddress)
def test_get_address(ethereum_utilities, account):
address = ethereum_utilities.get_address(account.private_key)
assert address == account.address
def test_get_coin_balance_returns_0_correct(ethereum_utilities, account):
assert ethereum_utilities.get_balance(account.address) == 0
def test_get_coin_balance_returns_1000000_correct(ethereum_utilities, w3):
balance = ethereum_utilities.get_balance(w3.eth.accounts[0])
assert balance == w3.to_wei(1000000, 'ether')
def test_get_coin_balance_returns_1_correct(ethereum_utilities, account, w3):
default_account = w3.eth.accounts[0]
w3.eth.send_transaction({
'to': account.address,
'from': default_account,
'value': w3.to_wei(1, 'ether')
})
balance = ethereum_utilities.get_balance(account.address)
assert balance == w3.to_wei(1, 'ether')
def test_get_token_balance_correct(ethereum_utilities, deployed_erc20, w3):
default_account = w3.eth.accounts[0]
balance = ethereum_utilities.get_balance(default_account,
deployed_erc20.address.get())
assert balance == 1000
def test_get_token_balance_wrong_address_raises_error(ethereum_utilities, w3):
default_account = w3.eth.accounts[0]
with pytest.raises(EthereumUtilitiesError):
ethereum_utilities.get_balance(default_account, '0x0')
def test_get_coin_balance_wrong_address_raises_error(ethereum_utilities):
with pytest.raises(EthereumUtilitiesError):
ethereum_utilities.get_balance('0x0')
def test_get_coin_balance_error(ethereum_utilities, w3):
default_account = w3.eth.accounts[0]
mocked_node_connections = unittest.mock.Mock()
mocked_node_connections.eth.get_balance.side_effect = \
Exception
with pytest.raises(EthereumUtilitiesError):
ethereum_utilities.get_balance(
default_account, node_connections=mocked_node_connections)
def test_get_coin_balance_results_not_matching_error(ethereum_utilities, w3):
default_account = w3.eth.accounts[0]
mocked_node_connections = unittest.mock.Mock()
mocked_node_connections.eth.get_balance.side_effect = \
ResultsNotMatchingError
with pytest.raises(ResultsNotMatchingError):
ethereum_utilities.get_balance(
default_account, node_connections=mocked_node_connections)
@unittest.mock.patch.object(EthereumUtilities, 'create_contract')
def test_get_token_balance_error(mocked_create_contract, ethereum_utilities,
node_connections, deployed_erc20):
mocked_contract = unittest.mock.Mock()
mocked_contract.functions.balanceOf.side_effect = Exception
default_account = node_connections.eth.accounts[0]
mocked_create_contract.return_value = mocked_contract
with pytest.raises(EthereumUtilitiesError):
ethereum_utilities.get_balance(default_account,
deployed_erc20.address.get(),
node_connections)
@unittest.mock.patch.object(EthereumUtilities, 'create_contract')
def test_get_token_results_not_matching_error(mocked_create_contract,
ethereum_utilities, w3,
deployed_erc20):
mocked_contract = unittest.mock.Mock()
mocked_contract.functions.balanceOf.side_effect = ResultsNotMatchingError
default_account = w3.eth.accounts[0]
mocked_create_contract.return_value = mocked_contract
with pytest.raises(ResultsNotMatchingError):
ethereum_utilities.get_balance(default_account,
deployed_erc20.address.get())
def test_get_logs_correct(ethereum_utilities, deployed_erc20):
transfer_event = deployed_erc20.events.Transfer()
assert ethereum_utilities.get_logs(transfer_event, 0, 0) == ()
def test_get_logs_error(ethereum_utilities, deployed_erc20):
transfer_event = deployed_erc20.events.Transfer()
with pytest.raises(Exception):
ethereum_utilities.get_logs(transfer_event, 0, 1000)
def test_is_valid_address(ethereum_utilities):
# Valid Ethereum checksum addresses
assert ethereum_utilities.is_valid_address(
'0x2F64230f0AFFCA54563958caF89c9710f132cFe3')
assert ethereum_utilities.is_valid_address(
'0x5bD723CdfDa91B63aF3ff6BeC26443D5805a478B')
assert ethereum_utilities.is_valid_address(
'0x8C6C886E27477Fcb722c9b25225a99239309eF40')
# Invalid Ethereum checksum addresses
assert not ethereum_utilities.is_valid_address(
'0x2f64230f0affca54563958caf89c9710f132cfe3')
assert not ethereum_utilities.is_valid_address(
'0x5bd723cdfda91b63af3ff6bec26443d5805a478b')
assert not ethereum_utilities.is_valid_address(
'0x8c6c886e27477fcb722c9b25225a99239309ef40')
assert not ethereum_utilities.is_valid_address(None)
assert not ethereum_utilities.is_valid_address(0)
assert not ethereum_utilities.is_valid_address(1)
assert not ethereum_utilities.is_valid_address('')
assert not ethereum_utilities.is_valid_address(' ')
def test_get_transaction_method_names_correct(ethereum_utilities):
web3_transaction_method_name = \
ethereum_utilities._get_transaction_method_names()
assert web3_transaction_method_name == [
'send_transaction', 'replace_transaction', 'modify_transaction',
'send_raw_transaction', 'transact'
]
def test_is_equal_address(ethereum_utilities):
address = '0x2F64230f0AFFCA54563958caF89c9710f132cFe3'
assert ethereum_utilities.is_equal_address(address, address) is True
assert ethereum_utilities.is_equal_address(address.lower(), address) \
is True
assert ethereum_utilities.is_equal_address(address, address.lower()) \
is True
assert ethereum_utilities.is_equal_address(address.lower(),
address.lower()) is True
@unittest.mock.patch.object(EthereumUtilities,
'_create_single_node_connection')
def test_get_unhealthy_nodes_with_unreachable_nodes_correct(
mocked_create_single_node_connection, ethereum_utilities):
mocked_create_single_node_connection.side_effect = \
SingleNodeConnectionError
blockchain_nodes = [
'http://node1.example.com', 'http://node2.example.com',
'http://node3.example.com'
]
expected_unhealthy_nodes = [
UnhealthyNode('node1.example.com', GENERAL_RPC_ERROR_MESSAGE),
UnhealthyNode('node2.example.com', GENERAL_RPC_ERROR_MESSAGE),
UnhealthyNode('node3.example.com', GENERAL_RPC_ERROR_MESSAGE)
]
unhealthy_nodes = ethereum_utilities.get_unhealthy_nodes(blockchain_nodes)
assert unhealthy_nodes == expected_unhealthy_nodes
@unittest.mock.patch.object(EthereumUtilities,
'_create_single_node_connection')
def test_get_unhealthy_nodes_with_reachable_nodes_correct(
mocked_create_single_node_connection, ethereum_utilities):
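    # No side effect is configured, so every mocked connection attempt
    # succeeds and no node is reported as unhealthy.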
blockchain_nodes = [
'http://reachable1.example.com', 'http://reachable2.example.com',
'http://reachable3.example.com'
]
unhealthy_nodes = ethereum_utilities.get_unhealthy_nodes(blockchain_nodes)
assert unhealthy_nodes == []
def test_decrypt_private_encrypted_key(ethereum_utilities, account):
with open(account.keystore_path, 'r') as file:
private_key = ethereum_utilities.decrypt_private_key(
file.read(), account.keystore_password)
assert private_key == account.private_key
def test_get_blockchain_correct(ethereum_utilities):
assert ethereum_utilities.get_blockchain() is Blockchain.ETHEREUM
assert EthereumUtilities.get_blockchain() is Blockchain.ETHEREUM
def test_get_error_class_correct(ethereum_utilities):
assert ethereum_utilities.get_error_class() is EthereumUtilitiesError
assert EthereumUtilities.get_error_class() is EthereumUtilitiesError
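# Parameters: (inclusion block number, receipt status, expected outcome).
# With the chain head mocked at block 1000 and 20 required confirmations,
# inclusion at block 980 is final, block 990 is still unconfirmed, and
# None means the transaction has not been included yet.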
@unittest.mock.patch.object(EthereumUtilities,
'_EthereumUtilities__retrieve_revert_message',
return_value='revert message')
@pytest.mark.parametrize('transaction_parameters',
[(980, 1, TransactionStatus.CONFIRMED),
(990, 1, TransactionStatus.UNCONFIRMED),
(None, 1, TransactionStatus.UNINCLUDED),
(980, 0, TransactionStatus.REVERTED),
(990, 0, TransactionStatus.UNCONFIRMED),
(None, 0, TransactionStatus.UNINCLUDED)])
def test_read_transaction_status_correct(mocked_retrieve_revert_message,
transaction_parameters,
ethereum_utilities, node_connections,
w3, transaction_id):
mock_transaction_receipt = {
'transactionHash': hexbytes.HexBytes(transaction_id),
'blockNumber': transaction_parameters[0],
'status': transaction_parameters[1]
}
with unittest.mock.patch.object(w3.eth, 'get_transaction_receipt',
return_value=mock_transaction_receipt):
with unittest.mock.patch.object(w3.eth, 'get_block_number',
return_value=1000):
transaction_status = ethereum_utilities.read_transaction_status(
transaction_id, node_connections)
assert transaction_status is transaction_parameters[2]
def test_read_transaction_status_transaction_not_found_correct(
ethereum_utilities, node_connections, transaction_id):
with unittest.mock.patch.object(
node_connections.eth, 'get_transaction_receipt',
side_effect=web3.exceptions.TransactionNotFound):
transaction_status = ethereum_utilities.read_transaction_status(
transaction_id)
assert transaction_status is TransactionStatus.UNINCLUDED
def test_read_transaction_status_error(ethereum_utilities, w3, transaction_id):
with unittest.mock.patch.object(w3.eth, 'get_transaction_receipt',
side_effect=Exception):
with pytest.raises(EthereumUtilitiesError) as exception_info:
ethereum_utilities.read_transaction_status(transaction_id)
assert exception_info.value.details['transaction_id'] == transaction_id
def test_read_transaction_status_results_not_matching_error(
ethereum_utilities, w3, transaction_id):
with unittest.mock.patch.object(w3.eth, 'get_transaction_receipt',
side_effect=ResultsNotMatchingError):
with pytest.raises(ResultsNotMatchingError):
ethereum_utilities.read_transaction_status(transaction_id)
@pytest.mark.parametrize('type_2_transaction', [True, False])
@unittest.mock.patch.object(EthereumUtilities,
'_type_2_transactions_supported')
@unittest.mock.patch.object(EthereumUtilities, 'create_contract')
def test_submit_transaction_correct(mock_create_contract,
mock_type_2_transactions_supported,
type_2_transaction, ethereum_utilities, w3,
transaction_submission_request,
transaction_id):
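    # Covers both EIP-1559 (type-2) and legacy gas-price transactions.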
mock_type_2_transactions_supported.return_value = type_2_transaction
with unittest.mock.patch(
'pantos.common.blockchains.ethereum.web3.Account.sign_transaction'
):
with unittest.mock.patch.object(
w3.eth, 'get_block', return_value={'baseFeePerGas': int(1e8)}):
with unittest.mock.patch.object(
w3.eth,
'send_raw_transaction') as mock_send_raw_transaction:
mock_send_raw_transaction().to_0x_hex.return_value = \
transaction_id
response = ethereum_utilities.submit_transaction(
transaction_submission_request)
assert response.transaction_id == transaction_id
def test_submit_transaction_default_private_key_error(
ethereum_utilities, transaction_submission_request):
with unittest.mock.patch.object(ethereum_utilities, '_default_private_key',
None):
with pytest.raises(EthereumUtilitiesError):
ethereum_utilities.submit_transaction(
transaction_submission_request)
def test_submit_transaction_gas_error(ethereum_utilities,
transaction_submission_request):
transaction_submission_request.gas = 1000
with pytest.raises(EthereumUtilitiesError):
ethereum_utilities.submit_transaction(transaction_submission_request)
def test_submit_transaction_min_adaptable_fee_per_gas_error(
ethereum_utilities, transaction_submission_request):
transaction_submission_request.min_adaptable_fee_per_gas = -1
with pytest.raises(EthereumUtilitiesError):
ethereum_utilities.submit_transaction(transaction_submission_request)
def test_submit_transaction_max_total_fee_per_gas_error(
ethereum_utilities, transaction_submission_request):
transaction_submission_request.max_total_fee_per_gas = \
transaction_submission_request.min_adaptable_fee_per_gas - 1
with pytest.raises(EthereumUtilitiesError):
ethereum_utilities.submit_transaction(transaction_submission_request)
def test_submit_transaction_amount_error(ethereum_utilities,
transaction_submission_request):
transaction_submission_request.amount = -1
with pytest.raises(EthereumUtilitiesError):
ethereum_utilities.submit_transaction(transaction_submission_request)
def test_submit_transaction_nonce_error(ethereum_utilities,
transaction_submission_request):
transaction_submission_request.nonce = -1
with pytest.raises(EthereumUtilitiesError):
ethereum_utilities.submit_transaction(transaction_submission_request)
@unittest.mock.patch.object(EthereumUtilities, 'create_contract')
def test_submit_transaction_max_fee_per_gas_error(
mock_create_contract, ethereum_utilities, w3,
transaction_submission_request, transaction_id):
base_fee_per_gas = int(1e8)
transaction_submission_request.max_total_fee_per_gas = (
base_fee_per_gas +
transaction_submission_request.min_adaptable_fee_per_gas)
with unittest.mock.patch(
'pantos.common.blockchains.ethereum.web3.Account.sign_transaction'
):
with unittest.mock.patch.object(
w3.eth, 'get_block',
return_value={'baseFeePerGas': base_fee_per_gas}):
with pytest.raises(EthereumUtilitiesError):
ethereum_utilities.submit_transaction(
transaction_submission_request)
@unittest.mock.patch.object(EthereumUtilities,
'_type_2_transactions_supported',
return_value=False)
@unittest.mock.patch.object(EthereumUtilities, 'create_contract')
def test_submit_transaction_gas_price_error(
mock_create_contract, mock_type_2_transactions_supported,
ethereum_utilities, w3, transaction_submission_request,
transaction_id):
transaction_submission_request.min_adaptable_fee_per_gas = 1
transaction_submission_request.max_total_fee_per_gas = 1
with unittest.mock.patch(
'pantos.common.blockchains.ethereum.web3.Account.sign_transaction'
):
with pytest.raises(EthereumUtilitiesError):
ethereum_utilities.submit_transaction(
transaction_submission_request)
@unittest.mock.patch.object(EthereumUtilities, 'create_contract')
def test_submit_transaction_nonce_too_low_error(
mock_create_contract, ethereum_utilities, w3,
transaction_submission_request):
with unittest.mock.patch(
'pantos.common.blockchains.ethereum.web3.Account.sign_transaction'
):
with unittest.mock.patch.object(
w3.eth, 'send_raw_transaction', side_effect=ValueError({
'code': '-32000',
'message': 'nonce too low'
})):
with pytest.raises(TransactionNonceTooLowError):
ethereum_utilities.submit_transaction(
transaction_submission_request)
@unittest.mock.patch.object(EthereumUtilities, 'create_contract')
def test_submit_transaction_underpriced_error(mock_create_contract,
ethereum_utilities, w3,
transaction_submission_request):
with unittest.mock.patch(
'pantos.common.blockchains.ethereum.web3.Account.sign_transaction'
):
with unittest.mock.patch.object(
w3.eth, 'send_raw_transaction', side_effect=ValueError({
'code': '-32000',
'message': 'transaction underpriced'
})):
with pytest.raises(TransactionUnderpricedError):
ethereum_utilities.submit_transaction(
transaction_submission_request)
@unittest.mock.patch.object(EthereumUtilities, 'create_contract')
def test_submit_transaction_other_send_error(mock_create_contract,
ethereum_utilities, w3,
transaction_submission_request):
with unittest.mock.patch(
'pantos.common.blockchains.ethereum.web3.Account.sign_transaction'
):
with unittest.mock.patch.object(w3.eth, 'send_raw_transaction',
side_effect=ValueError('some error')):
with pytest.raises(EthereumUtilitiesError):
ethereum_utilities.submit_transaction(
transaction_submission_request)
@unittest.mock.patch.object(EthereumUtilities, 'create_contract')
def test_submit_transaction_results_not_matching_error(
mock_create_contract, ethereum_utilities, w3,
transaction_submission_request):
with unittest.mock.patch(
'pantos.common.blockchains.ethereum.web3.Account.sign_transaction'
):
with unittest.mock.patch.object(w3.eth, 'send_raw_transaction',
side_effect=ResultsNotMatchingError):
with pytest.raises(ResultsNotMatchingError):
ethereum_utilities.submit_transaction(
transaction_submission_request)
@unittest.mock.patch('pantos.common.blockchains.ethereum.web3')
def test_create_single_node_connection_correct(mocked_web3, ethereum_utilities,
blockchain_node_urls):
blockchain_node_url = blockchain_node_urls[0]
result = ethereum_utilities._create_single_node_connection(
blockchain_node_url)
assert result == mocked_web3.Web3(
mocked_web3.Web3.HTTPProvider(blockchain_node_url))
@unittest.mock.patch('pantos.common.blockchains.ethereum.web3')
def test_create_single_node_connection_extra_data_length_correct(
mocked_web3, ethereum_utilities, blockchain_node_urls):
mocked_web3.exceptions.ExtraDataLengthError = \
web3.exceptions.ExtraDataLengthError
blockchain_node_url = blockchain_node_urls[0]
mocked_web3.Web3().is_connected.return_value = True
mocked_web3.Web3(
).eth.get_block.side_effect = web3.exceptions.ExtraDataLengthError
result = ethereum_utilities._create_single_node_connection(
blockchain_node_url)
assert result == mocked_web3.Web3(
mocked_web3.Web3.HTTPProvider(blockchain_node_url))
mocked_web3.Web3().middleware_onion.inject.assert_called_once_with(
mocked_web3.middleware.geth_poa_middleware, layer=0)
@unittest.mock.patch('pantos.common.blockchains.ethereum.web3')
def test_create_single_node_connection_error(mocked_web3, blockchain_node_urls,
ethereum_utilities):
blockchain_node_url = blockchain_node_urls[0]
mocked_web3.Web3.side_effect = Exception
with pytest.raises(EthereumUtilitiesError):
ethereum_utilities._create_single_node_connection(blockchain_node_url)
@unittest.mock.patch('pantos.common.blockchains.ethereum.web3')
def test_create_single_node_connection_not_connected_error(
mocked_web3, blockchain_node_urls, ethereum_utilities):
blockchain_node_url = blockchain_node_urls[0]
mocked_web3.Web3().is_connected.return_value = False
with pytest.raises(EthereumUtilitiesError):
ethereum_utilities._create_single_node_connection(blockchain_node_url)
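# The revert message is recovered by replaying the transaction via eth.call;
# the raised ContractLogicError carries the revert reason.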
def test_retrieve_revert_message_correct(ethereum_utilities, w3,
node_connections, transaction_id,
transaction_contract_address):
default_account = w3.eth.accounts[0]
with unittest.mock.patch.object(
w3.eth, 'get_transaction', return_value={
'from': default_account,
'to': transaction_contract_address,
'value': 0,
'input': "",
'blockNumber': 1,
}):
with unittest.mock.patch.object(
w3.eth, 'call', side_effect=web3.exceptions.ContractLogicError(
'revert message')):
assert \
ethereum_utilities._EthereumUtilities__retrieve_revert_message(
transaction_id, node_connections) == 'revert message'
def test_retrieve_revert_message_no_archive_node_available_error(
ethereum_utilities, w3, node_connections, transaction_id,
transaction_contract_address):
default_account = w3.eth.accounts[0]
with unittest.mock.patch.object(
w3.eth, 'get_transaction', return_value={
'from': default_account,
'to': transaction_contract_address,
'value': 0,
'input': "",
'blockNumber': 1,
}):
with unittest.mock.patch.object(
w3.eth, 'call', side_effect=ValueError({
'message': f'{_NO_ARCHIVE_NODE_RPC_ERROR_MESSAGE} 0x...'
})):
assert \
ethereum_utilities._EthereumUtilities__retrieve_revert_message(
transaction_id, node_connections) == \
f'unknown {_NO_ARCHIVE_NODE_LOG_MESSAGE}'
def test_retrieve_revert_message_correct_no_error(
ethereum_utilities, w3, node_connections, transaction_id,
transaction_contract_address):
default_account = w3.eth.accounts[0]
with unittest.mock.patch.object(
w3.eth, 'get_transaction', return_value={
'from': default_account,
'to': transaction_contract_address,
'value': 0,
'input': "",
'blockNumber': 1,
}):
with unittest.mock.patch.object(w3.eth, 'call', return_value=''):
assert \
ethereum_utilities._EthereumUtilities__retrieve_revert_message(
transaction_id, node_connections) == 'unknown'
def test_retrieve_revert_message_correct_error(ethereum_utilities, w3,
node_connections,
transaction_id,
transaction_contract_address):
default_account = w3.eth.accounts[0]
with unittest.mock.patch.object(
w3.eth, 'get_transaction', return_value={
'from': default_account,
'to': transaction_contract_address,
'value': 0,
'input': "",
'blockNumber': 1,
}):
with unittest.mock.patch.object(w3.eth, 'call', side_effect=Exception):
assert \
ethereum_utilities._EthereumUtilities__retrieve_revert_message(
transaction_id, node_connections) == 'unknown'

# File: pantos-io_common/tests/blockchains/test_bnbchain.py
import pytest
from pantos.common.blockchains.bnbchain import BnbChainUtilities
from pantos.common.blockchains.bnbchain import BnbChainUtilitiesError
from pantos.common.blockchains.enums import Blockchain
@pytest.fixture(scope='module')
def bnb_chain_utilities(blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time, required_transaction_confirmations,
transaction_network_id):
return BnbChainUtilities(blockchain_node_urls,
fallback_blockchain_node_urls, average_block_time,
required_transaction_confirmations,
transaction_network_id)
def test_get_blockchain_correct(bnb_chain_utilities):
assert bnb_chain_utilities.get_blockchain() is Blockchain.BNB_CHAIN
assert BnbChainUtilities.get_blockchain() is Blockchain.BNB_CHAIN
def test_get_error_class_correct(bnb_chain_utilities):
assert bnb_chain_utilities.get_error_class() is BnbChainUtilitiesError
assert BnbChainUtilities.get_error_class() is BnbChainUtilitiesError

# File: pantos-io_common/tests/blockchains/test_sonic.py
import pytest
from pantos.common.blockchains.enums import Blockchain
from pantos.common.blockchains.sonic import SonicUtilities
from pantos.common.blockchains.sonic import SonicUtilitiesError
@pytest.fixture(scope='module')
def sonic_utilities(blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time, required_transaction_confirmations,
transaction_network_id):
return SonicUtilities(blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time,
required_transaction_confirmations,
transaction_network_id)
def test_get_blockchain_correct(sonic_utilities):
assert sonic_utilities.get_blockchain() is Blockchain.SONIC
assert SonicUtilities.get_blockchain() is Blockchain.SONIC
def test_get_error_class_correct(sonic_utilities):
assert sonic_utilities.get_error_class() is SonicUtilitiesError
assert SonicUtilities.get_error_class() is SonicUtilitiesError

# File: pantos-io_common/tests/blockchains/test_solana.py
import pytest
from pantos.common.blockchains.enums import Blockchain
from pantos.common.blockchains.solana import SolanaUtilities
from pantos.common.blockchains.solana import SolanaUtilitiesError
@pytest.fixture(scope='module')
def solana_utilities(blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time, required_transaction_confirmations,
transaction_network_id):
return SolanaUtilities(blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time,
required_transaction_confirmations,
transaction_network_id)
def test_get_blockchain_correct(solana_utilities):
assert solana_utilities.get_blockchain() is Blockchain.SOLANA
assert SolanaUtilities.get_blockchain() is Blockchain.SOLANA
def test_get_error_class_correct(solana_utilities):
assert solana_utilities.get_error_class() is SolanaUtilitiesError
assert SolanaUtilities.get_error_class() is SolanaUtilitiesError
def test_is_equal_address_not_implemented(solana_utilities):
with pytest.raises(NotImplementedError):
solana_utilities.is_equal_address('address_one', 'address_two')

# File: pantos-io_common/tests/blockchains/test_base.py
import dataclasses
import importlib
import json
import pathlib
import unittest.mock
import uuid
import pytest
from pantos.common.blockchains.base import _BASE_CONTRACT_ABI_PACKAGE
from pantos.common.blockchains.base import BlockchainUtilities
from pantos.common.blockchains.base import BlockchainUtilitiesError
from pantos.common.blockchains.base import MaxTotalFeePerGasExceededError
from pantos.common.blockchains.base import NodeConnectionError
from pantos.common.blockchains.base import NodeConnections
from pantos.common.blockchains.base import ResultsNotMatchingError
from pantos.common.blockchains.base import SingleNodeConnectionError
from pantos.common.blockchains.base import TransactionUnderpricedError
from pantos.common.blockchains.base import VersionedContractAbi
from pantos.common.blockchains.enums import ContractAbi
from pantos.common.entities import TransactionStatus
_CONTRACT_ABI = '''
[
{
"inputs": [],
"name": "getAddress",
"outputs": [
{
"internalType": "address",
"name": "",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "address_",
"type": "address"
}
],
"name": "setAddress",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
}
]
'''
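# Clearing __abstractmethods__ allows the abstract BlockchainUtilities base
# class to be instantiated directly for these tests.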
@pytest.fixture
@unittest.mock.patch.object(BlockchainUtilities, '__abstractmethods__', set())
def blockchain_utilities(blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time,
required_transaction_confirmations,
transaction_network_id, account):
return BlockchainUtilities(
blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time, required_transaction_confirmations,
transaction_network_id,
default_private_key=(account.keystore, account.keystore_password),
celery_tasks_enabled=True)
@pytest.mark.parametrize('celery_tasks_enabled', [True, False])
@unittest.mock.patch.object(BlockchainUtilities, 'decrypt_private_key')
@unittest.mock.patch.object(BlockchainUtilities, 'get_address')
@unittest.mock.patch.object(BlockchainUtilities, '__abstractmethods__', set())
def test_init_correct(mock_get_address, mock_decrypt_private_key,
celery_tasks_enabled, blockchain_node_urls,
fallback_blockchain_node_urls, average_block_time,
required_transaction_confirmations,
transaction_network_id, account):
mock_get_address.return_value = account.address
mock_decrypt_private_key.return_value = account.private_key
blockchain_utilities = BlockchainUtilities(
blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time, required_transaction_confirmations,
transaction_network_id,
default_private_key=(account.keystore, account.keystore_password),
celery_tasks_enabled=celery_tasks_enabled)
assert blockchain_utilities.average_block_time == average_block_time
assert (blockchain_utilities.required_transaction_confirmations ==
required_transaction_confirmations)
assert (
blockchain_utilities.transaction_network_id == transaction_network_id)
assert blockchain_utilities._blockchain_node_urls == blockchain_node_urls
assert blockchain_utilities._default_private_key == account.private_key
assert blockchain_utilities._default_address == account.address
assert blockchain_utilities._celery_tasks_enabled == celery_tasks_enabled
@pytest.mark.parametrize('replaced_arg', [2, 3, 4])
@unittest.mock.patch.object(BlockchainUtilities, 'get_error_class',
return_value=BlockchainUtilitiesError)
@unittest.mock.patch.object(BlockchainUtilities, '__abstractmethods__', set())
def test_init_error(mock_get_error_class, replaced_arg, blockchain_node_urls,
fallback_blockchain_node_urls, average_block_time,
required_transaction_confirmations,
transaction_network_id):
args = [
blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time, required_transaction_confirmations,
transaction_network_id
]
args[replaced_arg] = -1
with pytest.raises(BlockchainUtilitiesError):
BlockchainUtilities(*args)
@unittest.mock.patch.object(BlockchainUtilities, 'get_error_class',
return_value=BlockchainUtilitiesError)
@unittest.mock.patch.object(BlockchainUtilities, '__abstractmethods__', set())
def test_init_no_blockchain_node_uri_error(mock_get_error_class,
average_block_time,
required_transaction_confirmations,
transaction_network_id):
args = [[], [], average_block_time, required_transaction_confirmations,
transaction_network_id]
with pytest.raises(BlockchainUtilitiesError):
BlockchainUtilities(*args)
@unittest.mock.patch.object(BlockchainUtilities,
'_get_transaction_method_names', return_value=[''])
@unittest.mock.patch.object(BlockchainUtilities,
'_create_single_node_connection')
def test_create_node_connection_correct(mocked_create_single_node_connection,
mocked_get_transaction_method_names,
blockchain_utilities):
mocked_create_single_node_connection.return_value = 'test_connection'
node_connections = blockchain_utilities.create_node_connections()
assert node_connections._NodeConnections__node_connections == [
'test_connection'
]
@unittest.mock.patch.object(BlockchainUtilities,
'_get_transaction_method_names', return_value=[''])
@unittest.mock.patch.object(BlockchainUtilities,
'_create_single_node_connection')
def test_create_node_connection_fallback_not_used_twice_correct(
mocked_create_single_node_connection,
mocked_get_transaction_method_names, blockchain_utilities):
blockchain_utilities._blockchain_node_urls = ['node1', 'node2']
blockchain_utilities._fallback_blockchain_node_urls = [
'fallback_node1', 'fallback_node2'
]
    def create_single_node_connection(node_url, _):
        if node_url.startswith('fallback'):
            return node_url
        raise SingleNodeConnectionError()
    mocked_create_single_node_connection.side_effect = \
        create_single_node_connection
node_connections = blockchain_utilities.create_node_connections()
assert node_connections._NodeConnections__node_connections == [
'fallback_node1', 'fallback_node2'
]
assert blockchain_utilities._fallback_blockchain_node_urls == [
'fallback_node1', 'fallback_node2'
    ]  # testing for unintended side effect
@unittest.mock.patch.object(BlockchainUtilities, 'get_error_class',
return_value=BlockchainUtilitiesError)
@unittest.mock.patch.object(BlockchainUtilities,
'_get_transaction_method_names', return_value=[''])
@unittest.mock.patch.object(BlockchainUtilities,
'_create_single_node_connection')
def test_create_node_connection_no_node_connection_valid(
mocked_create_single_node_connection,
mocked_get_transaction_method_names, mock_get_error_class,
blockchain_utilities):
blockchain_utilities._blockchain_node_urls = ['node1', 'node2']
blockchain_utilities._fallback_blockchain_node_urls = [
'fallback_node1', 'fallback_node2'
]
mocked_create_single_node_connection.side_effect = \
SingleNodeConnectionError()
with pytest.raises(BlockchainUtilitiesError):
blockchain_utilities.create_node_connections()
@unittest.mock.patch.object(ContractAbi, 'get_file_name')
def test_load_contract_abi_correct(mock_get_file_name, blockchain_utilities,
protocol_version):
abi_file_name = f'{uuid.uuid4()}.abi'
mock_get_file_name.return_value = abi_file_name
module_to_import = (
f'{_BASE_CONTRACT_ABI_PACKAGE}.v{protocol_version.major}_'
f'{protocol_version.minor}_{protocol_version.patch}')
module = importlib.import_module(module_to_import)
abi_file_path = pathlib.Path(module.__file__).parent / abi_file_name
contract_abi = list(ContractAbi)[0]
versioned_contract_abi = VersionedContractAbi(contract_abi,
protocol_version)
contract_abi_list = json.loads(_CONTRACT_ABI)
try:
with abi_file_path.open('w') as abi_file:
abi_file.write(_CONTRACT_ABI)
loaded_contract_abi_list = blockchain_utilities.load_contract_abi(
versioned_contract_abi)
finally:
abi_file_path.unlink()
assert loaded_contract_abi_list == contract_abi_list
# Make sure that a cached version is returned when the function is
# invoked again (loading the ABI again from the file would fail
# since the file has already been deleted)
loaded_contract_abi_list = blockchain_utilities.load_contract_abi(
versioned_contract_abi)
assert loaded_contract_abi_list == contract_abi_list
@unittest.mock.patch.object(ContractAbi, 'get_file_name',
return_value=f'{uuid.uuid4()}.abi')
@unittest.mock.patch.object(BlockchainUtilities, 'get_error_class',
return_value=BlockchainUtilitiesError)
def test_load_contract_abi_error(mock_get_error_class, mock_get_file_name,
blockchain_utilities, protocol_version):
versioned_contract_abi = VersionedContractAbi(
list(ContractAbi)[0], protocol_version)
with pytest.raises(BlockchainUtilitiesError):
blockchain_utilities.load_contract_abi(versioned_contract_abi)
def test_load_contract_abi_file_existence(blockchain_utilities,
active_blockchain,
versioned_contract_abi):
with unittest.mock.patch.object(blockchain_utilities, 'get_blockchain',
return_value=active_blockchain):
contract_abi = blockchain_utilities.load_contract_abi(
versioned_contract_abi)
assert len(contract_abi) > 0
@pytest.mark.parametrize('underpriced_submissions', [0, 1, 10])
@pytest.mark.parametrize('min_adaptable_fee_per_gas', [0, int(1e6), int(1e9)])
@unittest.mock.patch.object(BlockchainUtilities, 'submit_transaction')
def test_resubmit_transaction_correct(mock_submit_transaction,
min_adaptable_fee_per_gas,
underpriced_submissions,
blockchain_utilities,
transaction_resubmission_request,
transaction_submission_response):
mock_submit_transaction.side_effect = (
[TransactionUnderpricedError] * underpriced_submissions +
[transaction_submission_response])
transaction_resubmission_request.min_adaptable_fee_per_gas = \
min_adaptable_fee_per_gas
response = blockchain_utilities.resubmit_transaction(
transaction_resubmission_request)
assert dataclasses.asdict(response) == dataclasses.asdict(
transaction_submission_response)
assert mock_submit_transaction.call_count == underpriced_submissions + 1
@unittest.mock.patch.object(BlockchainUtilities, 'get_error_class',
return_value=BlockchainUtilitiesError)
def test_resubmit_transaction_min_adaptable_fee_per_gas_error(
mock_get_error_class, blockchain_utilities,
transaction_resubmission_request):
transaction_resubmission_request.min_adaptable_fee_per_gas = -1
with pytest.raises(BlockchainUtilitiesError):
blockchain_utilities.resubmit_transaction(
transaction_resubmission_request)
@unittest.mock.patch.object(BlockchainUtilities, 'get_error_class',
return_value=BlockchainUtilitiesError)
def test_resubmit_transaction_adaptable_fee_increase_factor_error(
mock_get_error_class, blockchain_utilities,
transaction_resubmission_request):
transaction_resubmission_request.adaptable_fee_increase_factor = 1.001
with pytest.raises(BlockchainUtilitiesError):
blockchain_utilities.resubmit_transaction(
transaction_resubmission_request)
@unittest.mock.patch.object(BlockchainUtilities, 'get_error_class',
return_value=BlockchainUtilitiesError)
def test_resubmit_transaction_max_total_fee_per_gas_exceeded_error(
mock_get_error_class, blockchain_utilities,
transaction_resubmission_request):
transaction_resubmission_request.max_total_fee_per_gas = \
transaction_resubmission_request.min_adaptable_fee_per_gas
with pytest.raises(MaxTotalFeePerGasExceededError):
blockchain_utilities.resubmit_transaction(
transaction_resubmission_request)
@pytest.mark.parametrize('initial_submission_underpriced', [False, True])
@unittest.mock.patch(
'pantos.common.blockchains.tasks.create_transaction_resubmission_task',
return_value=uuid.uuid4())
@unittest.mock.patch.object(BlockchainUtilities, 'resubmit_transaction')
@unittest.mock.patch.object(BlockchainUtilities, 'submit_transaction')
def test_start_transaction_submission_correct(
mock_submit_transaction, mock_resubmit_transaction,
mock_create_transaction_resubmission_task,
initial_submission_underpriced, blockchain_utilities,
transaction_submission_start_request, transaction_submission_response,
transaction_resubmission_response):
mock_submit_transaction.side_effect = [
TransactionUnderpricedError
if initial_submission_underpriced else transaction_submission_response
]
mock_resubmit_transaction.return_value = transaction_resubmission_response
internal_transaction_id = \
blockchain_utilities.start_transaction_submission(
transaction_submission_start_request)
assert (internal_transaction_id ==
mock_create_transaction_resubmission_task.return_value)
assert mock_submit_transaction.call_count == 1
assert mock_resubmit_transaction.call_count == (
1 if initial_submission_underpriced else 0)
assert mock_create_transaction_resubmission_task.call_count == 1
@unittest.mock.patch.object(BlockchainUtilities, 'get_error_class',
return_value=BlockchainUtilitiesError)
def test_start_transaction_submission_celery_tasks_disabled_error(
mock_get_error_class, blockchain_utilities,
transaction_submission_start_request):
blockchain_utilities._celery_tasks_enabled = False
with pytest.raises(BlockchainUtilitiesError):
blockchain_utilities.start_transaction_submission(
transaction_submission_start_request)
@unittest.mock.patch.object(BlockchainUtilities, 'get_error_class',
return_value=BlockchainUtilitiesError)
def test_start_transaction_submission_blocks_until_resubmission_error(
mock_get_error_class, blockchain_utilities,
transaction_submission_start_request):
transaction_submission_start_request.blocks_until_resubmission = 0
with pytest.raises(BlockchainUtilitiesError):
blockchain_utilities.start_transaction_submission(
transaction_submission_start_request)
@unittest.mock.patch.object(BlockchainUtilities, 'get_error_class',
return_value=BlockchainUtilitiesError)
def test_start_transaction_submission_adaptable_fee_increase_factor_error(
mock_get_error_class, blockchain_utilities,
transaction_submission_start_request):
transaction_submission_start_request.adaptable_fee_increase_factor = 1.001
with pytest.raises(BlockchainUtilitiesError):
blockchain_utilities.start_transaction_submission(
transaction_submission_start_request)
@unittest.mock.patch(
'pantos.common.blockchains.tasks.create_transaction_resubmission_task',
side_effect=Exception)
@unittest.mock.patch.object(BlockchainUtilities, 'submit_transaction')
@unittest.mock.patch.object(BlockchainUtilities, 'get_error_class',
return_value=BlockchainUtilitiesError)
def test_start_transaction_submission_transaction_resubmission_task_error(
mock_get_error_class, mock_submit_transaction,
mock_create_transaction_resubmission_task, blockchain_utilities,
transaction_submission_start_request, transaction_submission_response):
mock_submit_transaction.return_value = transaction_submission_response
with pytest.raises(BlockchainUtilitiesError):
blockchain_utilities.start_transaction_submission(
transaction_submission_start_request)
@pytest.mark.parametrize('task_result', [
None,
(TransactionStatus.CONFIRMED,
'0xdf6fca0130714b5496fe9f0dbf6991ca996f2a387e6698707f825f98f8725e1c')
])
@unittest.mock.patch(
'pantos.common.blockchains.tasks.get_transaction_resubmission_task_result')
def test_get_transaction_submission_status_correct(
mock_get_transaction_resubmission_task_result, task_result,
blockchain_utilities, internal_transaction_id):
mock_get_transaction_resubmission_task_result.return_value = task_result
response = blockchain_utilities.get_transaction_submission_status(
internal_transaction_id)
if task_result is None:
assert not response.transaction_submission_completed
else:
assert response.transaction_submission_completed
assert response.transaction_status is task_result[0]
assert response.transaction_id == task_result[1]
assert mock_get_transaction_resubmission_task_result.call_count == 1
@unittest.mock.patch.object(BlockchainUtilities, 'get_error_class',
return_value=BlockchainUtilitiesError)
def test_get_transaction_submission_status_celery_tasks_disabled_error(
mock_get_error_class, blockchain_utilities, internal_transaction_id):
blockchain_utilities._celery_tasks_enabled = False
with pytest.raises(BlockchainUtilitiesError):
blockchain_utilities.get_transaction_submission_status(
internal_transaction_id)
@unittest.mock.patch(
'pantos.common.blockchains.tasks.get_transaction_resubmission_task_result',
side_effect=Exception)
@unittest.mock.patch.object(BlockchainUtilities, 'get_error_class',
return_value=BlockchainUtilitiesError)
def test_get_transaction_submission_status_transaction_resubmission_task_error(
mock_get_error_class, mock_get_transaction_resubmission_task_result,
blockchain_utilities, internal_transaction_id):
with pytest.raises(BlockchainUtilitiesError):
blockchain_utilities.get_transaction_submission_status(
internal_transaction_id)
def test_add_node_connections_correct():
mock_connection = unittest.mock.Mock()
node_connections = NodeConnections()
node_connections.add_node_connection(mock_connection)
node_connections.add_node_connection(mock_connection)
assert node_connections._NodeConnections__node_connections == [
mock_connection, mock_connection
]
def test_getattr_with_no_connections_error():
node_connections = NodeConnections()
with pytest.raises(NodeConnectionError):
node_connections.attribute
def test_getattr_with_valid_connection_correct():
mock_connection = unittest.mock.Mock()
node_connections = NodeConnections()
node_connections.add_node_connection(mock_connection)
assert isinstance(node_connections.attribute, NodeConnections.Wrapper)
def test_call_non_transaction_function_correct():
mock_connection = unittest.mock.Mock()
node_connections = NodeConnections()
node_connections.add_node_connection(mock_connection)
wrapper = node_connections.eth.get_balance
assert not wrapper._Wrapper__is_transaction_function
def test_call_transaction_function():
mock_connection = unittest.mock.Mock()
node_connections = NodeConnections(['send_transaction'])
node_connections.add_node_connection(mock_connection)
wrapper = node_connections.eth.send_transaction
result = wrapper()
assert wrapper._Wrapper__is_transaction_function
assert result == wrapper._Wrapper__objects[0]()
def test_get_wrapper_attribute_result_correct():
mock_connection = unittest.mock.Mock()
mock_connection.eth.get_gas = 100
node_connections = NodeConnections()
node_connections.add_node_connection(mock_connection)
result = node_connections.eth.get_gas.get()
assert result == 100
def test_get_item_correct():
mock_connection = unittest.mock.Mock()
mock_connection.accounts = ['account']
node_connections = NodeConnections()
node_connections.add_node_connection(mock_connection)
account_wrapped = node_connections.accounts[0]
assert account_wrapped._Wrapper__objects[0] == 'account'
def test_compare_results_matching_correct():
mock_connection = unittest.mock.Mock()
mock_connection.get_balance.return_value = 10
node_connections = NodeConnections()
node_connections.add_node_connection(mock_connection)
node_connections.add_node_connection(mock_connection)
balance = node_connections.get_balance().get()
assert balance == 10
def test_compare_results_not_matching_error():
mock_connection = unittest.mock.Mock()
mock_connection_2 = unittest.mock.Mock()
mock_connection.get_balance.return_value = 10
mock_connection_2.get_balance.return_value = 15
node_connections = NodeConnections()
node_connections.add_node_connection(mock_connection)
node_connections.add_node_connection(mock_connection_2)
with pytest.raises(ResultsNotMatchingError):
node_connections.get_balance().get()
def test_get_minimum_result_correct():
mock_connection = unittest.mock.Mock()
mock_connection_2 = unittest.mock.Mock()
mock_connection.get_block_number.return_value = 10
mock_connection_2.get_block_number.return_value = 11
node_connections = NodeConnections()
node_connections.add_node_connection(mock_connection)
node_connections.add_node_connection(mock_connection_2)
assert node_connections.get_block_number().get_minimum_result() == 10
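# Illustrative only (not part of the original test module): a minimal sketch
# of the chained attribute access that the wrapper tests above exercise,
# assuming the same NodeConnections API.
def _example_chained_attribute_access():
    mock_connection = unittest.mock.Mock()
    mock_connection.eth.chain_id = 1
    node_connections = NodeConnections()
    node_connections.add_node_connection(mock_connection)
    assert node_connections.eth.chain_id.get() == 1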
| 22,904 | Python | .py | 436 | 43.665138 | 79 | 0.710897 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,159 | test_avalanche.py | pantos-io_common/tests/blockchains/test_avalanche.py | import pytest
from pantos.common.blockchains.avalanche import AvalancheUtilities
from pantos.common.blockchains.avalanche import AvalancheUtilitiesError
from pantos.common.blockchains.enums import Blockchain
@pytest.fixture(scope='module')
def avalanche_utilities(blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time, required_transaction_confirmations,
transaction_network_id):
return AvalancheUtilities(blockchain_node_urls,
fallback_blockchain_node_urls,
average_block_time,
required_transaction_confirmations,
transaction_network_id)
def test_get_blockchain_correct(avalanche_utilities):
assert avalanche_utilities.get_blockchain() is Blockchain.AVALANCHE
assert AvalancheUtilities.get_blockchain() is Blockchain.AVALANCHE
def test_get_error_class_correct(avalanche_utilities):
assert avalanche_utilities.get_error_class() is AvalancheUtilitiesError
assert AvalancheUtilities.get_error_class() is AvalancheUtilitiesError
| 1,139 | Python | .py | 19 | 48.684211 | 79 | 0.725966 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,160 | test_cronos.py | pantos-io_common/tests/blockchains/test_cronos.py | import pytest
from pantos.common.blockchains.cronos import CronosUtilities
from pantos.common.blockchains.cronos import CronosUtilitiesError
from pantos.common.blockchains.enums import Blockchain
@pytest.fixture(scope='module')
def cronos_utilities(blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time, required_transaction_confirmations,
transaction_network_id):
return CronosUtilities(blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time,
required_transaction_confirmations,
transaction_network_id)
def test_get_blockchain_correct(cronos_utilities):
assert cronos_utilities.get_blockchain() is Blockchain.CRONOS
assert CronosUtilities.get_blockchain() is Blockchain.CRONOS
def test_get_error_class_correct(cronos_utilities):
assert cronos_utilities.get_error_class() is CronosUtilitiesError
assert CronosUtilities.get_error_class() is CronosUtilitiesError
| 1,046 | Python | .py | 18 | 48.777778 | 79 | 0.744368 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,161 | test_polygon.py | pantos-io_common/tests/blockchains/test_polygon.py | import pytest
from pantos.common.blockchains.enums import Blockchain
from pantos.common.blockchains.polygon import PolygonUtilities
from pantos.common.blockchains.polygon import PolygonUtilitiesError
@pytest.fixture(scope='module')
def polygon_utilities(blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time, required_transaction_confirmations,
transaction_network_id):
return PolygonUtilities(blockchain_node_urls,
fallback_blockchain_node_urls, average_block_time,
required_transaction_confirmations,
transaction_network_id)
def test_get_blockchain_correct(polygon_utilities):
assert polygon_utilities.get_blockchain() is Blockchain.POLYGON
assert PolygonUtilities.get_blockchain() is Blockchain.POLYGON
def test_get_error_class_correct(polygon_utilities):
assert polygon_utilities.get_error_class() is PolygonUtilitiesError
assert PolygonUtilities.get_error_class() is PolygonUtilitiesError
| 1,067 | Python | .py | 18 | 49.666667 | 78 | 0.744722 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,162 | configuration.py | pantos-io_common/pantos/common/configuration.py | """Module for loading and parsing a configuration file.
"""
import errno
import importlib.resources
import logging
import os
import pathlib
import typing
import cerberus # type: ignore
import dotenv
import pyaml_env # type: ignore
import yaml
from pantos.common.exceptions import BaseError
_logger = logging.getLogger(__name__)
# Ordered by priority
_CONFIGURATION_PATHS = [
pathlib.Path.home(),
pathlib.Path.home() / '.config',
pathlib.Path('/etc/pantos'),
pathlib.Path('/etc')
]
if pathlib.Path.cwd() != pathlib.Path('/'):
_CONFIGURATION_PATHS.insert(0, pathlib.Path.cwd())
if os.environ.get('PANTOS_CONFIG'):
_logger.info('loading configuration from environment variable '
'PANTOS_CONFIG')
_CONFIGURATION_PATHS.insert(0, pathlib.Path(os.environ['PANTOS_CONFIG']))
class _CustomValidator(cerberus.Validator):
def _validate_one_not_present(self, other: str, field: str, value: str):
if (bool(value)) == (bool(self.document.get(other))):
            self._error(field, "exactly one field must be present: " + other)
def _normalize_coerce_load_if_file(self, value: str):
path = pathlib.Path(value)
try:
# This method may trigger an exception if the path is not valid
if path.is_file():
with open(path, 'r') as file:
return file.read()
except OSError as error:
if error.errno != errno.ENAMETOOLONG:
raise error
return value
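# Illustrative only (not part of the original module): a minimal Cerberus
# schema sketch using the custom rules defined above; the field names are
# assumptions made up for the example. The 'one_not_present' rule errors if
# the other field is set as well, and the 'load_if_file' coercer replaces a
# value with the file contents if the value is a path to a readable file.
_EXAMPLE_SCHEMA = {
    'private_key': {
        'type': 'string',
        'coerce': 'load_if_file',
        'one_not_present': 'private_key_path'
    },
    'private_key_path': {
        'type': 'string',
        'one_not_present': 'private_key'
    }
}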
class ConfigError(BaseError):
"""Exception class for all configuration errors.
"""
pass
class Config:
"""Class that loads and parses a configuration file and provides
dictionary-like access to the configuration values.
Attributes
----------
default_file_name : str
The default configuration file name to be used when no explicit
file path is provided for loading the configuration.
"""
def __init__(self, default_file_name: str):
"""Initialize a configuration instance.
Parameters
----------
default_file_name : str
The default configuration file name to be used when no
explicit file path is provided for loading the
configuration.
"""
self.default_file_name = default_file_name
self.__config_dict: dict[str, typing.Any] | None = None
def __getitem__(self, key: str) -> typing.Any:
"""Get the configuration value for a given key.
Parameters
----------
key : str
The key to get the configuration value for.
Returns
-------
Any
The configuration value.
Raises
------
ConfigError
If the configuration has not been loaded.
"""
if self.__config_dict is None:
raise ConfigError('configuration not yet loaded')
return self.__config_dict[key]
def __str__(self) -> str:
return str(self.__config_dict)
def is_loaded(self) -> bool:
"""Determine if the configuration has been loaded.
Returns
-------
bool
True if the configuration has been loaded.
"""
return self.__config_dict is not None
def load(self, validation_schema: dict[str, typing.Any],
file_path: str | None = None) -> None:
"""Load the configuration from a file.
Parameters
----------
validation_schema : dict
The Cerberus validation schema used for validating the
loaded configuration.
file_path : str, optional
The path to the configuration file to load. If no file path
is provided, the configuration is loaded from a default
configuration file path.
Raises
------
ConfigError
If the validation schema is invalid, the configuration file
cannot be found, or the configuration file is invalid.
"""
path = self.__find_file(file_path)
_logger.info(f'loading configuration from file {path}')
config_dict = self.__parse_file(path)
# Validate the configuration and add default configuration values
self.__config_dict = self.__validate(config_dict, validation_schema)
def __find_file(self, file_path: str | None) -> pathlib.Path:
if file_path is not None:
# Use the specified configuration file
path = pathlib.Path(file_path)
if not path.is_file():
raise ConfigError(
f'no configuration file found at {file_path}')
return path
# Find the configuration file at common locations
for path in _CONFIGURATION_PATHS:
config_path = path
try:
if config_path.is_dir():
config_path = config_path / self.default_file_name
if config_path.is_file():
return config_path
except OSError:
# Perhaps the path is not readable
_logger.warning(f'error while reading: {config_path}',
exc_info=True)
# Package resource
if importlib.resources.is_resource('pantos', self.default_file_name):
with importlib.resources.path('pantos',
self.default_file_name) as path:
return path
# No configuration file found at common locations
raise ConfigError('no configuration file found')
def __parse_file(self, path: pathlib.Path) -> dict[str, typing.Any]:
# List of potential .env file paths
env_files = [
pathlib.Path(path.as_posix() + '.env'),
pathlib.Path(path.with_suffix('.env').as_posix())
]
if os.environ.get('PANTOS_ENV_FILE'):
            _logger.info('loading env variables from the file defined by '
                         'the environment variable PANTOS_ENV_FILE')
env_files.insert(0, pathlib.Path(os.environ['PANTOS_ENV_FILE']))
# Extend env_files with .env paths from _CONFIGURATION_PATHS
env_files.extend(
pathlib.Path(str(p)).with_name(
str(p.name) + self.default_file_name + '.env')
for p in _CONFIGURATION_PATHS)
# Iterate over the potential .env file paths
for env_file in env_files:
try:
if env_file.is_file():
dotenv.load_dotenv(env_file)
_logger.info(f'loaded .env from file {env_file}')
break
except OSError:
# Perhaps the path is not readable
_logger.warning(f'error while reading: {env_file}',
exc_info=True)
except Exception:
_logger.error(f'unable to load .env file {env_file}',
exc_info=True)
# Parse the YAML code in the configuration file
try:
return pyaml_env.parse_config(path.as_posix(), default_value='')
except yaml.YAMLError as error:
if hasattr(error, 'problem_mark'):
line = error.problem_mark.line + 1
column = error.problem_mark.column + 1
raise ConfigError('YAML code in configuration file invalid at '
f'line {line} and column {column}')
else:
raise ConfigError('YAML code in configuration file invalid')
def __validate(
self, config_dict: dict[str, typing.Any],
validation_schema: dict[str, typing.Any]) -> dict[str, typing.Any]:
# Create the validator and validate the validation schema
try:
validator = _CustomValidator(validation_schema)
except cerberus.schema.SchemaError as error:
raise ConfigError(f'validation schema invalid: {error}')
# Validate the configuration
if not validator.validate(config_dict):
raise ConfigError(
f'configuration file invalid: {validator.errors}')
# Add default configuration values
return validator.normalized(config_dict)
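# Illustrative only (not part of the original module): a minimal sketch of
# loading a configuration; the file name and schema keys are assumptions
# made up for the example.
def _example_load_config() -> typing.Any:
    validation_schema = {
        'application': {
            'type': 'dict',
            'schema': {
                'debug': {'type': 'boolean', 'default': False},
                'url': {'type': 'string', 'required': True}
            }
        }
    }
    config = Config('example-config.yml')
    config.load(validation_schema)  # raises ConfigError on failure
    return config['application']['url']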
| 8,269 | Python | .py | 198 | 31.141414 | 79 | 0.594524 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,163 | protocol.py | pantos-io_common/pantos/common/protocol.py | """Module for keeping track of supported Pantos protocol versions.
"""
import typing
import semantic_version # type: ignore
_SUPPORTED_PROTOCOL_VERSIONS: typing.Final[set[semantic_version.Version]] = {
semantic_version.Version('0.1.0')
}
def get_latest_protocol_version() -> semantic_version.Version:
"""Get the latest supported Pantos protocol version.
Returns
-------
semantic_version.Version
The protocol version.
"""
return max(_SUPPORTED_PROTOCOL_VERSIONS)
def get_supported_protocol_versions() -> list[semantic_version.Version]:
"""Get all supported Pantos protocol versions.
Returns
-------
list of semantic_version.Version
The protocol versions.
"""
return sorted(_SUPPORTED_PROTOCOL_VERSIONS)
def is_supported_protocol_version(version: semantic_version.Version) -> bool:
"""Check if a given version is a supported Pantos protocol version.
Parameters
----------
version : semantic_version.Version
The version to check.
Returns
-------
bool
True if the protocol version is supported.
"""
return version in _SUPPORTED_PROTOCOL_VERSIONS
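# Illustrative only (not part of the original module): a minimal sketch of
# checking a protocol version.
def _example_version_check() -> bool:
    # True, since the latest supported version is itself supported.
    return is_supported_protocol_version(get_latest_protocol_version())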
| 1,179 | Python | .py | 35 | 28.8 | 77 | 0.70656 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,164 | logging.py | pantos-io_common/pantos/common/logging.py | """Module for initializing console and file logging.
"""
import dataclasses
import datetime
import enum
import logging
import logging.handlers
import pathlib
import sys
import typing
import json_log_formatter # type: ignore
from pantos.common.blockchains.enums import Blockchain
_HUMAN_READABLE_LOG_FORMAT: typing.Final[str] = (
'%(asctime)s - %(name)s - %(thread)d - %(levelname)s - '
'%(message)s%(extra)s')
@dataclasses.dataclass
class LogFile:
"""Properties of a log file.
Attributes
----------
file_path : pathlib.Path
The path to the log file.
max_bytes : int
Maximum number of bytes the log file can reach before it is
rolled over.
backup_count : int
Number of old log files kept (by appending the file extensions
.1, .2, etc.).
"""
file_path: pathlib.Path
max_bytes: int
backup_count: int
class LogFormat(enum.Enum):
"""Enumeration of available log formats.
"""
HUMAN_READABLE = 1
JSON = 2
@staticmethod
def from_name(name: str) -> 'LogFormat':
"""Find an enumeration member by its name.
Parameters
----------
name : str
The name to search for.
Raises
------
NameError
If no enumeration member can be found for the given name.
"""
name_upper = name.upper()
for log_format in LogFormat:
if name_upper == log_format.name:
return log_format
raise NameError(name)
class _DataDogJSONFormatter(json_log_formatter.VerboseJSONFormatter):
"""Custom JSON formatter tailored for proper DataDog logs.
"""
def mutate_json_record(self, json_record: typing.Dict[str | int,
typing.Any]):
"""Convert fields of `json_record` to needed types.
Parameters
----------
json_record : dict of str or int, typing.Any
The json dictionary object of the log.
Returns
-------
dict of str or int, typing.Any
The mutated dictionary object of the log.
"""
for attribute_name in json_record:
attribute = json_record[attribute_name]
if isinstance(attribute, datetime.datetime):
json_record[attribute_name] = attribute.isoformat()
if isinstance(attribute, Blockchain):
json_record[attribute_name] = attribute.name_in_pascal_case
return json_record
def json_record(self, message: str, extra: typing.Dict[str | int,
typing.Any],
record: typing.Any) -> typing.Dict[str | int, typing.Any]:
"""Prepares a JSON payload which will be logged.
Parameters
----------
message : str
Log message.
extra : dict of str or int, typing.Any
Dictionary that was passed as `extra` parameter.
record : typing.Any
Log record returned by JSONFormatter.format()
Returns
        -------
dict of str or int, typing.Any
The dictionary of the record.
"""
extra['levelname'] = record.levelname
if 'time' not in extra:
extra['time'] = datetime.datetime.utcnow()
extra['message'] = message
if record.exc_info:
extra['exc_info'] = self.formatException(record.exc_info)
return extra
class _HumanReadableFormatter(logging.Formatter):
"""Formatter for human-readable log messages.
"""
def __init__(self):
log_record = logging.LogRecord(None, None, None, None, None, None,
None)
self.__log_record_attributes = set(log_record.__dict__.keys())
self.__log_record_attributes.add('asctime')
self.__log_record_attributes.add('message')
super().__init__(_HUMAN_READABLE_LOG_FORMAT)
def format(self, log_record: logging.LogRecord) -> str:
# Docstring inherited
extra = ''
for key, value in log_record.__dict__.items():
if key not in self.__log_record_attributes and key != 'extra':
extra += f' - {key}: {value}'
log_record.__dict__['extra'] = extra
return super().format(log_record)
def initialize_logger(logger: logging.Logger = logging.getLogger(),
log_format: LogFormat = LogFormat.HUMAN_READABLE,
standard_output: bool = True,
log_file: typing.Optional[LogFile] = None,
debug: bool = False) -> None:
"""Initialize a logger by setting its log format, output streams,
and the level of logged messages.
Parameters
----------
logger : logging.Logger
The logger to be initialized (default: root logger).
log_format : LogFormat
The format of the log output (default: human-readable format).
standard_output : bool
If True, the messages are logged to the standard output, which
is shown on the console if it is not redirected (default: True).
log_file : LogFile, optional
If given, messages are logged to the specified log file
(default: None).
debug : bool
If True, debug messages are logged (default: False).
Raises
------
OSError
If the logs cannot be written to the specified log file.
"""
logger.handlers.clear()
formatter = _create_formatter(log_format)
if standard_output:
logger.addHandler(_create_standard_output_handler(formatter))
if log_file is not None:
logger.addHandler(_create_rotating_file_handler(log_file, formatter))
logger.setLevel(logging.DEBUG if debug else logging.INFO)
def _create_formatter(log_format: LogFormat) -> logging.Formatter:
if log_format is LogFormat.HUMAN_READABLE:
return _HumanReadableFormatter()
if log_format is LogFormat.JSON:
return _DataDogJSONFormatter()
raise NotImplementedError
def _create_rotating_file_handler(
log_file: LogFile, formatter: logging.Formatter) -> logging.Handler:
if not log_file.file_path.parent.exists():
log_file.file_path.parent.mkdir(parents=True)
handler = logging.handlers.RotatingFileHandler(
log_file.file_path, maxBytes=log_file.max_bytes,
backupCount=log_file.backup_count)
handler.setFormatter(formatter)
return handler
def _create_standard_output_handler(
formatter: logging.Formatter) -> logging.Handler:
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(formatter)
return handler
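# Illustrative only (not part of the original module): a minimal sketch of
# initializing JSON logging to standard output and a rotating log file; the
# file path and size limits are assumptions made up for the example.
def _example_initialize_logging() -> None:
    log_file = LogFile(file_path=pathlib.Path('/var/log/pantos/example.log'),
                       max_bytes=10 * 1024 * 1024, backup_count=5)
    initialize_logger(logging.getLogger('example'), LogFormat.JSON,
                      standard_output=True, log_file=log_file, debug=True)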
| 6,703 | Python | .py | 171 | 30.549708 | 78 | 0.624923 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,165 | servicenodes.py | pantos-io_common/pantos/common/servicenodes.py | """Module for communicating with Pantos service nodes.
"""
import dataclasses
import typing
import uuid
import requests
from pantos.common.blockchains.enums import Blockchain
from pantos.common.entities import ServiceNodeBid
from pantos.common.entities import ServiceNodeTransferStatus
from pantos.common.exceptions import BaseError
from pantos.common.types import BlockchainAddress
_TRANSFER_RESOURCE = 'transfer'
_STATUS_RESOURCE = 'status'
_BID_RESOURCE = 'bids'
class ServiceNodeClientError(BaseError):
"""Exception class for all service node client errors.
"""
pass
class ServiceNodeClient:
"""Client for communicating with Pantos service nodes.
"""
@dataclasses.dataclass
class SubmitTransferRequest:
"""Request data for submitting a new token transfer request to a
Pantos service node.
Attributes
----------
service_node_url : str
The chosen service node's base URL.
source_blockchain: Blockchain
The token transfer's source blockchain.
destination_blockchain: Blockchain
The token transfer's destination blockchain.
sender_address: BlockchainAddress
The address of the sender on the source blockchain.
recipient_address: BlockchainAddress
The address of the recipient on the destination blockchain.
source_token_address: BlockchainAddress
The address of the transferred token on the source
blockchain.
destination_token_address: BlockchainAddress
The address of the transferred token on the destination
blockchain.
token_amount: int
The amount of tokens to be transferred.
service_node_bid: ServiceNodeBid
The chosen service node bid.
sender_nonce: int
The unique sender nonce for the new token transfer.
valid_until: int
The timestamp until when the token transfer can be included
on the source blockchain (in seconds since the epoch).
signature: str
The sender's signature for the new token transfer.
"""
service_node_url: str
source_blockchain: Blockchain
destination_blockchain: Blockchain
sender_address: BlockchainAddress
recipient_address: BlockchainAddress
source_token_address: BlockchainAddress
destination_token_address: BlockchainAddress
token_amount: int
service_node_bid: ServiceNodeBid
sender_nonce: int
valid_until: int
signature: str
@dataclasses.dataclass
class TransferStatusResponse:
"""Response data for checking the status of a transfer at a
service node.
Attributes
----------
task_id: uuid.UUID
The unique task ID of the token transfer.
source_blockchain: Blockchain
The token transfer's source blockchain.
destination_blockchain: Blockchain
The token transfer's destination blockchain.
sender_address: BlockchainAddress
The address of the sender on the source blockchain.
recipient_address: BlockchainAddress
The address of the recipient on the destination blockchain.
source_token_address: BlockchainAddress
The address of the transferred token on the source
blockchain.
destination_token_address: BlockchainAddress
The address of the transferred token on the destination
blockchain.
token_amount: int
The amount of tokens transferred.
fee: int
The fee paid to the service node for the transfer.
status: ServiceNodeTransferStatus
The service node transfer status.
transfer_id: int
The Pantos transfer ID.
transaction_id: str
The ID/hash of the token transfer's transaction.
"""
task_id: uuid.UUID
source_blockchain: Blockchain
destination_blockchain: Blockchain
sender_address: BlockchainAddress
recipient_address: BlockchainAddress
source_token_address: BlockchainAddress
destination_token_address: BlockchainAddress
token_amount: int
fee: int
status: ServiceNodeTransferStatus
transfer_id: int
transaction_id: str
def submit_transfer(self, request: SubmitTransferRequest,
timeout: typing.Optional[float] = None) -> uuid.UUID:
"""Submit a new token transfer request to a Pantos service node.
Parameters
----------
request : SubmitTransferRequest
The request data for a new token transfer.
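        timeout : float, optional
            The timeout for the HTTP request (in seconds; default: no
            timeout).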
Returns
-------
uuid.UUID
The service node's task ID.
Raises
------
ServiceNodeClientError
If the token transfer request cannot be submitted
successfully.
"""
service_node_request = {
'source_blockchain_id': request.source_blockchain.value,
'destination_blockchain_id': request.destination_blockchain.value,
'sender_address': request.sender_address,
'recipient_address': request.recipient_address,
'source_token_address': request.source_token_address,
'destination_token_address': request.destination_token_address,
'amount': request.token_amount,
'bid': {
'fee': request.service_node_bid.fee,
'execution_time': request.service_node_bid.execution_time,
'valid_until': request.service_node_bid.valid_until,
'signature': request.service_node_bid.signature
},
'nonce': request.sender_nonce,
'valid_until': request.valid_until,
'signature': request.signature
}
transfer_url = self.__build_transfer_url(request.service_node_url)
        service_node_response: typing.Optional[requests.Response] = None
        try:
service_node_response = requests.post(transfer_url,
json=service_node_request,
timeout=timeout)
# Raise an error in case of a 4xx or 5xx response status code
service_node_response.raise_for_status()
task_id = service_node_response.json()['task_id']
return uuid.UUID(task_id)
except (requests.exceptions.RequestException, KeyError):
response_message = self.__read_response_message(
service_node_response)
raise ServiceNodeClientError(
'unable to submit a new token transfer request',
request=request, transfer_url=transfer_url,
response_message=response_message)
def bids(
self, service_node_url: str, source_blockchain: Blockchain,
destination_blockchain: Blockchain,
timeout: typing.Optional[float] = None) \
-> typing.List[ServiceNodeBid]:
"""Retrieve the bids of the service node found at the given
service node url.
Parameters
----------
service_node_url : str
The url of the service node.
source_blockchain : Blockchain
The source blockchain of the bid.
destination_blockchain : Blockchain
The destination blockchain of the bid.
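        timeout : float, optional
            The timeout for the HTTP request (in seconds; default: no
            timeout).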
Returns
-------
list of ServiceNodeBid
            The list of service node bids given by the service node.
Raises
        ------
ServiceNodeClientError
If unable to retrieve the bids.
"""
bids_url = self.__build_bids_url(service_node_url,
str(source_blockchain.value),
str(destination_blockchain.value))
        service_node_response: typing.Optional[requests.Response] = None
        try:
service_node_response = requests.get(bids_url, timeout=timeout)
service_node_response.raise_for_status()
bids = service_node_response.json()
response = []
for bid in bids:
response.append(
ServiceNodeBid(source_blockchain, destination_blockchain,
bid['fee'], bid['execution_time'],
bid['valid_until'], bid['signature']))
return response
except (requests.exceptions.RequestException, ValueError, KeyError):
response_message = self.__read_response_message(
service_node_response)
raise ServiceNodeClientError(
'unable to get the bids of the service node',
service_node_url=service_node_url,
source_blockchain=source_blockchain,
destination_blockchain=destination_blockchain,
response_message=response_message)
def status(
self, service_node_url: str, task_id: uuid.UUID,
timeout: typing.Optional[float] = None) -> TransferStatusResponse:
"""Retrieve the status of a transfer.
Parameters
----------
service_node_url : str
The url of the service node.
task_id : uuid.UUID
The task id of the transfer.
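        timeout : float, optional
            The timeout for the HTTP request (in seconds; default: no
            timeout).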
Returns
-------
TransferStatusResponse
The transfer status response.
Raises
------
ServiceNodeClientError
If unable to get the status of a transfer.
"""
status_url = self.__build_status_url(service_node_url, task_id)
        service_node_response: typing.Optional[requests.Response] = None
        try:
service_node_response = requests.get(status_url, timeout=timeout)
service_node_response.raise_for_status()
json_response = service_node_response.json()
transfer_status_response = self.TransferStatusResponse(
uuid.UUID(json_response['task_id']),
Blockchain(json_response['source_blockchain_id']),
Blockchain(json_response['destination_blockchain_id']),
BlockchainAddress(json_response['sender_address']),
BlockchainAddress(json_response['recipient_address']),
BlockchainAddress(json_response['source_token_address']),
BlockchainAddress(json_response['destination_token_address']),
json_response['amount'], json_response['fee'],
ServiceNodeTransferStatus.from_name(json_response['status']),
json_response['transfer_id'], json_response['transaction_id'])
return transfer_status_response
except (requests.exceptions.RequestException, ValueError, KeyError):
response_message = self.__read_response_message(
service_node_response)
raise ServiceNodeClientError(
'unable to get the status of the transfer',
service_node_url=service_node_url, task_id=task_id,
response_message=response_message)
def __build_transfer_url(self, service_node_url: str) -> str:
transfer_url = service_node_url
if not service_node_url.endswith('/'):
transfer_url += '/'
transfer_url += _TRANSFER_RESOURCE
return transfer_url
def __build_bids_url(self, service_node_url: str, source_blockchain: str,
destination_blockchain: str) -> str:
bids_url = service_node_url
if not service_node_url.endswith('/'):
bids_url += '/'
return (f'{bids_url}{_BID_RESOURCE}?'
f'source_blockchain={source_blockchain}&'
f'destination_blockchain={destination_blockchain}')
def __build_status_url(self, service_node_url: str,
task_id: uuid.UUID) -> str:
transfer_url = self.__build_transfer_url(service_node_url)
return f'{transfer_url}/{str(task_id)}/{_STATUS_RESOURCE}'
    def __read_response_message(
            self, response: typing.Optional[requests.Response]) \
            -> typing.Optional[str]:
        # The response is None if the request itself failed (e.g. due to a
        # connection error or timeout) before any response was received.
        response_message = None
        if response is not None and 'application/json' in \
                response.headers.get('content-type', ''):
            response_message = response.json().get('message')
return response_message
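# Illustrative only (not part of the original module): a minimal sketch of
# querying a service node's bids and choosing the cheapest one; the URL and
# blockchains are assumptions made up for the example.
def _example_cheapest_bid() -> ServiceNodeBid:
    client = ServiceNodeClient()
    bids = client.bids('https://service-node.example.com',
                       Blockchain.AVALANCHE, Blockchain.POLYGON, timeout=10)
    return min(bids, key=lambda bid: bid.fee)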
| 12,286 | Python | .py | 277 | 32.819495 | 78 | 0.619207 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,166 | health.py | pantos-io_common/pantos/common/health.py | """Module for analyzing the health of the system.
"""
import concurrent.futures
import dataclasses
from pantos.common.blockchains.base import UnhealthyNode
from pantos.common.blockchains.enums import Blockchain
from pantos.common.blockchains.factory import get_blockchain_utilities
from pantos.common.exceptions import NotInitializedError
_blockchain_nodes: dict[Blockchain, tuple[list[str],
float | tuple | None]] = {}
@dataclasses.dataclass
class NodesHealth:
"""Entity which provides information about the health status
of the nodes requested for a blockchain network.
Attributes
----------
healthy_total : int
The total number of healthy nodes.
unhealthy_total : int
The total number of unhealthy nodes.
    unhealthy_nodes : list[UnhealthyNode]
The list of unhealthy nodes with their respective status.
"""
healthy_total: int
unhealthy_total: int
unhealthy_nodes: list[UnhealthyNode]
def check_blockchain_nodes_health() -> dict[Blockchain, NodesHealth]:
"""Check the health of the blockchain nodes.
Returns
-------
dict[Blockchain, NodesHealth]
The health status of the blockchain nodes.
Raises
------
NotInitializedError
If the blockchain nodes have not been initialized yet.
"""
if _blockchain_nodes == {}:
raise NotInitializedError(
'the blockchain nodes have not been initialized yet')
nodes_health = {}
with concurrent.futures.ThreadPoolExecutor() as executor:
future_to_blockchain = {
executor.submit(
get_blockchain_utilities(blockchain).get_unhealthy_nodes, # noqa
_blockchain_nodes[blockchain][0],
_blockchain_nodes[blockchain][1]): blockchain
for blockchain in _blockchain_nodes
}
for future in concurrent.futures.as_completed(future_to_blockchain):
blockchain = future_to_blockchain[future]
unhealthy_nodes = future.result()
nodes_health[blockchain] = NodesHealth(
len(_blockchain_nodes[blockchain][0]) - len(unhealthy_nodes),
len(unhealthy_nodes), unhealthy_nodes)
return nodes_health
def initialize_blockchain_nodes(
blockchain_nodes: dict[Blockchain, tuple[list[str],
float | tuple | None]]) \
-> None:
"""Initialize the blockchain nodes.
Parameters
----------
    blockchain_nodes : dict[Blockchain,
                            tuple[list[str], float | tuple | None]]
The blockchain nodes to be initialized.
"""
global _blockchain_nodes
if _blockchain_nodes != blockchain_nodes: # pragma: no cover
_blockchain_nodes = blockchain_nodes
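# Illustrative only (not part of the original module): a minimal sketch of
# registering blockchain nodes and checking their health; the node URLs and
# timeout are assumptions made up for the example.
def _example_check_health() -> dict[Blockchain, NodesHealth]:
    initialize_blockchain_nodes({
        Blockchain.AVALANCHE: (['https://node1.example.com',
                                'https://node2.example.com'], 10.0)
    })
    return check_blockchain_nodes_health()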
| 2,785 | Python | .py | 69 | 32.463768 | 81 | 0.666543 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,167 | exceptions.py | pantos-io_common/pantos/common/exceptions.py | """Common generic exceptions and related classes for Pantos.
"""
import abc
import typing
from pantos.common.blockchains.enums import Blockchain
E = typing.TypeVar('E', bound='BaseError')
class ErrorCreator(abc.ABC, typing.Generic[E]):
"""Base class that helps to properly create Pantos errors for a
    hierarchy of related classes (like business-logic interactors or
blockchain clients/utilities).
"""
@classmethod
@abc.abstractmethod
def get_error_class(cls) -> type[E]:
"""Get the main error class of the subclass in the class
hierarchy.
Returns
-------
type[E]
The error class.
"""
pass # pragma: no cover
def _create_error(self, message: typing.Optional[str] = None, *,
specialized_error_class: typing.Optional[type[E]] = None,
**kwargs: typing.Any) -> E:
"""Create an error that can be catched as both the subclass's
main error and (if specified) the specialized error (as well as
their supertypes).
Parameters
----------
message : str, optional
An explanation of the error (only to be specified if neither
the subclass's main error class nor the specialized error
class provides a default message).
specialized_error_class : type[E], optional
The specialized error class.
**kwargs : dict
Additional information about the error as keyword arguments.
Returns
-------
E
The error.
"""
error_class = self.get_error_class()
error_classes: tuple[type[E], ...] = ()
if specialized_error_class is not None:
error_classes += (specialized_error_class, )
error_classes += (error_class, )
class Error(*error_classes): # type: ignore
pass
Error.__name__ = (error_class.__name__ if specialized_error_class
is None else specialized_error_class.__name__)
Error.__qualname__ = Error.__name__
Error.__module__ = error_class.__module__
return Error(
**kwargs) if message is None else Error(message=message, **kwargs)
class BaseError(Exception):
"""Base exception class for all Pantos errors.
Attributes
----------
details : dict
Additional information about the error.
"""
def __init__(self, message: str, **kwargs: typing.Any):
"""Construct an error instance.
Parameters
----------
message : str
An explanation of the error.
**kwargs : dict
Additional information about the error as keyword arguments.
"""
super().__init__(message)
self.details = kwargs
def __str__(self) -> str:
string = f'{super().__str__()}'
if self.details is not None:
for key, value in self.details.items():
value = value.name if isinstance(value, Blockchain) else value
string += f' - {key}: {value}'
return string
class NotInitializedError(BaseError):
"""Error to be raised if a Pantos resource has not been properly
initialized when it is requested to be used.
"""
pass
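# Illustrative only (not part of the original module): a minimal sketch of
# the ErrorCreator pattern; the class names are assumptions made up for the
# example.
class _ExampleClientError(BaseError):
    pass
class _ExampleClient(ErrorCreator[_ExampleClientError]):
    @classmethod
    def get_error_class(cls) -> type[_ExampleClientError]:
        return _ExampleClientError
    def _fail(self) -> None:
        # The raised error is an instance of _ExampleClientError and thus
        # also of BaseError; details are attached as keyword arguments.
        raise self._create_error('example failure', request_id=42)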
| 3,311 | Python | .py | 86 | 29.616279 | 79 | 0.600125 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,168 | restapi.py | pantos-io_common/pantos/common/restapi.py | """Common REST API resources.
"""
import dataclasses
import json
import logging
import flask # type: ignore
import flask_restful # type: ignore
from pantos.common.exceptions import NotInitializedError
from pantos.common.health import check_blockchain_nodes_health
_logger = logging.getLogger(__name__)
"""Logger for this module."""
class Live(flask_restful.Resource):
"""Flask resource class which specifies the health/live REST endpoint.
"""
def get(self):
"""Return null response with status 200 if the service is up
and running.
"""
return None # pragma: no cover
class NodesHealthResource(flask_restful.Resource):
"""RESTful resource for the health status of the blockchain nodes.
"""
def get(self):
"""Return the health status of the blockchain nodes.
"""
try:
_logger.info('checking blockchain nodes health')
nodes_health = check_blockchain_nodes_health()
return ok_response({
blockchain.name.capitalize(): dataclasses.asdict(
nodes_health[blockchain])
for blockchain in nodes_health
})
except NotInitializedError:
_logger.warning('no blockchain nodes have been initialized yet')
return internal_server_error(
'no blockchain nodes have been initialized yet')
except Exception:
_logger.critical('cannot check blockchain nodes health',
exc_info=True)
return internal_server_error()
def ok_response(data: list | dict) -> flask.Response:
"""Create a Flask response given some data.
Parameters
----------
data : list or dict
The data that will be wrapped in a Flask response.
Returns
-------
flask.Response
The Flask response object.
"""
return flask.Response(json.dumps(data), status=200,
mimetype='application/json')
def no_content_response() -> flask.Response:
"""Create a Flask response for a successful request without any
content.
Returns
-------
flask.Response
The Flask response object.
"""
return flask.Response(status=204)
def conflict(error_message: str):
"""Raise an HTTPException when a request conflicts
with the current state of the server.
Parameters
----------
error_message : str
The error message.
Raises
------
HTTPException
HTTP exception raised with the code 409.
"""
flask_restful.abort(409, message=error_message)
def not_acceptable(error_messages: str | list | dict):
"""Raise an HTTPException for non-acceptable requests.
Parameters
----------
error_messages : str or list or dict
The error messages.
Raises
------
HTTPException
HTTP exception raised with the code 406.
"""
flask_restful.abort(406, message=error_messages)
def bad_request(error_messages: str | list | dict):
"""Raise an HTTPException for bad requests.
Parameters
----------
error_messages : str or list or dict
The error messages.
Raises
------
HTTPException
HTTP exception raised with the code 400.
"""
flask_restful.abort(400, message=error_messages)
def forbidden(error_message: str):
"""Raise an HTTPException if a prohibited action is refused.
Parameters
----------
error_message : str
The error message.
Raises
------
HTTPException
HTTP exception raised with the code 403.
"""
flask_restful.abort(403, message=error_message)
def resource_not_found(error_message: str):
"""Raise an HTTPException if the resource has not been found.
Parameters
----------
error_message : str
The error message.
Raises
------
HTTPException
HTTP exception raised with the code 404.
"""
flask_restful.abort(404, message=error_message)
def internal_server_error(error_message: str | None = None):
"""Raise an HTTPException for internal server errors.
Parameters
----------
error_message : str, optional
The error message.
Raises
------
HTTPException
HTTP exception raised with the code 500.
"""
flask_restful.abort(500, message=error_message)
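# Illustrative only (not part of the original module): a minimal sketch of
# wiring the resources above into a Flask application; the routes are
# assumptions made up for the example.
def _example_create_app() -> flask.Flask:
    app = flask.Flask(__name__)
    api = flask_restful.Api(app)
    api.add_resource(Live, '/health/live')
    api.add_resource(NodesHealthResource, '/health/nodes')
    return app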
| 4,393 | Python | .py | 136 | 25.639706 | 76 | 0.648835 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,169 | entities.py | pantos-io_common/pantos/common/entities.py | """Module that defines Pantos entities.
"""
import dataclasses
import enum
import typing
from pantos.common.blockchains.enums import Blockchain
from pantos.common.types import Amount
from pantos.common.types import BlockchainAddress
@dataclasses.dataclass
class ServiceNodeBid:
"""Entity that represents a Pantos service node bid.
Attributes
----------
source_blockchain : Blockchain
The source blockchain of token transfers covered by the
service node bid.
destination_blockchain : Blockchain
The destination blockchain of token transfers covered by the
service node bid.
fee : Amount
The service node's fee for a token transfer.
execution_time : int
The time (in seconds since the epoch) the service node has
available for executing a token transfer on the source
blockchain.
valid_until : int
The time (in seconds since the epoch) until which the bid is
valid.
signature : str
        The signature over the service node's bid. The signature is over
```
sig(fee,
bid_valid_until,
source_blockchain_id,
destination_blockchain_id,
execution_time)
```
"""
source_blockchain: Blockchain
destination_blockchain: Blockchain
fee: Amount
execution_time: int
valid_until: int
signature: str
@dataclasses.dataclass
class TokenDeploymentRequest:
"""Request data for submitting a new deployment request to the
Pantos token creator service.
Attributes
----------
deployment_blockchain_ids : list of int
The deployment blockchain ids.
token_name : str
The name of the token.
token_symbol : str
The symbol of the token.
token_decimals : int
The number of decimals of the token.
token_pausable : bool
If the token is pausable.
token_burnable : bool
If the token is burnable.
token_supply : int
The supply of the token.
payment_blockchain_id : int
The blockchain id of the blockchain where the deployment
fee will be paid.
payer_address : BlockchainAddress
The blockchain address of the payer.
deployment_fee : int
The deployment fee to be paid.
deployment_fee_signature : str
The deployment fee signature.
deployment_fee_valid_until : int
The timestamp until when the deployment fee can be submitted
to the token creator service (in seconds since the epoch).
bid_fee : Amount
The fee of the bid.
bid_execution_time : int
The execution time of the bid.
bid_valid_until : int
        The time (in seconds since the epoch) until which the bid is
        valid.
bid_signature : str
The signature of the bid given by the service node.
payment_nonce : int
The nonce of the payment.
payment_valid_until : int
The timestamp until when the payment can be submitted
to the service node (in seconds since the epoch).
payment_signature : str
The signature of the payment.
"""
deployment_blockchain_ids: typing.List[int]
token_name: str
token_symbol: str
token_decimals: int
token_pausable: bool
token_burnable: bool
token_supply: int
payment_blockchain_id: int
payer_address: BlockchainAddress
deployment_fee: int
deployment_fee_signature: str
deployment_fee_valid_until: int
bid_fee: Amount
bid_execution_time: int
bid_valid_until: int
bid_signature: str
payment_nonce: int
payment_valid_until: int
payment_signature: str
BlockchainAddressBidPair = typing.Tuple[BlockchainAddress, ServiceNodeBid]
class ServiceNodeTransferStatus(enum.IntEnum):
"""Enumeration of possible transfer status values.
"""
ACCEPTED = 0
FAILED = 1
SUBMITTED = 2
REVERTED = 3
CONFIRMED = 4
@staticmethod
def from_name(name: str) -> 'ServiceNodeTransferStatus':
"""Convert a string to a ServiceNodeTransferStatus object
(if possible).
Parameters
----------
name : str
The name of the status to be converted.
Raises
------
NameError
            If no status can be found for the given name.
"""
for status in ServiceNodeTransferStatus:
if name.upper() == status.name:
return status
raise NameError(name)
class TransactionStatus(enum.Enum):
"""Enumeration of blockchain transaction status values.
"""
UNINCLUDED = 0
UNCONFIRMED = 1
CONFIRMED = 2
REVERTED = 3
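# Hedged usage sketch of the entities above (all values are illustrative
# placeholders, not real bid data).
def _example_entities_usage() -> ServiceNodeBid:
    status = ServiceNodeTransferStatus.from_name('confirmed')
    assert status is ServiceNodeTransferStatus.CONFIRMED
    return ServiceNodeBid(source_blockchain=Blockchain.ETHEREUM,
                          destination_blockchain=Blockchain.BNB_CHAIN,
                          fee=100, execution_time=600,
                          valid_until=1700000000, signature='0x...')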
| 4,739 | Python | .py | 146 | 25.883562 | 76 | 0.676001 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,170 | signer.py | pantos-io_common/pantos/common/signer.py | import getpass
import typing
import Crypto.PublicKey.ECC
import Crypto.Signature.eddsa
class SignerError(Exception):
"""Exception class for signer errors.
"""
def __init__(self, message: str, name: str = 'signer error',
**kwargs: typing.Any):
"""Construct a signer error.
Parameters
----------
message : str
Explanation of the error.
name : str
Human-readable name of the error.
**kwargs : dict
Additional information about the error as keyword arguments.
"""
super().__init__(message, name, **kwargs)
class _Signer:
def __init__(self, pem_value: str, pem_password: str):
"""
Constructor of Signer class.
Parameters
----------
pem_value : str
Value of the encrypted private key.
pem_password : str
Password to unlock the PEM file.
"""
self.__signer = self._load_signer(pem_value, pem_password)
def sign_message(self, message: str) -> str:
"""Sign a message.
Parameters
----------
message : str
The message to be signed.
Returns
-------
str
The signature of the message.
Raises
------
SignerError
If the message cannot be signed.
"""
try:
message_bytes = message.encode()
signature = self.__signer.sign(message_bytes)
return signature.hex()
except Exception:
raise SignerError(
f'unable to compute signature of message: {message}')
def verify_message(self, message: str, signature: str) -> bool:
"""Verify that the message is valid (signed by the same
private key).
Parameters
----------
message : str
The message to be verified.
signature : str
The signature of the message.
Returns
-------
bool
If the signature of the message is valid.
Raises
------
SignerError
If the message cannot be verified.
"""
try:
message_bytes = message.encode()
try:
self.__signer.verify(message_bytes, bytes.fromhex(signature))
return True
except ValueError:
return False
except Exception:
raise SignerError(
f'unable to verify signature of message {message}')
def build_message(self, separator: str = '', *message_parts:
typing.Any) -> str:
"""Build a message.
Parameters
----------
        separator : str
            The string separating the individual message parts
            (default: '').
        *message_parts : typing.Any
            Ordered message parts to be concatenated into one
            message.
Returns
-------
str
The built message.
"""
        # join avoids trimming a trailing character when the separator
        # is empty or longer than one character
        return separator.join(
            str(message_part) for message_part in message_parts)
def _load_signer(
self, pem_value: str,
pem_password: str) -> Crypto.Signature.eddsa.EdDSASigScheme:
"""Load the EdDSA signer object from a password-encrypted pem file.
The key must be on the curve Ed25519 or Ed448.
Parameters
----------
pem_value : str
Value of the encrypted private key.
pem_password : str
Password to unlock the PEM file.
Returns
-------
Crypto.Signature.eddsa.EdDSASigScheme
An EdDSA signature object.
Raises
------
SignerError
If the EdDSA signer cannot be loaded.
"""
try:
if pem_password is None:
pem_password = getpass.getpass(
'Password for decrypting the pem file')
private_key = Crypto.PublicKey.ECC.import_key(
pem_value, passphrase=pem_password)
return Crypto.Signature.eddsa.new(private_key,
'rfc8032') # type: ignore
except SignerError:
raise
except Exception:
raise SignerError('cannot load the private key')
_signer: typing.Optional[_Signer] = None
def get_signer(pem_value: str, pem_password: str) -> _Signer:
"""Get a _Signer object.
Parameters
----------
pem_value : str
Value of the encrypted private key.
pem_password : str
Password to unlock the PEM file.
Returns
-------
_Signer
Signer object used for signing and verifying messages.
Raises
------
SignerError
If the signer cannot be gotten.
"""
global _signer
if not _signer:
_signer = _Signer(pem_value, pem_password)
return _signer
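# Hedged usage sketch: sign and verify a bid-style message; the PEM value
# and password are assumed to come from the caller's configuration.
def _example_signer_usage(pem_value: str, pem_password: str) -> bool:
    signer = get_signer(pem_value, pem_password)
    message = signer.build_message('-', 100, 1700000000, 0, 1, 600)
    signature = signer.sign_message(message)
    return signer.verify_message(message, signature)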
| 5,043 | Python | .py | 156 | 22.333333 | 77 | 0.548979 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,171 | types.py | pantos-io_common/pantos/common/types.py | """Module that defines value object types.
"""
import collections.abc
import decimal
import typing
class BlockchainAddress(str):
pass
class PrivateKey(str):
pass
class TokenSymbol(str):
pass
AccountId = typing.Union[BlockchainAddress, PrivateKey]
Amount = typing.Union[int, decimal.Decimal]
ContractFunctionArgs = collections.abc.Sequence[typing.Union[
bool, int, str, collections.abc.Sequence[typing.Union[bool, int, str]]]]
TokenId = typing.Union[BlockchainAddress, TokenSymbol]
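# Hedged sketch: an AccountId may be either an address or a private key
# (placeholder values shown).
def _example_account_id(use_address: bool) -> AccountId:
    return (BlockchainAddress('0x0000000000000000000000000000000000000000')
            if use_address else PrivateKey('<placeholder private key>'))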
| 509 | Python | .py | 16 | 29.0625 | 76 | 0.800416 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,172 | enums.py | pantos-io_common/pantos/common/blockchains/enums.py | """Module that defines blockchain-specific enumerations.
"""
import enum
class Blockchain(enum.IntEnum):
"""Enumeration of supported blockchain networks.
"""
ETHEREUM = 0
BNB_CHAIN = 1
# Decommissioned: BITCOIN_RSK = 2
AVALANCHE = 3
SOLANA = 4
POLYGON = 5
CRONOS = 6
# Renamed from "FANTOM" to "SONIC" on 2024-10-17 due to
# network renaming
SONIC = 7
CELO = 8
    # Decommissioned: AURORA = 9
@property
def name_in_pascal_case(self) -> str:
"""The name of the blockchain network in pascal case.
"""
return ''.join(word.capitalize() for word in self.name.split('_'))
@staticmethod
def from_name(name: str) -> 'Blockchain':
"""Find an enumeration member by its name.
Parameters
----------
name : str
The name to search for.
Raises
------
NameError
If no enumeration member can be found for the given name.
"""
name_upper = name.upper()
for blockchain in Blockchain:
if name_upper == blockchain.name:
return blockchain
raise NameError(name)
class ContractAbi(enum.Enum):
"""Enumeration of supported contract ABIs.
"""
STANDARD_TOKEN = 0
PANTOS_TOKEN = 1
PANTOS_HUB = 2
PANTOS_FORWARDER = 3
def get_file_name(self, blockchain: Blockchain) -> str:
"""Get the name of the contract ABI file.
Parameters
----------
blockchain : Blockchain
The blockchain to get the contract ABI file name for.
Returns
-------
str
The name of the contract ABI file.
"""
return f'{blockchain.name.lower()}_{self.name.lower()}.abi'
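# Hedged usage sketch of the enum helpers above; the expected values follow
# directly from the member names.
def _example_enum_usage() -> str:
    blockchain = Blockchain.from_name('bnb_chain')
    assert blockchain.name_in_pascal_case == 'BnbChain'
    # evaluates to 'bnb_chain_pantos_hub.abi'
    return ContractAbi.PANTOS_HUB.get_file_name(blockchain)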
| 1,777 | Python | .py | 59 | 22.728814 | 74 | 0.592005 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,173 | factory.py | pantos-io_common/pantos/common/blockchains/factory.py | """Factory for Pantos blockchain utilities.
"""
import typing
from pantos.common.blockchains.base import BlockchainUtilities
from pantos.common.blockchains.enums import Blockchain
from pantos.common.exceptions import NotInitializedError
_blockchain_utilities: dict[Blockchain, BlockchainUtilities] = {}
"""Blockchain-specific utilities objects."""
_blockchain_utilities_classes = BlockchainUtilities.find_subclasses()
"""Blockchain-specific utilities classes."""
def initialize_blockchain_utilities(
blockchain: Blockchain, blockchain_node_urls: list[str],
fallback_blockchain_node_urls: list[str], average_block_time: int,
required_transaction_confirmations: int,
transaction_network_id: typing.Optional[int],
default_private_key: typing.Optional[tuple[str, str]] = None,
celery_tasks_enabled: bool = False) -> None:
"""Initialize the utilities for the specified blockchain.
Parameters
----------
blockchain : Blockchain
The blockchain to initialize a utilities instance for.
    blockchain_node_urls : list of str
The URLs of the blockchain nodes to use for the specified
blockchain.
fallback_blockchain_node_urls : list[str]
The URLs of the fallback nodes to use for communication
with the blockchain network.
average_block_time : int
The average time in seconds required to generate a new block of
the blockchain.
required_transaction_confirmations : int
The number of required confirmations for a transaction to be
considered included in the blockchain.
transaction_network_id : int or None
The unique public (i.e. non-Pantos-specific) blockchain network
identifier (partly called chain ID) to be used for signing
transactions (to prevent replay attacks between different
compatible blockchain networks). It is assumed to be the
identifier of the main or a test network of the specified
blockchain.
default_private_key : tuple of str and str, optional
The keystore value and password of the default private
key to be used by the blockchain utilities. (default: None).
celery_tasks_enabled : bool, optional
If True, Celery tasks are enabled for enhanced functionalities
(default: False). This requires a proper Celery environment to
be set up by the project using the blockchain utilities.
Raises
------
BlockchainUtilitiesError
If the blockchain-specific utilities cannot be initialized.
"""
utilities_class = _blockchain_utilities_classes[blockchain]
_blockchain_utilities[blockchain] = utilities_class(
blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time, required_transaction_confirmations,
transaction_network_id, default_private_key=default_private_key,
celery_tasks_enabled=celery_tasks_enabled)
def get_blockchain_utilities(blockchain: Blockchain) -> BlockchainUtilities:
"""Factory for blockchain-specific utilities objects.
Parameters
----------
blockchain : Blockchain
The blockchain to get the utilities instance for.
Returns
-------
BlockchainUtilities
A blockchain utilities instance for the specified blockchain.
Raises
------
NotInitializedError
If the utilities have not been initialized for the specified
blockchain.
"""
try:
return _blockchain_utilities[blockchain]
except KeyError:
raise NotInitializedError(
f'{blockchain.name} utilities have not been initialized')
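# Hedged usage sketch (the node URL and timing values are placeholders):
# the utilities must be initialized once per blockchain before the factory
# can hand them out.
def _example_factory_usage() -> BlockchainUtilities:
    initialize_blockchain_utilities(Blockchain.ETHEREUM,
                                    ['https://example-node.invalid'], [],
                                    average_block_time=12,
                                    required_transaction_confirmations=12,
                                    transaction_network_id=1)
    return get_blockchain_utilities(Blockchain.ETHEREUM)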
| 3,642 | Python | .py | 80 | 38.9875 | 76 | 0.727375 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,174 | solana.py | pantos-io_common/pantos/common/blockchains/solana.py | """Module for Solana-specific utilities and errors.
"""
import typing
from pantos.common.blockchains.base import BlockchainUtilities
from pantos.common.blockchains.base import BlockchainUtilitiesError
from pantos.common.blockchains.base import NodeConnections
from pantos.common.blockchains.base import UnhealthyNode
from pantos.common.blockchains.enums import Blockchain
from pantos.common.entities import TransactionStatus
class SolanaUtilitiesError(BlockchainUtilitiesError):
"""Exception class for all Solana utilities errors.
"""
pass
class SolanaUtilities(BlockchainUtilities):
"""Class for Solana-specific utilities.
"""
def __init__(self, blockchain_node_urls: list[str],
fallback_blockchain_node_urls: list[str],
average_block_time: int,
required_transaction_confirmations: int,
transaction_network_id: typing.Optional[int],
default_private_key: typing.Optional[tuple[str, str]] = None,
celery_tasks_enabled: bool = False):
# Docstring inherited
super().__init__(blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time,
required_transaction_confirmations,
transaction_network_id,
default_private_key=default_private_key,
celery_tasks_enabled=celery_tasks_enabled)
def get_address(self, private_key: str) -> str:
# Docstring inherited
raise NotImplementedError # pragma: no cover
def get_balance(
self, account_address: str,
token_address: typing.Optional[str] = None,
node_connections: typing.Optional[NodeConnections] = None) -> int:
# Docstring inherited
raise NotImplementedError # pragma: no cover
@classmethod
def get_blockchain(cls) -> Blockchain:
# Docstring inherited
return Blockchain.SOLANA
@classmethod
def get_error_class(cls) -> type[BlockchainUtilitiesError]:
# Docstring inherited
return SolanaUtilitiesError
def is_valid_address(self, address: str) -> bool:
# Docstring inherited
raise NotImplementedError # pragma: no cover
def is_equal_address(self, address_one: str, address_two: str) -> bool:
# Docstring inherited
raise NotImplementedError # pragma: no cover
def get_unhealthy_nodes(
self, blockchain_nodes: list[str],
timeout: float | tuple | None = None) -> list[UnhealthyNode]:
# Docstring inherited
raise NotImplementedError # pragma: no cover
def _get_transaction_method_names(self) -> list[str]:
# Docstring inherited
raise NotImplementedError # pragma: no cover
def decrypt_private_key(self, encrypted_key: str, password: str) -> str:
# Docstring inherited
raise NotImplementedError # pragma: no cover
def read_transaction_status(
self, transaction_id: str,
node_connections: typing.Optional[NodeConnections] = None) \
-> TransactionStatus:
# Docstring inherited
raise NotImplementedError # pragma: no cover
def submit_transaction(
self, request: BlockchainUtilities.TransactionSubmissionRequest,
node_connections: typing.Optional[NodeConnections] = None) \
-> BlockchainUtilities.TransactionSubmissionResponse:
# Docstring inherited
raise NotImplementedError # pragma: no cover
def _create_single_node_connection(
self, blockchain_node_url: str,
timeout: typing.Optional[typing.Union[float,
tuple]] = None) \
-> typing.Any:
# Docstring inherited
raise NotImplementedError # pragma: no cover
| 3,908 | Python | .py | 83 | 37.012048 | 78 | 0.660447 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,175 | cronos.py | pantos-io_common/pantos/common/blockchains/cronos.py | """Module for Cronos-specific utilities and errors. Since Cronos is
Ethereum-compatible, the utilities implementation inherits from the
pantos.common.blockchains.ethereum module.
"""
from pantos.common.blockchains.base import BlockchainUtilitiesError
from pantos.common.blockchains.enums import Blockchain
from pantos.common.blockchains.ethereum import EthereumUtilities
from pantos.common.blockchains.ethereum import EthereumUtilitiesError
class CronosUtilitiesError(EthereumUtilitiesError):
"""Exception class for all Cronos utilities errors.
"""
pass
class CronosUtilities(EthereumUtilities):
"""Class for Cronos-specific utilities.
"""
@classmethod
def get_blockchain(cls) -> Blockchain:
# Docstring inherited
return Blockchain.CRONOS
@classmethod
def get_error_class(cls) -> type[BlockchainUtilitiesError]:
# Docstring inherited
return CronosUtilitiesError
| 938 | Python | .py | 23 | 36.478261 | 69 | 0.796031 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,176 | polygon.py | pantos-io_common/pantos/common/blockchains/polygon.py | """Module for Polygon-specific utilities and errors. Since Polygon is
Ethereum-compatible, the utilities implementation inherits from the
pantos.common.blockchains.ethereum module.
"""
from pantos.common.blockchains.base import BlockchainUtilitiesError
from pantos.common.blockchains.enums import Blockchain
from pantos.common.blockchains.ethereum import EthereumUtilities
from pantos.common.blockchains.ethereum import EthereumUtilitiesError
class PolygonUtilitiesError(EthereumUtilitiesError):
"""Exception class for all Polygon utilities errors.
"""
pass
class PolygonUtilities(EthereumUtilities):
"""Class for Polygon-specific utilities.
"""
@classmethod
def get_blockchain(cls) -> Blockchain:
# Docstring inherited
return Blockchain.POLYGON
@classmethod
def get_error_class(cls) -> type[BlockchainUtilitiesError]:
# Docstring inherited
return PolygonUtilitiesError
| 946 | Python | .py | 23 | 36.826087 | 69 | 0.797814 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,177 | celo.py | pantos-io_common/pantos/common/blockchains/celo.py | """Module for Celo-specific utilities and errors. Since Celo is
Ethereum-compatible, the utilities implementation inherits from the
pantos.common.blockchains.ethereum module.
"""
from pantos.common.blockchains.base import BlockchainUtilitiesError
from pantos.common.blockchains.enums import Blockchain
from pantos.common.blockchains.ethereum import EthereumUtilities
from pantos.common.blockchains.ethereum import EthereumUtilitiesError
class CeloUtilitiesError(EthereumUtilitiesError):
"""Exception class for all Celo utilities errors.
"""
pass
class CeloUtilities(EthereumUtilities):
"""Class for Celo-specific utilities.
"""
@classmethod
def get_blockchain(cls) -> Blockchain:
# Docstring inherited
return Blockchain.CELO
@classmethod
def get_error_class(cls) -> type[BlockchainUtilitiesError]:
# Docstring inherited
return CeloUtilitiesError
| 922 | Python | .py | 23 | 35.782609 | 69 | 0.792368 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,178 | ethereum.py | pantos-io_common/pantos/common/blockchains/ethereum.py | """Module for Ethereum-specific utilities and errors.
"""
import logging
import typing
import urllib.parse
import web3
import web3.contract.contract
import web3.exceptions
import web3.middleware
import web3.types
from pantos.common.blockchains.base import GENERAL_RPC_ERROR_MESSAGE
from pantos.common.blockchains.base import BlockchainUtilities
from pantos.common.blockchains.base import BlockchainUtilitiesError
from pantos.common.blockchains.base import NodeConnections
from pantos.common.blockchains.base import ResultsNotMatchingError
from pantos.common.blockchains.base import SingleNodeConnectionError
from pantos.common.blockchains.base import UnhealthyNode
from pantos.common.blockchains.base import VersionedContractAbi
from pantos.common.blockchains.enums import Blockchain
from pantos.common.blockchains.enums import ContractAbi
from pantos.common.entities import TransactionStatus
from pantos.common.protocol import get_latest_protocol_version
from pantos.common.types import BlockchainAddress
_NONCE_TOO_LOW = ['nonce too low', 'invalid nonce', 'ERR_INCORRECT_NONCE']
"""Possible nonce too low error messages."""
_TRANSACTION_METHOD_NAMES = [
'send_transaction', 'replace_transaction', 'modify_transaction',
'send_raw_transaction', 'transact'
]
"""The names of methods of the blockchain interactor object which
send transactions."""
_NO_ARCHIVE_NODE_RPC_ERROR_MESSAGE = 'missing trie node'
_NO_ARCHIVE_NODE_LOG_MESSAGE = 'due to the absence of an archive node'
_logger = logging.getLogger(__name__)
class EthereumUtilitiesError(BlockchainUtilitiesError):
"""Exception class for all Ethereum utilities errors.
"""
pass
class EthereumUtilities(BlockchainUtilities):
"""Class for Ethereum-specific utilities.
"""
def __init__(self, blockchain_node_urls: list[str],
fallback_blockchain_node_urls: list[str],
average_block_time: int,
required_transaction_confirmations: int,
transaction_network_id: typing.Optional[int],
default_private_key: typing.Optional[tuple[str, str]] = None,
celery_tasks_enabled: bool = False):
# Docstring inherited
if transaction_network_id is None:
raise self._create_error(
'transaction network ID (chain ID) must be given')
super().__init__(blockchain_node_urls, fallback_blockchain_node_urls,
average_block_time,
required_transaction_confirmations,
transaction_network_id,
default_private_key=default_private_key,
celery_tasks_enabled=celery_tasks_enabled)
def create_node_connections(
self,
timeout: typing.Optional[typing.Union[float, tuple]] = None) \
-> NodeConnections[web3.Web3]:
# Docstring inherited
return super().create_node_connections(timeout)
def create_contract(
self, contract_address: BlockchainAddress,
versioned_contract_abi: VersionedContractAbi,
node_connections: typing.Optional[NodeConnections] = None) \
-> NodeConnections.Wrapper[web3.contract.Contract]:
"""Create a contract instance.
Parameters
----------
contract_address : BlockchainAddress
The address of the contract.
versioned_contract_abi : VersionedContractAbi
The version and the contract ABI to load.
        node_connections : NodeConnections, optional
            The node connections object to be used (default: None).
Returns
-------
NodeConnections.Wrapper[web3.contract.Contract]
The wrapper instance over the contract object.
Raises
------
EthereumUtilitiesError
If the contract instance cannot be created.
"""
if not self.is_valid_address(contract_address):
raise self._create_error('invalid contract address',
contract_address=contract_address)
node_connections = self.__get_node_connections(node_connections)
try:
return node_connections.eth.contract(
address=typing.cast(web3.types.ChecksumAddress,
contract_address),
abi=self.load_contract_abi(versioned_contract_abi))
except Exception:
raise self._create_error(
'unable to create a contract instance',
contract_address=contract_address,
contract_abi=versioned_contract_abi.contract_abi,
version=versioned_contract_abi.version)
def get_address(self, private_key: str) -> str:
# Docstring inherited
try:
address = web3.Account.from_key(private_key).address
assert address.startswith('0x')
return address
except Exception:
raise self._create_error(
'cannot determine the address from a private key')
def get_balance(
self, account_address: str,
token_address: typing.Optional[str] = None,
node_connections: typing.Optional[NodeConnections] = None) -> int:
# Docstring inherited
if not self.is_valid_address(account_address):
raise self._create_error('invalid account address')
node_connections = self.__get_node_connections(node_connections)
if token_address is None:
try:
return node_connections.eth.get_balance(
typing.cast(web3.types.ChecksumAddress,
account_address)).get()
except ResultsNotMatchingError:
raise
except Exception:
raise self._create_error('cannot determine balance')
else:
versioned_contract_abi = VersionedContractAbi(
ContractAbi.STANDARD_TOKEN, get_latest_protocol_version())
if not self.is_valid_address(token_address):
raise self._create_error('invalid token address')
erc20_contract = self.create_contract(
BlockchainAddress(token_address), versioned_contract_abi,
node_connections)
try:
return erc20_contract.functions.\
balanceOf(account_address).call().get()
except ResultsNotMatchingError:
raise
except Exception:
raise self._create_error("cannot call the contract")
@classmethod
def get_blockchain(cls) -> Blockchain:
# Docstring inherited
return Blockchain.ETHEREUM
@classmethod
def get_error_class(cls) -> type[BlockchainUtilitiesError]:
# Docstring inherited
return EthereumUtilitiesError
def get_logs(
self,
event: NodeConnections.Wrapper[
web3.contract.contract.ContractEvent],
from_block_number: int, to_block_number: int) \
-> typing.List[web3.types.EventData]:
"""Retrieves the logs of a contract event from a block range.
Parameters
----------
event : NodeConnections.Wrapper[
web3.contract.contract.ContractEvent]
The instance of the wrapper over the contract event,
used for retrieving the associated logs.
from_block_number : int
The block number to start the search from.
to_block_number : int
The block number to end the search at.
Returns
-------
list of web3.types.EventData
The logs of the contract event.
Raises
------
ResultsNotMatchingError
If the results given by the configured blockchain
nodes do not match.
BlockchainUtilitiesError
            If the event logs cannot be processed.
"""
try:
            # Query all logs of the contract event between the two
            # specified block numbers, filtered by the contract's address
logs = event.get_logs(fromBlock=from_block_number,
toBlock=to_block_number, argument_filters={
'address': event.address
}).get()
return logs
except ResultsNotMatchingError:
raise
except Exception:
assert event is not None
raise self._create_error(
'cannot process the "{}" event logs of the contract with the '
'address "{}"'.format(event.event_name, event.address))
def is_valid_address(self, address: str) -> bool:
# Docstring inherited
return web3.Web3.is_checksum_address(address)
def is_equal_address(self, address_one: str, address_two: str) -> bool:
# Docstring inherited
return address_one.lower() == address_two.lower()
def get_unhealthy_nodes(
self, blockchain_nodes: list[str],
timeout: float | tuple | None = None) -> list[UnhealthyNode]:
# Docstring inherited
unhealthy_nodes = []
for blockchain_node in blockchain_nodes:
try:
self._create_single_node_connection(blockchain_node, timeout)
except SingleNodeConnectionError:
unhealthy_nodes.append(
UnhealthyNode(
urllib.parse.urlparse(blockchain_node).netloc,
GENERAL_RPC_ERROR_MESSAGE))
return unhealthy_nodes
def _get_transaction_method_names(self) -> list[str]:
# Docstring inherited
return _TRANSACTION_METHOD_NAMES
def decrypt_private_key(self, encrypted_key: str, password: str) -> str:
# Docstring inherited
try:
private_key = web3.Account.decrypt(encrypted_key, password).hex()
except Exception:
raise self._create_error('cannot load the private key')
assert not private_key.startswith('0x')
return private_key
def read_transaction_status(
self, transaction_id: str,
node_connections: typing.Optional[NodeConnections] = None) \
-> TransactionStatus:
# Docstring inherited
try:
node_connections = self.__get_node_connections(node_connections)
try:
transaction_receipt = \
node_connections.eth.get_transaction_receipt(
typing.cast(web3.types.HexStr, transaction_id)).get()
except web3.exceptions.TransactionNotFound:
return TransactionStatus.UNINCLUDED
assert (transaction_receipt['transactionHash'].to_0x_hex() ==
transaction_id)
transaction_block_number = transaction_receipt['blockNumber']
if transaction_block_number is None:
return TransactionStatus.UNINCLUDED
current_block_number = \
node_connections.eth.get_block_number().get_minimum_result()
confirmations = current_block_number - transaction_block_number
assert confirmations >= 0
if confirmations < self.required_transaction_confirmations:
return TransactionStatus.UNCONFIRMED
transaction_status = transaction_receipt['status']
if transaction_status != 1:
revert_message = self.__retrieve_revert_message(
transaction_id, node_connections)
_logger.info(revert_message, extra=transaction_receipt)
return TransactionStatus.REVERTED
return TransactionStatus.CONFIRMED
except ResultsNotMatchingError:
raise
except Exception:
raise self._create_error(
'unable to read the status of a transaction',
transaction_id=transaction_id)
def submit_transaction(
self, request: BlockchainUtilities.TransactionSubmissionRequest,
node_connections: typing.Optional[NodeConnections] = None) \
-> BlockchainUtilities.TransactionSubmissionResponse:
# Docstring inherited
if self._default_private_key is None:
raise self._create_error('default private key must be available')
self.__check_transaction_submission_request(request)
try:
node_connections = self.__get_node_connections(node_connections)
transaction_parameters, adaptable_fee_per_gas = \
self.__create_transaction_parameters(request, node_connections)
_logger.info('new transaction to be submitted',
extra=vars(request) | transaction_parameters)
contract = self.create_contract(request.contract_address,
request.versioned_contract_abi,
node_connections)
contract_function = contract.get_function_by_selector(
request.function_selector)
transaction = contract_function(
*request.function_args).build_transaction(
transaction_parameters).get()
signed_transaction = web3.Account.sign_transaction(
transaction, private_key=self._default_private_key)
transaction_hash = self.__send_raw_transaction(
signed_transaction.raw_transaction, node_connections)
return BlockchainUtilities.TransactionSubmissionResponse(
transaction_hash, adaptable_fee_per_gas)
except EthereumUtilitiesError:
raise
except ResultsNotMatchingError:
raise
except Exception:
raise self._create_error('unable to submit a transaction',
request=request)
def _create_single_node_connection(
self, blockchain_node_url: str,
timeout: float | tuple | None = None) -> typing.Any:
# Docstring inherited
request_kwargs = {'timeout': timeout}
try:
w3 = web3.Web3(
web3.Web3.HTTPProvider(blockchain_node_url,
request_kwargs=request_kwargs))
if w3.is_connected():
try:
w3.eth.get_block('latest')
except web3.exceptions.ExtraDataLengthError:
w3.middleware_onion.inject(
web3.middleware.geth_poa_middleware, layer=0)
_logger.info(
'new blockchain node connection', extra={
'blockchain': self.get_blockchain(),
'blockchain_node_domain': urllib.parse.urlparse(
blockchain_node_url).netloc,
'client_version': w3.client_version
})
return w3
except Exception:
raise self._create_single_node_connection_error()
raise self._create_single_node_connection_error()
def _type_2_transactions_supported(self) -> bool:
return True # pragma: no cover
def __check_transaction_submission_request(
self, request: BlockchainUtilities.TransactionSubmissionRequest) \
-> None:
if request.gas is not None and request.gas < 21000:
raise self._create_error('gas must be >= 21000', request=request)
if request.min_adaptable_fee_per_gas < 0:
raise self._create_error(
'minimum adaptable fee per gas must be >= 0', request=request)
if (request.max_total_fee_per_gas is not None
and request.min_adaptable_fee_per_gas
> request.max_total_fee_per_gas):
raise self._create_max_total_fee_per_gas_exceeded_error(
request=request)
if request.amount is not None and request.amount < 0:
raise self._create_error('amount must be >= 0', request=request)
if request.nonce < 0:
raise self._create_error('nonce must be >= 0', request=request)
def __create_transaction_parameters(
self, request: BlockchainUtilities.TransactionSubmissionRequest,
node_connections: NodeConnections[web3.Web3]) -> \
tuple[web3.types.TxParams, int]:
assert self.transaction_network_id is not None
assert self._default_address is not None
transaction_parameters: web3.types.TxParams = {
'chainId': self.transaction_network_id,
'from': self._default_address,
'nonce': web3.types.Nonce(request.nonce)
}
if request.gas is not None:
# If gas is not explicitly specified, it is automatically
# estimated using w3.eth.estimate_gas()
transaction_parameters['gas'] = web3.types.Wei(request.gas)
if request.amount is not None:
transaction_parameters['value'] = web3.types.Wei(request.amount)
if self._type_2_transactions_supported():
# EIP-1559 transaction
base_fee_per_gas = node_connections.eth.get_block(
'latest')['baseFeePerGas'].get_minimum_result()
max_priority_fee_per_gas = request.min_adaptable_fee_per_gas
max_fee_per_gas = 2 * base_fee_per_gas + max_priority_fee_per_gas
if (request.max_total_fee_per_gas is not None
and request.max_total_fee_per_gas < max_fee_per_gas):
_logger.warning(
f'maximum total fee per gas < {max_fee_per_gas}',
extra=vars(request))
max_fee_per_gas = request.max_total_fee_per_gas
transaction_parameters['maxPriorityFeePerGas'] = web3.types.Wei(
max_priority_fee_per_gas)
transaction_parameters['maxFeePerGas'] = web3.types.Wei(
max_fee_per_gas)
adaptable_fee_per_gas = max_priority_fee_per_gas
else:
# Legacy type-0 transaction
gas_price = max(
node_connections.eth.gas_price.get_minimum_result(),
request.min_adaptable_fee_per_gas)
if (request.max_total_fee_per_gas is not None
and request.max_total_fee_per_gas < gas_price):
_logger.warning(f'maximum total fee per gas < {gas_price}',
extra=vars(request))
gas_price = request.max_total_fee_per_gas
transaction_parameters['gasPrice'] = web3.types.Wei(gas_price)
adaptable_fee_per_gas = gas_price
return transaction_parameters, adaptable_fee_per_gas
def __send_raw_transaction(
self, raw_transaction: bytes,
node_connections: NodeConnections[web3.Web3]) -> str:
try:
return typing.cast(
str,
node_connections.eth.send_raw_transaction(
raw_transaction).to_0x_hex())
except ValueError as error:
if any(error_message in str(error)
for error_message in _NONCE_TOO_LOW):
raise self._create_transaction_nonce_too_low_error()
if 'transaction underpriced' in str(error):
raise self._create_transaction_underpriced_error()
raise
def __get_node_connections(
self, node_connections: typing.Optional[NodeConnections] = None) \
-> NodeConnections[web3.Web3]:
if node_connections is None:
node_connections = self.create_node_connections()
return node_connections
def __retrieve_revert_message(
self, transaction_hash: str,
node_connections: NodeConnections[web3.Web3]) -> str:
revert_message = 'unknown'
try:
full_tx = node_connections.eth.get_transaction(
typing.cast(web3.types.HexStr, transaction_hash)).get()
replay_tx = {
'from': full_tx['from'],
'to': full_tx['to'],
'value': full_tx['value'],
'data': full_tx['input']
}
context_block_number = full_tx['blockNumber'] - 1
try:
node_connections.eth.call(
typing.cast(web3.types.TxParams, replay_tx),
context_block_number).get()
except web3.exceptions.ContractLogicError as error:
revert_message = str(error)
except ValueError as error:
if _NO_ARCHIVE_NODE_RPC_ERROR_MESSAGE in error.args[0].get(
'message'):
revert_message += f' {_NO_ARCHIVE_NODE_LOG_MESSAGE}'
except Exception:
_logger.warning('unable to retrieve the revert message',
exc_info=True)
return revert_message
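# Hedged arithmetic sketch mirroring the type-2 (EIP-1559) fee computation
# in __create_transaction_parameters above: with a base fee of 30 gwei and
# a minimum adaptable (priority) fee of 2 gwei, the maximum fee per gas is
# 2 * 30 + 2 = 62 gwei, optionally capped by the maximum total fee per gas.
def _example_type_2_fee(base_fee_per_gas: int, min_adaptable_fee_per_gas: int,
                        max_total_fee_per_gas: int | None) -> tuple[int, int]:
    max_priority_fee_per_gas = min_adaptable_fee_per_gas
    max_fee_per_gas = 2 * base_fee_per_gas + max_priority_fee_per_gas
    if (max_total_fee_per_gas is not None
            and max_total_fee_per_gas < max_fee_per_gas):
        max_fee_per_gas = max_total_fee_per_gas
    return max_priority_fee_per_gas, max_fee_per_gas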
| 20,973 | Python | .py | 439 | 34.954442 | 79 | 0.60773 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,179 | tasks.py | pantos-io_common/pantos/common/blockchains/tasks.py | import dataclasses
import logging
import typing
import uuid
import celery # type: ignore
import celery.result # type: ignore
from pantos.common.blockchains.base import BlockchainUtilities
from pantos.common.blockchains.base import MaxTotalFeePerGasExceededError
from pantos.common.blockchains.enums import Blockchain
from pantos.common.blockchains.factory import get_blockchain_utilities
from pantos.common.entities import TransactionStatus
_MAX_TRANSACTION_RESUBMISSION_TASK_RETRIES = 1000
TransactionResubmissionRequest = \
BlockchainUtilities.TransactionResubmissionRequest
TransactionSubmissionResponse = \
BlockchainUtilities.TransactionSubmissionResponse
TransactionSubmissionStartRequest = \
BlockchainUtilities.TransactionSubmissionStartRequest
assert (sorted(
field.name
for field in dataclasses.fields(TransactionResubmissionRequest)) == sorted(
field.name
for field in dataclasses.fields(TransactionSubmissionStartRequest)
if field.name != 'blocks_until_resubmission'))
_logger = logging.getLogger(__name__)
def create_transaction_resubmission_task(
blockchain: Blockchain, request: TransactionSubmissionStartRequest,
response: TransactionSubmissionResponse) -> uuid.UUID:
"""Create a new transaction resubmission task execution after the
initial transaction submission.
Parameters
----------
blockchain : Blockchain
The blockchain the transaction has been submitted to.
request : TransactionSubmissionStartRequest
The original request for starting the transaction submission.
response : TransactionSubmissionResponse
The response from the initial transaction submission.
Returns
-------
uuid.UUID
The internal transaction ID associated with the task.
Raises
------
Exception
If Celery has an issue with submitting the new task.
"""
internal_transaction_id = uuid.uuid4()
blockchain_utilities = get_blockchain_utilities(blockchain)
request = dataclasses.replace(
request, min_adaptable_fee_per_gas=response.adaptable_fee_per_gas)
request_dict = request.to_dict()
del request_dict['blocks_until_resubmission']
task_args = (blockchain.value, request.blocks_until_resubmission,
response.transaction_id, request_dict)
task_id = str(internal_transaction_id)
countdown = (blockchain_utilities.average_block_time *
request.blocks_until_resubmission)
_transaction_resubmission_task.apply_async(args=task_args, task_id=task_id,
countdown=countdown)
return internal_transaction_id
def get_transaction_resubmission_task_result(
internal_transaction_id: uuid.UUID) \
-> typing.Optional[tuple[TransactionStatus, str]]:
"""Get the result of a transaction resubmission task execution.
Parameters
----------
internal_transaction_id : uuid.UUID
The internal transaction ID associated with the task.
Returns
-------
tuple or None
None if the task has not yet finished, else a pair of the status
and ID of the submitted transaction.
Raises
------
Exception
If there has been an unresolvable error during the task
execution.
"""
task_id = str(internal_transaction_id)
task_result = celery.result.AsyncResult(task_id)
if not task_result.ready():
return None
assert task_result.state in ['SUCCESS', 'FAILURE']
transaction_status_id, transaction_id = task_result.get(
disable_sync_subtasks=False)
return TransactionStatus(transaction_status_id), transaction_id
@celery.shared_task(bind=True,
max_retries=_MAX_TRANSACTION_RESUBMISSION_TASK_RETRIES)
def _transaction_resubmission_task(
self, blockchain_id: int, blocks_until_resubmission: int,
transaction_id: str,
request_dict: dict[str, typing.Any]) -> tuple[int, str]:
blockchain = Blockchain(blockchain_id)
blockchain_utilities = get_blockchain_utilities(blockchain)
task_info = request_dict | {
'blockchain': blockchain,
'blocks_until_resubmission': blocks_until_resubmission,
'transaction_id': transaction_id,
'internal_transaction_id': self.request.id
}
resubmission_countdown = (blockchain_utilities.average_block_time *
blocks_until_resubmission)
confirmation_countdown = (
blockchain_utilities.average_block_time *
blockchain_utilities.required_transaction_confirmations)
try:
transaction_status = blockchain_utilities.read_transaction_status(
transaction_id)
except Exception as error:
_logger.error('unable to read the transaction status', extra=task_info,
exc_info=True)
raise self.retry(countdown=resubmission_countdown, exc=error)
_logger.info(f'transaction {transaction_status.name.lower()}',
extra=task_info)
if transaction_status is TransactionStatus.UNINCLUDED:
request = TransactionResubmissionRequest.from_dict(request_dict)
try:
response = blockchain_utilities.resubmit_transaction(request)
except MaxTotalFeePerGasExceededError as error:
_logger.warning(
'unable to further increase the adaptable fee per gas',
extra=task_info)
countdown = confirmation_countdown * self.request.retries
raise self.retry(countdown=countdown, exc=error)
except Exception as error:
_logger.error('unable to resubmit a transaction', extra=task_info,
exc_info=True)
raise self.retry(countdown=resubmission_countdown, exc=error)
_logger.info(
'adaptable fee per gas increased to '
f'{response.adaptable_fee_per_gas}', extra=task_info)
request.min_adaptable_fee_per_gas = response.adaptable_fee_per_gas
task_args = (blockchain_id, blocks_until_resubmission,
response.transaction_id, request.to_dict())
raise self.retry(args=task_args, countdown=resubmission_countdown)
if transaction_status is TransactionStatus.UNCONFIRMED:
raise self.retry(countdown=confirmation_countdown)
assert transaction_status in [
TransactionStatus.CONFIRMED, TransactionStatus.REVERTED
]
return transaction_status.value, transaction_id
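# Hedged usage sketch (assumes a configured Celery environment, initialized
# blockchain utilities, and request/response objects from a prior
# transaction submission).
def _example_resubmission_flow(
        blockchain: Blockchain, request: TransactionSubmissionStartRequest,
        response: TransactionSubmissionResponse) \
        -> typing.Optional[tuple[TransactionStatus, str]]:
    internal_transaction_id = create_transaction_resubmission_task(
        blockchain, request, response)
    # None until the task has finished, afterwards a (status, ID) pair
    return get_transaction_resubmission_task_result(internal_transaction_id)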
| 6,505 | Python | .py | 143 | 37.811189 | 79 | 0.712461 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,180 | avalanche.py | pantos-io_common/pantos/common/blockchains/avalanche.py | """Module for Avalanche-specific utilities and errors. Since the
Avalanche C-Chain is Ethereum-compatible, the utilities implementation
inherits from the pantos.common.blockchains.ethereum module.
"""
from pantos.common.blockchains.base import BlockchainUtilitiesError
from pantos.common.blockchains.enums import Blockchain
from pantos.common.blockchains.ethereum import EthereumUtilities
from pantos.common.blockchains.ethereum import EthereumUtilitiesError
class AvalancheUtilitiesError(EthereumUtilitiesError):
"""Exception class for all Avalanche utilities errors.
"""
pass
class AvalancheUtilities(EthereumUtilities):
"""Class for Avalanche-specific utilities.
"""
@classmethod
def get_blockchain(cls) -> Blockchain:
# Docstring inherited
return Blockchain.AVALANCHE
@classmethod
def get_error_class(cls) -> type[BlockchainUtilitiesError]:
# Docstring inherited
return AvalancheUtilitiesError
| 974 | Python | .py | 23 | 38.043478 | 70 | 0.800636 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,181 | sonic.py | pantos-io_common/pantos/common/blockchains/sonic.py | """Module for Sonic-specific utilities and errors. Since Sonic is
Ethereum-compatible, the utilities implementation inherits from the
pantos.common.blockchains.ethereum module.
Note that Pantos used to support Sonic's predecessor Fantom. This module
was renamed accordingly on 2024-10-17.
"""
from pantos.common.blockchains.base import BlockchainUtilitiesError
from pantos.common.blockchains.enums import Blockchain
from pantos.common.blockchains.ethereum import EthereumUtilities
from pantos.common.blockchains.ethereum import EthereumUtilitiesError
class SonicUtilitiesError(EthereumUtilitiesError):
"""Exception class for all Sonic utilities errors.
"""
pass
class SonicUtilities(EthereumUtilities):
"""Class for Sonic-specific utilities.
"""
@classmethod
def get_blockchain(cls) -> Blockchain:
# Docstring inherited
return Blockchain.SONIC
@classmethod
def get_error_class(cls) -> type[BlockchainUtilitiesError]:
# Docstring inherited
return SonicUtilitiesError
| 1,043 | Python | .py | 25 | 37.64 | 72 | 0.79782 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,182 | base.py | pantos-io_common/pantos/common/blockchains/base.py | """Base module that defines the common class and error interfaces for
all blockchain utilities modules.
"""
import abc
import collections
import copy
import dataclasses
import importlib
import importlib.resources
import inspect
import json
import logging
import math
import pathlib
import pkgutil
import random
import typing
import urllib.parse
import uuid
import semantic_version # type: ignore
from pantos.common.blockchains.enums import Blockchain
from pantos.common.blockchains.enums import ContractAbi
from pantos.common.entities import TransactionStatus
from pantos.common.exceptions import BaseError
from pantos.common.exceptions import ErrorCreator
from pantos.common.types import BlockchainAddress
from pantos.common.types import ContractFunctionArgs
_BASE_CONTRACT_ABI_PACKAGE = 'pantos.common.blockchains.contracts'
MIN_ADAPTABLE_FEE_INCREASE_FACTOR = 1.101
"""The minimum factor for increasing the adaptable fee per gas in
transaction resubmissions.
"""
GENERAL_RPC_ERROR_MESSAGE = 'unreachable'
H = typing.TypeVar('H', bound='BlockchainHandler')
T = typing.TypeVar('T',
bound='BlockchainUtilities.TransactionSubmissionRequest')
N = typing.TypeVar('N')
W = typing.TypeVar('W')
_logger = logging.getLogger(__name__)
class BlockchainUtilitiesError(BaseError):
"""Exception class for all blockchain utilities errors.
"""
pass
class NodeConnectionError(BaseError):
"""Exception class for all node connection errors.
"""
pass
class MaxTotalFeePerGasExceededError(BlockchainUtilitiesError):
"""Exception to be raised if the maximum total fee per gas would be
exceeded for a transaction to be submitted.
"""
def __init__(self, **kwargs: typing.Any):
# Docstring inherited
super().__init__('maximum total fee per gas exceeded', **kwargs)
class TransactionNonceTooLowError(BlockchainUtilitiesError):
"""Exception to be raised if a transaction has been submitted with a
nonce too low.
"""
def __init__(self, **kwargs: typing.Any):
# Docstring inherited
super().__init__('transaction nonce too low', **kwargs)
class TransactionUnderpricedError(BlockchainUtilitiesError):
"""Exception to be raised if a transaction has been submitted as an
underpriced transaction.
"""
def __init__(self, **kwargs: typing.Any):
# Docstring inherited
super().__init__('transaction underpriced', **kwargs)
class SingleNodeConnectionError(BlockchainUtilitiesError):
"""Exception to be raised if a blockchain specific node
connection cannot be established.
"""
def __init__(self, **kwargs: typing.Any):
# Docstring inherited
super().__init__('single node connection error', **kwargs)
class ResultsNotMatchingError(NodeConnectionError):
"""Exception to be raised if the results given by the blockchain
nodes do not match.
"""
def __init__(self, **kwargs: typing.Any):
# Docstring inherited
super().__init__('results not matching', **kwargs)
class NodeConnections(typing.Generic[N]):
"""Class for managing node connections in a blockchain-agnostic way.
"""
__node_connections: list[N]
__transaction_method_names: list[str]
class Wrapper(typing.Generic[W]):
"""Class for wrapping blockchain interactor objects.
"""
__is_transaction_function: bool
__objects: list[typing.Any]
__transaction_method_names: list[str]
def __init__(self, objects: list[typing.Any], attr_name: str,
transaction_method_names: list[str]):
"""Construct the wrapper instance.
Parameters
----------
objects : list
The list of wrapped blockchain interactor objects.
attr_name : str
The name of the attribute.
transaction_method_names : list of str
The list with method names which are sending
transactions to the blockchain nodes.
"""
self.__transaction_method_names = transaction_method_names
self.__is_transaction_function = (attr_name
in transaction_method_names)
if self.__is_transaction_function:
self.__objects = [getattr(objects[0], attr_name)]
else:
self.__objects = [
getattr(object_, attr_name) for object_ in objects
]
def get(self) -> typing.Any:
"""Get the compared results of wrapped objects. If they
do not match, then a ResultsNotMatchingError is raised.
Returns
-------
Any
The matching result of wrapped objects.
Raises
------
ResultsNotMatchingError
If the results do not match.
"""
result = self.__objects[0]
if not all(result == other_result
for other_result in self.__objects[1:]):
raise ResultsNotMatchingError(
**{
str(index): result
for index, result in enumerate(self.__objects)
})
return result
def get_minimum_result(self, *args: typing.Any,
**kwargs: typing.Any) -> int:
"""Get the minimum result value from the wrapped objects.
Returns
-------
int
The minimum result of the wrapped objects.
Raises
------
NodeConnectionError
If result values are not instances of int.
"""
self.__check_results_are_instances_of_int()
return min(self.__objects)
def get_maximum_result(self, *args: typing.Any,
**kwargs: typing.Any) -> int:
"""Get the maximum result value from the wrapped objects.
Returns
-------
int
The maximum result of the wrapped objects.
Raises
------
NodeConnectionError
If result values are not instances of int.
"""
self.__check_results_are_instances_of_int()
return max(self.__objects)
def __check_results_are_instances_of_int(self):
if not all(isinstance(value, int) for value in self.__objects):
raise NodeConnectionError('the result values from the wrapped '
'objects must be instances of int')
def __getattr__(self, attr_name: str) -> 'NodeConnections.Wrapper':
"""Create a new wrapper of the currently wrapped objects
with the given attribute.
Parameters
----------
attr_name : str
The attribute name.
Returns
-------
Wrapper
Object which wraps the currently wrapped objects
with the given attribute.
"""
return NodeConnections.Wrapper(self.__objects, attr_name,
self.__transaction_method_names)
def __getitem__(self, index: typing.Any) -> 'NodeConnections.Wrapper':
"""Forward the retrieval of the item found at the given
index to the wrapped objects.
Parameters
----------
index : int
The index of the requested item.
Returns
-------
Wrapper
The wrapper object with the inner wrapped items
retrieved.
Raises
------
IndexError
If the list index is out of range.
"""
self.__objects = [object_[index] for object_ in self.__objects]
return self
def __call__(
self, *args: typing.Any, **kwargs: typing.Any) -> \
typing.Union['NodeConnections.Wrapper', typing.Any]:
"""Forward the called method to the wrapped objects. If it
sends a transaction to the blockchain node, it will be
forwarded to only one of them, randomly chosen.
Returns
-------
            Wrapper or Any
                If the called method sends a transaction, the result of
                invoking it on a single randomly chosen node. Otherwise,
                the wrapper object with the inner wrapped objects called.
"""
if self.__is_transaction_function:
random_node_index = random.randint(0, len(self.__objects) - 1)
return self.__objects[random_node_index](*args, **kwargs)
self.__objects = [
object_(*args, **kwargs) for object_ in self.__objects
]
return self
def __init__(self, transaction_method_names: list[str] = []):
"""Construct an instance.
Parameters
----------
transaction_method_names : list of str
The list with method names which are sending transactions
to the blockchain nodes.
"""
self.__node_connections = []
self.__transaction_method_names = transaction_method_names
def add_node_connection(self, node_connection: N) -> None:
"""Add a node connection to the list of node connections.
Parameters
----------
node_connection : N
The node connection to be added.
"""
self.__node_connections.append(node_connection)
def get_configured_node_connections(self) -> list[N]:
"""Get the node connections.
Returns
-------
list[N]
The list of node connections.
"""
return self.__node_connections
def __getattr__(self, attr_name: str) -> Wrapper:
"""Creates a wrapper object for the node connections with the
given attribute name.
Parameters
----------
attr_name : str
The name of the attribute.
Raises
------
NodeConnectionError
If no node connection was added.
Returns
-------
Wrapper
Object which wraps the current node connections
with the given attribute.
"""
if len(self.__node_connections) == 0:
raise NodeConnectionError(
'at least one valid connection must be established')
return NodeConnections.Wrapper(self.__node_connections, attr_name,
self.__transaction_method_names)
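# Hedged usage sketch (hypothetical node objects and attribute): attribute
# access on a NodeConnections instance fans out to every configured node,
# and get() raises ResultsNotMatchingError unless all nodes agree.
def _example_node_connections(node_a: typing.Any,
                              node_b: typing.Any) -> typing.Any:
    connections = NodeConnections[typing.Any]()
    connections.add_node_connection(node_a)
    connections.add_node_connection(node_b)
    return connections.some_attribute.get()  # hypothetical attribute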
class BlockchainHandler(abc.ABC):
"""Base class for all classes that handle blockchain-specific data
and operations. These include blockchain clients and utilities.
"""
@classmethod
@abc.abstractmethod
def get_blockchain(cls) -> Blockchain:
"""Get the blockchain the handler is implemented for.
Returns
-------
Blockchain
The supported blockchain.
"""
pass # pragma: no cover
@classmethod
def get_blockchain_name(cls) -> str:
"""Get the name of the blockchain the handler is implemented
for.
Returns
-------
str
The name of the supported blockchain.
"""
return cls.get_blockchain().name
@classmethod
def find_subclasses(cls: type[H]) -> dict[Blockchain, type[H]]:
"""Find all subclasses of the blockchain handler in the
handler's package.
Returns
-------
dict
A dictionary of the blockchain handler's subclasses with
their supported blockchains as keys.
"""
package_name = cls.__module__.rpartition('.')[0]
package_path = str(pathlib.Path(inspect.getfile(cls)).parent)
# Only subclasses in imported modules can be found
for module in pkgutil.iter_modules([package_path]):
full_name = f'{package_name}.{module.name}'
if full_name != 'pantos.common.blockchains.tasks':
importlib.import_module(full_name)
blockchain_handlers = {}
handler_classes = collections.deque[type[H]](cls.__subclasses__())
while len(handler_classes) > 0:
handler_class = handler_classes.pop()
if not inspect.isabstract(handler_class):
blockchain = handler_class.get_blockchain()
blockchain_handlers[blockchain] = handler_class
handler_classes.extendleft(handler_class.__subclasses__())
return blockchain_handlers
@dataclasses.dataclass
class VersionedContractAbi:
"""Class which encapsulates the ABI definition of a contract
with support for versioning.
Attributes
----------
contract_abi : ContractAbi
Supported contract ABI.
version : semantic_version.Version
The version of the Pantos protocol.
"""
contract_abi: ContractAbi
version: semantic_version.Version
@dataclasses.dataclass
class UnhealthyNode:
"""Entity which encapsulates information about an unhealthy node.
Attributes
----------
node_domain : str
The domain of the node's URL.
status : str
The status of the node.
"""
node_domain: str
status: str
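# Hedged sketch: pairing a contract ABI with a Pantos protocol version (the
# version value is illustrative).
def _example_versioned_contract_abi() -> VersionedContractAbi:
    return VersionedContractAbi(ContractAbi.PANTOS_HUB,
                                semantic_version.Version('1.0.0'))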
class BlockchainUtilities(BlockchainHandler,
ErrorCreator[BlockchainUtilitiesError]):
"""Base class for all blockchain utilities classes.
Attributes
----------
average_block_time : int
The average time in seconds required to generate a new block of
the blockchain.
required_transaction_confirmations : int
The number of required confirmations for a transaction to be
considered included in the blockchain.
transaction_network_id : int or None
The unique public (i.e. non-Pantos-specific) blockchain network
identifier (partly called chain ID) to be used for signing
transactions (to prevent replay attacks between different
compatible blockchain networks).
"""
def __init__(self, blockchain_node_urls: list[str],
fallback_blockchain_node_urls: list[str],
average_block_time: int,
required_transaction_confirmations: int,
transaction_network_id: typing.Optional[int],
default_private_key: typing.Optional[tuple[str, str]] = None,
celery_tasks_enabled: bool = False):
"""Construct a blockchain utilities instance.
Parameters
----------
blockchain_node_urls : list of str
The URLs of the nodes to use for communication with the
blockchain network.
fallback_blockchain_node_urls : list[str]
The URLs of the fallback nodes to use for communication
with the blockchain network.
average_block_time : int
The average time in seconds required to generate a new block
of the blockchain.
required_transaction_confirmations : int
The number of required confirmations for a transaction to be
considered included in the blockchain.
transaction_network_id : int or None
The unique public (i.e. non-Pantos-specific) blockchain
network identifier (partly called chain ID) to be used for
signing transactions (to prevent replay attacks between
different compatible blockchain networks). It is assumed to
be the identifier of the main or a test network of the
blockchain supported by the blockchain utilities subclass.
default_private_key : tuple of str and str, optional
The keystore value and password of the default private
key to be used by the blockchain utilities. (default: None).
celery_tasks_enabled : bool, optional
If True, Celery tasks are enabled for enhanced
functionalities (default: False). This requires a proper
Celery environment to be set up by the project using the
blockchain utilities.
Raises
------
BlockchainUtilitiesError
If the blockchain utilities initialization fails.
"""
if average_block_time <= 0:
raise self._create_error('average block time must be > 0',
average_block_time=average_block_time)
if required_transaction_confirmations < 0:
raise self._create_error(
'required transaction confirmations must be >= 0',
required_transaction_confirmations= # noqa: E251
required_transaction_confirmations)
if transaction_network_id is not None and transaction_network_id <= 0:
raise self._create_error(
'transaction network ID must be > 0',
transaction_network_id=transaction_network_id)
if len(blockchain_node_urls) == 0:
raise self._create_error(
'at least one blockchain node URL is expected')
self.average_block_time = average_block_time
self.required_transaction_confirmations = \
required_transaction_confirmations
self.transaction_network_id = transaction_network_id
self._blockchain_node_urls = blockchain_node_urls
self._fallback_blockchain_node_urls = fallback_blockchain_node_urls
self._default_private_key = (
None if default_private_key is None else self.decrypt_private_key(
default_private_key[0], default_private_key[1]))
self._default_address = (None if self._default_private_key is None else
self.get_address(self._default_private_key))
self._celery_tasks_enabled = celery_tasks_enabled
self.__loaded_contract_abis: dict[ContractAbi, list[typing.Any]] = {}
def create_node_connections(
self,
timeout: typing.Optional[typing.Union[float, tuple]] = None) \
-> NodeConnections:
"""Create blockchain node connections.
Parameters
----------
timeout : float or tuple
How long to wait for the server to send data before giving up,
as a float, or a (connect timeout, read timeout) tuple.
Returns
-------
NodeConnections
The initialized object with valid connections.
Raises
------
        BlockchainUtilitiesError
            If the node connections cannot be initialized.
"""
fallback_nodes = self._fallback_blockchain_node_urls.copy()
node_connections = NodeConnections[typing.Any](
self._get_transaction_method_names())
for blockchain_node_url in self._blockchain_node_urls:
node_connection = self.__create_valid_node_connection(
blockchain_node_url, fallback_nodes, timeout)
node_connections.add_node_connection(node_connection)
return node_connections
def __create_valid_node_connection(
self, blockchain_node_url: str,
fallback_blockchain_node_urls: list[str],
timeout: typing.Optional[typing.Union[float, tuple]] = None):
blockchain_node_urls = ([blockchain_node_url] +
fallback_blockchain_node_urls)
for blockchain_node_url_ in blockchain_node_urls:
try:
valid_node_connection = self._create_single_node_connection(
blockchain_node_url_, timeout)
if blockchain_node_url_ != blockchain_node_url:
fallback_blockchain_node_urls.remove(blockchain_node_url_)
return valid_node_connection
except SingleNodeConnectionError:
continue
blockchain_node_domains = [
urllib.parse.urlparse(blockchain_node_url).netloc
for blockchain_node_url in blockchain_node_urls
]
raise self._create_error(
'cannot connect to any of the blockchain nodes with the domains '
f'"{blockchain_node_domains}"')
@abc.abstractmethod
def get_address(self, private_key: str) -> str:
"""Determine the blockchain address from a private key.
Parameters
----------
private_key : str
The unencrypted private key.
Returns
-------
str
The blockchain address for the given private key.
Raises
------
BlockchainUtilitiesError
If the address cannot be determined from the private key.
"""
pass # pragma: no cover
@abc.abstractmethod
def get_balance(
self, account_address: str,
token_address: typing.Optional[str] = None,
node_connections: typing.Optional[NodeConnections] = None) -> int:
"""Determine the balance of native coins or tokens of an
address.
Parameters
----------
account_address : str
The address that will be evaluated.
token_address : str, optional
The address of the token that will be interrogated.
node_connections : NodeConnections, optional
The node connections object to be used (default: None).
Returns
-------
int
Balance of the address.
Raises
------
BlockchainUtilitiesError
If the balance cannot be fetched.
ResultsNotMatchingError
If the results given by the configured blockchain
nodes do not match.
"""
pass # pragma: no cover
@abc.abstractmethod
def is_valid_address(self, address: str) -> bool:
"""Determine if an address string is a valid address on the
blockchain.
Parameters
----------
address : str
The address string to check.
Returns
-------
bool
True if the given address string is a valid address on the
blockchain, else False.
"""
pass # pragma: no cover
@abc.abstractmethod
def is_equal_address(self, address_one: str, address_two: str) -> bool:
"""Determine if two addresses are equal.
Parameters
----------
address_one : str
The first address string to check.
address_two : str
The second address string to check.
Returns
-------
bool
True if the given addresses are equal, else False.
"""
pass # pragma: no cover
@abc.abstractmethod
def get_unhealthy_nodes(
self, blockchain_nodes: list[str],
timeout: float | tuple | None = None) -> list[UnhealthyNode]:
"""Determine the health of the blockchain nodes.
Parameters
----------
blockchain_nodes : list of str
The URLs of the blockchain nodes to check.
timeout : float, tuple or None
How long to wait for the server to send data before giving up,
as a float, or a (connect timeout, read timeout) tuple.
Returns
-------
list[UnhealthyNode]
The list of unhealthy nodes.
"""
pass
@abc.abstractmethod
def _get_transaction_method_names(self) -> list[str]:
"""Determine the blockchain interactor method names which
are sending transactions.
Returns
-------
list[str]
The list of method names which are sending
transactions.
"""
pass # pragma: no cover
def load_contract_abi(
self,
versioned_contract_abi: VersionedContractAbi) -> list[typing.Any]:
"""Load a contract ABI, according to its version, as a list
from a JSON file. If a contract ABI has already been loaded
before, a cached version is returned.
Parameters
----------
versioned_contract_abi : VersionedContractAbi
The version and the contract ABI to load.
Returns
-------
list
The loaded contract ABI.
"""
contract_abi = versioned_contract_abi.contract_abi
version = versioned_contract_abi.version
if contract_abi in self.__loaded_contract_abis:
return self.__loaded_contract_abis[contract_abi]
contract_abi_file_name = contract_abi.get_file_name(
self.get_blockchain())
versioned_contract_abi_package = (
f'{_BASE_CONTRACT_ABI_PACKAGE}.v'
f'{version.major}_{version.minor}_{version.patch}')
try:
with importlib.resources.open_text(
versioned_contract_abi_package,
contract_abi_file_name) as contract_abi_file:
loaded_contract_abi = json.load(contract_abi_file)
self.__loaded_contract_abis[contract_abi] = loaded_contract_abi
return loaded_contract_abi
except Exception:
raise self._create_error('unable to load a contract ABI',
contract_abi=contract_abi,
version=version)
@abc.abstractmethod
def decrypt_private_key(self, encrypted_key: str, password: str) -> str:
"""Load the private key from a password-encrypted key.
Parameters
----------
        encrypted_key : str
The encrypted key.
password : str
The password to decrypt the key.
Returns
-------
str
The decrypted private key.
Raises
------
BlockchainUtilitiesError
If the private key cannot be decrypted.
"""
pass # pragma: no cover
@abc.abstractmethod
def read_transaction_status(
self, transaction_id: str,
node_connections: typing.Optional[NodeConnections] = None) \
-> TransactionStatus:
"""Read the status of a transaction.
Parameters
----------
transaction_id : str
The ID/hash of the transaction.
node_connections : NodeConnections, optional
The node connections object to be used (default: None).
Returns
-------
TransactionStatus
The transaction's current status.
Raises
------
BlockchainUtilitiesError
If the transaction status cannot be read.
ResultsNotMatchingError
If the results given by the configured blockchain
nodes do not match.
"""
pass # pragma: no cover
@dataclasses.dataclass
class TransactionSubmissionRequest:
"""Request data for submitting a transaction.
Attributes
----------
contract_address : BlockchainAddress
The address of the contract to invoke a function on in the
transaction.
versioned_contract_abi : VersionedContractAbi
The version and the ABI of the contract to invoke a function
on in the transaction.
function_selector : str
The selector of the contract function to be invoked in the
transaction.
function_args : ContractFunctionArgs
The arguments of the contract function to be invoked in the
transaction.
gas : int or None
The gas to be provided for the transaction. Depending on the
blockchain, it may not be necessary to specify the gas
explicitly or it may be possible to estimate the required gas
automatically.
min_adaptable_fee_per_gas : int
The minimum adaptable fee per gas. The definition of the
adaptable fee per gas depends on the blockchain's
transaction fee model.
max_total_fee_per_gas : int or None
The maximum total fee per gas. Since the total fee per gas
will anyway be kept as low as possible for the transaction
to be included in a block, it is recommended to specify a
large maximum total fee per gas. If it is not specified at
all, no upper limit for the total fee per gas will be
enforced.
amount : int or None
The amount of native coins to be sent in the transaction
(specified in the blockchain's smallest coin denomination).
nonce : int
The unique transaction nonce of the account controlled by
the default private key.
"""
contract_address: BlockchainAddress
versioned_contract_abi: VersionedContractAbi
function_selector: str
function_args: ContractFunctionArgs
gas: typing.Optional[int]
min_adaptable_fee_per_gas: int
max_total_fee_per_gas: typing.Optional[int]
amount: typing.Optional[int]
nonce: int
def to_dict(self) -> dict[str, typing.Any]:
"""Convert the request instance to its corresponding
dictionary representation.
Returns
-------
dict
The dictionary representation.
"""
request_dict = dataclasses.asdict(self)
request_dict['versioned_contract_abi']['contract_abi'] = \
self.versioned_contract_abi.contract_abi.value
request_dict['versioned_contract_abi']['version'] = \
str(self.versioned_contract_abi.version)
return request_dict
@classmethod
def from_dict(cls: type[T], request_dict: dict[str, typing.Any]) -> T:
"""Convert the dictionary representation of a request to its
corresponding request instance.
Parameters
----------
request_dict : dict
The dictionary representation.
Returns
-------
TransactionSubmissionRequest
The request instance.
"""
request_dict = copy.deepcopy(request_dict)
request_dict['versioned_contract_abi'] = VersionedContractAbi(
ContractAbi(
request_dict['versioned_contract_abi']['contract_abi']),
semantic_version.Version(
request_dict['versioned_contract_abi']['version']))
return cls(**request_dict)
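        # A minimal round-trip sketch (hypothetical request instance), e.g. for
        # passing a request through a Celery task queue:
        #     request_dict = request.to_dict()  # JSON-serializable payload
        #     restored = BlockchainUtilities.TransactionSubmissionRequest.\
        #         from_dict(request_dict)
        #     # restored should equal request, provided VersionedContractAbi
        #     # defines value equality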
@dataclasses.dataclass
class TransactionSubmissionResponse:
"""Response data from submitting a transaction.
Attributes
----------
transaction_id : str
The ID/hash of the submitted transaction.
adaptable_fee_per_gas : int
The adaptable fee per gas actually used for submitting the
transaction.
"""
transaction_id: str
adaptable_fee_per_gas: int
@abc.abstractmethod
def submit_transaction(
self, request: TransactionSubmissionRequest,
node_connections: typing.Optional[NodeConnections] = None) \
-> TransactionSubmissionResponse:
"""Submit a transaction.
Parameters
----------
request : TransactionSubmissionRequest
The request data for submitting a transaction.
node_connections : NodeConnections, optional
The node connections object to be used (default: None).
Returns
-------
TransactionSubmissionResponse
The response data from submitting a transaction.
Raises
------
MaxTotalFeePerGasExceededError
If the maximum total fee per gas would be exceeded for the
transaction to be submitted.
TransactionUnderpricedError
If the transaction has been submitted as an underpriced
transaction.
TransactionNonceTooLowError
If the transaction has been submitted with a nonce too low.
ResultsNotMatchingError
If the results given by the configured blockchain
nodes do not match.
BlockchainUtilitiesError
If the transaction cannot be submitted for any other reason.
"""
pass # pragma: no cover
@dataclasses.dataclass
class TransactionResubmissionRequest(TransactionSubmissionRequest):
"""Request data for resubmitting a transaction.
Attributes
----------
adaptable_fee_increase_factor : float
The factor for increasing the adaptable fee per gas.
"""
adaptable_fee_increase_factor: float
@dataclasses.dataclass
class TransactionResubmissionResponse(TransactionSubmissionResponse):
"""Response data from resubmitting a transaction.
"""
pass
def resubmit_transaction(
self, request: TransactionResubmissionRequest,
node_connections: typing.Optional[NodeConnections] = None) \
-> TransactionResubmissionResponse:
"""Resubmit (i.e. replace) a transaction.
Parameters
----------
request : TransactionResubmissionRequest
The request data for resubmitting a transaction.
node_connections : NodeConnections, optional
The node connections object to be used (default: None).
Returns
-------
TransactionResubmissionResponse
The response data from resubmitting a transaction.
Raises
------
MaxTotalFeePerGasExceededError
If the maximum total fee per gas would be exceeded for the
transaction to be resubmitted.
TransactionNonceTooLowError
If the transaction has been resubmitted with a nonce too
low.
ResultsNotMatchingError
If the results given by the configured blockchain
nodes do not match.
BlockchainUtilitiesError
If the transaction cannot be resubmitted for any other
reason.
"""
if request.min_adaptable_fee_per_gas < 0:
raise self._create_error(
'previous minimum adaptable fee per gas must be >= 0',
request=request)
if (request.adaptable_fee_increase_factor
< MIN_ADAPTABLE_FEE_INCREASE_FACTOR):
raise self._create_error(
'adaptable fee increase factor must be >= '
f'{MIN_ADAPTABLE_FEE_INCREASE_FACTOR}', request=request)
response = None
while response is None:
# Minimum adaptable fee per gas must be 1 for the
# resubmission if the previous minimum adaptable fee per gas
# has been 0
min_adaptable_fee_per_gas = max(
1,
math.ceil(request.min_adaptable_fee_per_gas *
request.adaptable_fee_increase_factor))
if (request.max_total_fee_per_gas is not None and
min_adaptable_fee_per_gas > request.max_total_fee_per_gas):
raise self._create_max_total_fee_per_gas_exceeded_error(
request=request)
request = dataclasses.replace(
request, min_adaptable_fee_per_gas=min_adaptable_fee_per_gas)
try:
response = self.submit_transaction(request, node_connections)
except TransactionUnderpricedError:
_logger.warning('resubmitted transaction underpriced',
extra=vars(request))
return BlockchainUtilities.TransactionResubmissionResponse(
response.transaction_id, response.adaptable_fee_per_gas)
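        # Worked example (hypothetical numbers): with min_adaptable_fee_per_gas
        # of 100 and adaptable_fee_increase_factor of 1.1, successive attempts
        # use ceil(100 * 1.1) = 110, then 121, then 134, ... until the
        # transaction is no longer underpriced or max_total_fee_per_gas would
        # be exceeded.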
@dataclasses.dataclass
class TransactionSubmissionStartRequest(TransactionResubmissionRequest):
"""Request data for starting a transaction submission.
Attributes
----------
blocks_until_resubmission : int
The number of blocks to wait until the transaction is
resubmitted if it has not yet been included in a block.
"""
blocks_until_resubmission: int
def start_transaction_submission(
self, request: TransactionSubmissionStartRequest,
node_connections: typing.Optional[NodeConnections] = None) \
-> uuid.UUID:
"""Start a transaction submission. The transaction is
automatically resubmitted with higher transaction fees until it
is included in a block. Celery tasks need to be enabled for this
function.
Parameters
----------
request : TransactionSubmissionStartRequest
The request data for starting a transaction submission.
node_connections : NodeConnections, optional
The node connections object to be used (default: None).
Returns
-------
uuid.UUID
The unique internal transaction ID, which can be used later
to retrieve the status of the transaction submission.
Raises
------
MaxTotalFeePerGasExceededError
If the maximum total fee per gas would be exceeded for the
transaction to be submitted.
TransactionNonceTooLowError
If the transaction has been submitted with a nonce too low.
ResultsNotMatchingError
If the results given by the configured blockchain
nodes do not match.
BlockchainUtilitiesError
If the transaction submission cannot be started for any
other reason.
"""
if not self._celery_tasks_enabled:
raise self._create_error('Celery tasks disabled')
if request.blocks_until_resubmission <= 0:
raise self._create_error('blocks until resubmission must be > 0',
request=request)
if (request.adaptable_fee_increase_factor
< MIN_ADAPTABLE_FEE_INCREASE_FACTOR):
raise self._create_error(
'adaptable fee increase factor must be >= '
f'{MIN_ADAPTABLE_FEE_INCREASE_FACTOR}', request=request)
try:
response = self.submit_transaction(request, node_connections)
except TransactionUnderpricedError:
_logger.warning('initially submitted transaction underpriced',
extra=vars(request))
response = self.resubmit_transaction(request, node_connections)
try:
from pantos.common.blockchains.tasks import \
create_transaction_resubmission_task
internal_transaction_id = create_transaction_resubmission_task(
self.get_blockchain(), request, response)
except Exception:
raise self._create_error(
'unable to create a transaction resubmission task',
request=request, transaction_id=response.transaction_id)
return internal_transaction_id
@dataclasses.dataclass
class TransactionSubmissionStatusResponse:
"""Response data from retrieving the status of a transaction
submission.
Attributes
----------
transaction_submission_completed : bool
True if and only if the transaction submission has been
completed (i.e. the transaction is either confirmed or
reverted).
transaction_status : TransactionStatus or None
The status of the submitted (and eventually included)
transaction (available if the transaction submission has
been completed).
transaction_id : str or None
The ID/hash of the submitted (and eventually included)
transaction (available if the transaction submission has
been completed).
"""
transaction_submission_completed: bool
transaction_status: typing.Optional[TransactionStatus] = None
transaction_id: typing.Optional[str] = None
def get_transaction_submission_status(
self, internal_transaction_id: uuid.UUID) \
-> TransactionSubmissionStatusResponse:
"""Retrieve the status of a transaction submission. Celery tasks
need to be enabled for this function.
Parameters
----------
internal_transaction_id : uuid.UUID
The unique internal transaction ID.
Returns
-------
TransactionSubmissionStatusResponse
The response data from retrieving the status of a
transaction submission.
Raises
------
BlockchainUtilitiesError
If the status of the transaction submission cannot be
retrieved or if there has been an unresolvable error during
the transaction submission.
"""
if not self._celery_tasks_enabled:
raise self._create_error('Celery tasks disabled')
try:
from pantos.common.blockchains.tasks import \
get_transaction_resubmission_task_result
task_result = get_transaction_resubmission_task_result(
internal_transaction_id)
except Exception:
raise self._create_error(
'unable to get a transaction resubmission task result',
internal_transaction_id=internal_transaction_id)
if task_result is None:
return BlockchainUtilities.TransactionSubmissionStatusResponse(
False)
transaction_status = task_result[0]
transaction_id = task_result[1]
return BlockchainUtilities.TransactionSubmissionStatusResponse(
True, transaction_status, transaction_id)
@abc.abstractmethod
def _create_single_node_connection(
self, blockchain_node_url: str,
timeout: float | tuple | None = None) -> typing.Any:
"""Create a single blockchain-specific node connection
with the given URL.
Parameters
----------
blockchain_node_url : str
The blockchain node URL.
timeout : float, tuple or None
How long to wait for the server to send data before giving up,
as a float, or a (connect timeout, read timeout) tuple.
Returns
-------
typing.Any
The blockchain-specific node connection.
Raises
------
SingleNodeConnectionError
If the node connection cannot be established.
"""
pass # pragma: no cover
def _create_max_total_fee_per_gas_exceeded_error(
self, **kwargs: typing.Any) -> BlockchainUtilitiesError:
return self._create_error(
specialized_error_class=MaxTotalFeePerGasExceededError, **kwargs)
def _create_transaction_nonce_too_low_error(
self, **kwargs: typing.Any) -> BlockchainUtilitiesError:
return self._create_error(
specialized_error_class=TransactionNonceTooLowError, **kwargs)
def _create_transaction_underpriced_error(
self, **kwargs: typing.Any) -> BlockchainUtilitiesError:
return self._create_error(
specialized_error_class=TransactionUnderpricedError, **kwargs)
def _create_single_node_connection_error(
self, **kwargs: typing.Any) -> BlockchainUtilitiesError:
return self._create_error(
specialized_error_class=SingleNodeConnectionError, **kwargs)
| 43,742 | Python | .py | 1,037 | 31.219865 | 79 | 0.612174 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,183 | bnbchain.py | pantos-io_common/pantos/common/blockchains/bnbchain.py | """Module for BNB-Chain-specific utilities and errors. Since the BNB
Smart Chain is Ethereum-compatible, the utilities implementation
inherits from the pantos.common.blockchains.ethereum module.
"""
from pantos.common.blockchains.base import BlockchainUtilitiesError
from pantos.common.blockchains.enums import Blockchain
from pantos.common.blockchains.ethereum import EthereumUtilities
from pantos.common.blockchains.ethereum import EthereumUtilitiesError
class BnbChainUtilitiesError(EthereumUtilitiesError):
"""Exception class for all BNB Chain utilities errors.
"""
pass
class BnbChainUtilities(EthereumUtilities):
"""Class for BNB-Chain-specific utilities.
"""
@classmethod
def get_blockchain(cls) -> Blockchain:
# Docstring inherited
return Blockchain.BNB_CHAIN
@classmethod
def get_error_class(cls) -> type[BlockchainUtilitiesError]:
# Docstring inherited
return BnbChainUtilitiesError
| 969 | Python | .py | 23 | 37.826087 | 69 | 0.795309 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,184 | generate-signer-key.py | pantos-io_common/scripts/generate-signer-key.py | #! /usr/bin/env python3
"""Generate a private key to be used with the pantos.common.signer
module.
"""
import getpass
import random
import string
import Crypto.PublicKey.ECC
passphrase = getpass.getpass('Passphrase: ')
random_string = ''.join(
random.choices(string.ascii_lowercase + string.digits, k=8))
key_file_name = f'signer-key-{random_string}.pem'
key = Crypto.PublicKey.ECC.generate(curve='Ed25519')
with open(key_file_name, 'wt') as key_file:
key_file.write(
key.export_key(format='PEM', passphrase=passphrase,
protection='PBKDF2WithHMAC-SHA1AndAES128-CBC'))
print(f'PEM file written to {key_file_name}')
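# A minimal sketch (assuming the same pycryptodome API) of loading the key back
# for signing:
#     with open(key_file_name, 'rt') as key_file:
#         key = Crypto.PublicKey.ECC.import_key(key_file.read(),
#                                               passphrase=passphrase)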
| 658 | Python | .py | 18 | 33.111111 | 70 | 0.732283 | pantos-io/common | 8 | 4 | 0 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,185 | G2G_cal.py | dptech-corp_NAG2G/NAG2G/G2G_cal.py | from utils.G2G_cal import *
if __name__ == "__main__":
if len(sys.argv) < 2:
raise "ERROR"
if ".txt" in sys.argv[1]:
smi_path = sys.argv[1]
N_beam_search = 10
if len(sys.argv) >= 3:
N_beam_search = int(sys.argv[2])
if "--if_full" in sys.argv:
if_full = True
else:
if_full = False
if len(sys.argv) >= 5:
score_name = sys.argv[4]
save_path = smi_path.replace(smi_path.split("/")[-1], score_name)
else:
# score_name = "score"
score_name = smi_path.split("/")[-1].replace("smi", "score")
save_path = smi_path.replace(smi_path.split("/")[-1], score_name)
# save_path = None
run(smi_path, save_path, N_beam_search=N_beam_search, if_full=if_full)
else:
path = sys.argv[1]
# txt_name = "smi_lp0.0_t0_10_b256.txt"
dirs = [
os.path.join(path, i)
for i in os.listdir(path)
if "checkpoint_" in i and ".pt" not in i
]
for i in dirs:
dirs2 = [os.path.join(i, j) for j in os.listdir(i) if ".txt" in j and "smi" in j]
for j in dirs2:
orders = "python G2G_cal.py {} &".format(j)
print(orders)
os.system(orders) | 1,330 | Python | .py | 36 | 26.611111 | 93 | 0.493818 | dptech-corp/NAG2G | 8 | 4 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,186 | infer.py | dptech-corp_NAG2G/NAG2G/infer.py | #!/usr/bin/env python3 -u
# Copyright (c) DP Technology, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import ast
import logging
import json
import os
import sys
import numpy as np
from argparse import Namespace
from itertools import chain
import pickle
from rdkit import Chem
from rdkit.Chem import AllChem
import torch
from unicore import checkpoint_utils, distributed_utils, utils
from unicore import tasks, options
from unicore.logging import metrics, progress_bar
from search_strategies.parse import add_search_strategies_args
from search_strategies.beam_search_generator import SequenceGeneratorBeamSearch
from search_strategies.simple_sequence_generator import SimpleGenerator
from search_strategies.greedy_generator import GreedyGenerator
from search_strategies.sample_generator import SampleGenerator
logging.basicConfig(
format="%(asctime)s | %(levelname)s | %(name)s | %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
level=os.environ.get("LOGLEVEL", "INFO").upper(),
stream=sys.stdout,
)
logger = logging.getLogger("NAG2G_cli.infer")
def setmap2smiles(smiles):
mol = Chem.MolFromSmiles(smiles)
mol = AllChem.RemoveHs(mol)
[atom.SetAtomMapNum(idx + 1) for idx, atom in enumerate(mol.GetAtoms())]
return Chem.MolToSmiles(mol)
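# e.g. setmap2smiles("CCO") yields an atom-mapped SMILES such as
# "[CH3:1][CH2:2][OH:3]"; map numbers follow the RDKit atom order, while the
# printed order depends on RDKit's canonical ranking.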
def G2G_weight_reload(state):
    # Zero-pad the degree positional-embedding table to 100 rows so that
    # checkpoints saved with a smaller table remain loadable.
    if state["model"]["degree_pe.weight"].shape[0] != 100:
tmp_shape = state["model"]["degree_pe.weight"].shape
tmp = torch.zeros((100, tmp_shape[1])).to(
state["model"]["degree_pe.weight"].device
)
tmp[: tmp_shape[0]] = state["model"]["degree_pe.weight"]
state["model"]["degree_pe.weight"] = tmp
return state
def init(args):
    assert (
        args.batch_size is not None
    ), "Must specify batch size with --batch-size"
use_fp16 = args.fp16
use_cuda = torch.cuda.is_available() and not args.cpu
if use_cuda:
torch.cuda.set_device(args.device_id)
if args.distributed_world_size > 1:
data_parallel_world_size = distributed_utils.get_data_parallel_world_size()
data_parallel_rank = distributed_utils.get_data_parallel_rank()
else:
data_parallel_world_size = 1
data_parallel_rank = 0
overrides = ast.literal_eval(args.model_overrides)
logger.info("loading model(s) from {}".format(args.path))
state = checkpoint_utils.load_checkpoint_to_cpu(args.path)
task = tasks.setup_task(args)
model = task.build_model(args)
if args.task == "G2G":
state = G2G_weight_reload(state)
model.load_state_dict(state["model"], strict=False)
# Move models to GPU
# for model in models:
# if use_fp16:
# model.half()
# if use_cuda:
# model.cuda()
if use_fp16:
model = model.half()
if use_cuda:
model.cuda()
# Print args
logger.info(args)
# Build loss
loss = task.build_loss(args)
loss.eval()
model.eval()
logger.info(model)
logger.info("task: {}".format(task.__class__.__name__))
logger.info("model: {}".format(model.__class__.__name__))
logger.info("loss: {}".format(loss.__class__.__name__))
logger.info(
"num. model params: {:,} (num. trained: {:,})".format(
sum(getattr(p, "_orig_size", p).numel() for p in model.parameters()),
sum(
getattr(p, "_orig_size", p).numel()
for p in model.parameters()
if p.requires_grad
),
)
)
np.random.seed(args.seed)
print(
"test beam search params: ",
args.search_strategies,
args.beam_size,
args.len_penalty,
args.temperature,
)
if args.search_strategies == "SequenceGeneratorBeamSearch":
generator = SequenceGeneratorBeamSearch(
[model],
task.dictionary,
beam_size=args.beam_size,
len_penalty=args.len_penalty,
max_len_b=1024
)
elif args.search_strategies == "SimpleGenerator":
generator = SimpleGenerator(
model,
task.dictionary,
beam_size=args.beam_size,
len_penalty=args.len_penalty,
args=args,
)
elif args.search_strategies == "GreedyGenerator":
generator = GreedyGenerator(model, task.dictionary, beam_size=args.beam_size)
# dataset_empty = task.load_empty_dataset(seed=args.seed)
return (
args,
use_cuda,
task,
generator,
data_parallel_world_size,
data_parallel_rank,
# dataset_empty,
)
def run(smiles, model_tuple, seed=42):
(
args,
use_cuda,
task,
generator,
data_parallel_world_size,
data_parallel_rank,
# dataset_empty,
) = model_tuple
dataset_empty = task.load_empty_dataset(init_values=smiles, seed=seed)
# dataset_empty.put_smiles_in(smiles)
dataset = task.dataset("test")
itr = task.get_batch_iterator(
dataset=dataset,
batch_size=len(smiles), # args.batch_size,
ignore_invalid_inputs=True,
seed=args.seed,
num_shards=data_parallel_world_size,
shard_id=data_parallel_rank,
num_workers=args.num_workers,
data_buffer_size=args.data_buffer_size,
).next_epoch_itr(shuffle=False)
for i, sample in enumerate(itr):
sample = utils.move_to_cuda(sample) if use_cuda else sample
# sample = utils.move_to_cuda(dataset[0]) if use_cuda else sample
# sample_tmp = {"net_input": {}, "target": ()}
# for k, v in sample.items():
# if "net_input." in k:
# tmp_k = k.replace("net_input.", "")
# sample_tmp["net_input"][tmp_k] = v.unsqueeze(0)
# elif "target." in k:
# tmp_k = k.replace("net_input.", "")
# sample_tmp["net_input"][tmp_k] = v.unsqueeze(0)
# sample = sample_tmp
result = task.infer_step(sample, generator)
print(result)
return result
def main(args):
model_tuple = init(args)
smiles = [
"CC(=O)c1ccc2c(ccn2C(=O)OC(C)(C)C)c1",
"[CH3:1][Si:2]([CH3:3])([CH3:4])[O:5][C:6](=[O:7])/[CH:8]=[CH:9]/[CH2:10][Br:11]",
"[CH3:1][C:2]([CH3:3])([CH3:4])[O:5][C:6](=[O:7])[N:8]1[CH2:9][CH2:10][c:11]2[o:12][c:13]3[c:14]([Cl:15])[cH:16][c:17]([S:18](=[O:19])[c:20]4[cH:21][cH:22][cH:23][cH:24][cH:25]4)[cH:26][c:27]3[c:28]2[CH2:29]1",
"[CH3:1][C:2]([CH3:3])([CH3:4])[O:5][C:6](=[O:7])[n:8]1[cH:9][n:10][c:11]([CH:12]=[O:13])[cH:14]1",
"[Cl:1][c:2]1[cH:3][c:4]([Cl:5])[c:6]([CH2:7][Br:8])[cH:9][n:10]1",
"[CH3:1][O:2][c:3]1[n:4][c:5]2[cH:6][cH:7][c:8]([C:9](=[O:10])[c:11]3[cH:12][n:13][n:14][n:15]3[CH3:16])[cH:17][c:18]2[c:19]([Cl:20])[c:21]1[CH2:22][c:23]1[cH:24][cH:25][c:26]([C:27]([F:28])([F:29])[F:30])[cH:31][cH:32]1",
"[CH3:1][C:2](=[O:3])[c:4]1[n:5][c:6]2[cH:7][c:8]([NH:9][C:10](=[O:11])[c:12]3[cH:13][cH:14][c:15](/[CH:16]=[CH:17]/[C:18]([F:19])([F:20])[F:21])[cH:22][c:23]3[CH3:24])[cH:25][cH:26][c:27]2[s:28]1",
"[CH3:1][CH2:2][O:3][C:4](=[O:5])[CH:6]1[CH2:7][CH2:8][C:9]2([CH2:10][CH2:11]1)[O:12][CH2:13][CH2:14][O:15]2",
"[CH3:1][CH2:2][CH2:3][CH2:4][c:5]1[n:6][cH:7][c:8]([C:9]([CH3:10])=[O:11])[n:12]1[CH2:13][c:14]1[cH:15][cH:16][cH:17][cH:18][c:19]1[Cl:20]",
"[CH3:1][C:2]1([c:3]2[cH:4][c:5]3[cH:6][cH:7][cH:8][n+:9]([O-:10])[c:11]3[nH:12]2)[CH2:13][CH2:14]1",
"[O:1]=[CH:2][c:3]1[cH:4][cH:5][c:6]([F:7])[c:8]([N+:9](=[O:10])[O-:11])[cH:12]1",
"[CH3:1][c:2]1[cH:3][cH:4][cH:5][c:6]([C:7]#[C:8][c:9]2[cH:10][cH:11][c:12](-[c:13]3[cH:14][cH:15][n:16][n:17]3[CH3:18])[cH:19][cH:20]2)[n:21]1",
"[NH2:1][c:2]1[cH:3][cH:4][c:5](-[c:6]2[c:7]([F:8])[cH:9][cH:10][cH:11][c:12]2[C:13]([F:14])([F:15])[F:16])[cH:17][c:18]1[N+:19](=[O:20])[O-:21]",
]
smiles = [setmap2smiles(i) for i in smiles]
run(smiles, model_tuple)
def cli_main():
parser = options.get_validation_parser()
add_search_strategies_args(parser)
options.add_model_args(parser)
args = options.parse_args_and_arch(parser)
distributed_utils.call_main(args, main)
if __name__ == "__main__":
cli_main()
| 8,355 | Python | .py | 200 | 35.055 | 230 | 0.595962 | dptech-corp/NAG2G | 8 | 4 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,187 | __init__.py | dptech-corp_NAG2G/NAG2G/__init__.py | import importlib
import NAG2G.tasks
import NAG2G.data
import NAG2G.models
import NAG2G.losses
import NAG2G.utils
import NAG2G.optim.lr_scheduler
import NAG2G.modules
import NAG2G.search_strategies
import NAG2G.decoder | 218 | Python | .py | 10 | 20.8 | 31 | 0.894231 | dptech-corp/NAG2G | 8 | 4 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,188 | validate.py | dptech-corp_NAG2G/NAG2G/validate.py | #!/usr/bin/env python3 -u
# Copyright (c) DP Technology, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import ast
import logging
import json
import os
import sys
import numpy as np
from argparse import Namespace
from itertools import chain
import pickle
import torch
from unicore import checkpoint_utils, distributed_utils, utils
from unicore import tasks, options
from unicore.logging import metrics, progress_bar
from search_strategies.parse import add_search_strategies_args
from search_strategies.beam_search_generator import SequenceGeneratorBeamSearch
from search_strategies import search
from search_strategies.simple_sequence_generator import SimpleGenerator
from search_strategies.greedy_generator import GreedyGenerator
from search_strategies.sample_generator import SampleGenerator
from utils import save_config
logging.basicConfig(
format="%(asctime)s | %(levelname)s | %(name)s | %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
level=os.environ.get("LOGLEVEL", "INFO").upper(),
stream=sys.stdout,
)
logger = logging.getLogger("NAG2G_cli.validate")
def G2G_weight_reload(state):
if state["model"]["degree_pe.weight"].shape[0] != 100:
tmp_shape = state["model"]["degree_pe.weight"].shape
tmp = torch.zeros((100, tmp_shape[1])).to(
state["model"]["degree_pe.weight"].device
)
tmp[: tmp_shape[0]] = state["model"]["degree_pe.weight"]
state["model"]["degree_pe.weight"] = tmp
return state
def main(args):
    assert (
        args.batch_size is not None
    ), "Must specify batch size with --batch-size"
use_fp16 = args.fp16
use_cuda = torch.cuda.is_available() and not args.cpu
if use_cuda:
torch.cuda.set_device(args.device_id)
if args.distributed_world_size > 1:
data_parallel_world_size = distributed_utils.get_data_parallel_world_size()
data_parallel_rank = distributed_utils.get_data_parallel_rank()
else:
data_parallel_world_size = 1
data_parallel_rank = 0
overrides = ast.literal_eval(args.model_overrides)
logger.info("loading model(s) from {}".format(args.path))
state = checkpoint_utils.load_checkpoint_to_cpu(args.path)
task = tasks.setup_task(args)
model = task.build_model(args)
if args.task == "G2G":
state = G2G_weight_reload(state)
model.load_state_dict(state["model"], strict=False)
# Move models to GPU
# for model in models:
# if use_fp16:
# model.half()
# if use_cuda:
# model.cuda()
if use_fp16:
model = model.half()
if use_cuda:
model.cuda()
# Print args
logger.info(args)
# Build loss
loss = task.build_loss(args)
loss.eval()
model.eval()
logger.info(model)
logger.info("task: {}".format(task.__class__.__name__))
logger.info("model: {}".format(model.__class__.__name__))
logger.info("loss: {}".format(loss.__class__.__name__))
logger.info(
"num. model params: {:,} (num. trained: {:,})".format(
sum(getattr(p, "_orig_size", p).numel() for p in model.parameters()),
sum(
getattr(p, "_orig_size", p).numel()
for p in model.parameters()
if p.requires_grad
),
)
)
for subset in args.valid_subset.split(","):
try:
task.load_dataset(subset, combine=False, epoch=1, task_cfg=args.task)
dataset = task.dataset(subset)
except KeyError:
raise Exception("Cannot find dataset: " + subset)
if not os.path.exists(args.results_path):
try:
os.makedirs(args.results_path)
            except OSError:
pass
# Initialize data iterator
itr = task.get_batch_iterator(
dataset=dataset,
batch_size=args.batch_size,
ignore_invalid_inputs=True,
seed=args.seed,
num_shards=data_parallel_world_size,
shard_id=data_parallel_rank,
num_workers=args.num_workers,
data_buffer_size=args.data_buffer_size,
).next_epoch_itr(shuffle=False)
progress = progress_bar.progress_bar(
itr,
log_format=args.log_format,
log_interval=args.log_interval,
prefix=f"valid on '{subset}' subset",
default_log_format=("tqdm" if not args.no_progress_bar else "simple"),
)
np.random.seed(args.seed)
# utils.set_torch_seed(args.seed)
if args.task == "masked_lm":
            raise NotImplementedError("masked_lm validation is not supported")
log_outputs = []
for i, sample in enumerate(progress):
sample = utils.move_to_cuda(sample) if use_cuda else sample
if len(sample) == 0:
continue
_loss, _sample_size, log_output = task.test_step(
args, sample, model, loss
)
progress.log(log_output, step=i)
log_outputs.append(log_output)
if data_parallel_world_size > 1:
log_outputs = distributed_utils.all_gather_list(
log_outputs,
max_size=args.all_gather_list_size,
group=distributed_utils.get_data_parallel_group(),
)
log_outputs = list(chain.from_iterable(log_outputs))
with metrics.aggregate() as agg:
task.reduce_metrics(log_outputs, loss)
log_output = agg.get_smoothed_values()
progress.print(log_output, tag=subset, step=i)
else:
print(
"test beam search params: ",
args.search_strategies,
args.beam_size,
args.len_penalty,
args.temperature,
)
if args.bpe_tokenizer_path == "none":
dictionary = task.dictionary
else:
dictionary = task.infer_dictionary
if args.search_strategies == "SequenceGeneratorBeamSearch_test":
search_strategy = None
generator = SequenceGeneratorBeamSearch(
[model],
dictionary,
beam_size=args.beam_size,
len_penalty=args.len_penalty,
max_len_b=args.max_seq_len - 1,
                search_strategy=search_strategy,
                eos=dictionary.index('[SEP2]'),
)
            infer_beam_size_list = [5, 2, 1, 1, 1]
generator2 = []
for size in infer_beam_size_list:
model_tmp = SequenceGeneratorBeamSearch(
[model],
dictionary,
beam_size=size,
len_penalty=args.len_penalty,
max_len_b=args.max_seq_len - 1,
                    search_strategy=search_strategy,
)
generator2.append(model_tmp)
elif args.search_strategies == "SequenceGeneratorBeamSearch":
search_strategy = None
generator = SequenceGeneratorBeamSearch(
[model],
dictionary,
beam_size=args.beam_size,
len_penalty=args.len_penalty,
max_len_b=args.max_seq_len - 1,
                search_strategy=search_strategy,
# normalize_scores=False
)
elif args.search_strategies == "SimpleGenerator":
generator = SimpleGenerator(
model,
dictionary,
beam_size=args.beam_size,
len_penalty=args.len_penalty,
max_seq_len=args.max_seq_len - 1,
args=args,
)
elif args.search_strategies == "GreedyGenerator":
generator = GreedyGenerator(
model, dictionary, beam_size=args.beam_size
)
log_outputs = []
for i, sample in enumerate(progress):
sample = utils.move_to_cuda(sample) if use_cuda else sample
if len(sample) == 0:
continue
if args.search_strategies == "SequenceGeneratorBeamSearch_test":
pred, log_output = task.test_step(
args, sample, generator, loss, i, args.seed,
second_beam_size = args.beam_size_second,
second_token_size=args.beam_head_second,
model2 = generator2
)
else:
pred, log_output = task.test_step(
args, sample, generator, loss, i, args.seed
)
progress.log(log_output, step=i)
log_outputs.append(log_output)
if data_parallel_world_size > 1:
log_outputs = distributed_utils.all_gather_list(
log_outputs,
max_size=450000000,
group=distributed_utils.get_data_parallel_group(),
)
log_outputs = list(chain.from_iterable(log_outputs))
with metrics.aggregate() as agg:
task.reduce_metrics(log_outputs, loss)
log_output = agg.get_smoothed_values()
progress.print(log_output, tag=subset, step=i)
def cli_main():
parser = options.get_validation_parser()
add_search_strategies_args(parser)
options.add_model_args(parser)
args = options.parse_args_and_arch(parser)
args = save_config.read_config(args)
distributed_utils.call_main(args, main)
if __name__ == "__main__":
cli_main()
| 9,986 | Python | .py | 244 | 28.840164 | 83 | 0.560395 | dptech-corp/NAG2G | 8 | 4 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,189 | data_utils.py | dptech-corp_NAG2G/NAG2G/data/data_utils.py | try:
from collections.abc import Iterable
except ImportError:
from collections import Iterable
import logging
import numpy as np
logger = logging.getLogger(__name__)
def collate_cross_2d(
values,
pad_idx,
left_pad=False,
pad_to_length=None,
pad_to_multiple=1,
):
"""Convert a list of 2d tensors into a padded 2d tensor."""
size_h = max(v.size(0) for v in values)
size_w = max(v.size(1) for v in values)
if pad_to_multiple != 1 and size_h % pad_to_multiple != 0:
size_h = int(((size_h - 0.1) // pad_to_multiple + 1) * pad_to_multiple)
if pad_to_multiple != 1 and size_w % pad_to_multiple != 0:
size_w = int(((size_w - 0.1) // pad_to_multiple + 1) * pad_to_multiple)
res = values[0].new(len(values), size_h, size_w).fill_(pad_idx)
def copy_tensor(src, dst):
assert dst.numel() == src.numel()
dst.copy_(src)
for i, v in enumerate(values):
copy_tensor(v, res[i][size_h - v.size(0):, size_w - v.size(1):]
if left_pad else res[i][:v.size(0), :v.size(1)])
return res
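# Shape sketch (hypothetical inputs): collating two 2d tensors of shapes (3, 5)
# and (4, 2) with pad_to_multiple=8 yields a (2, 8, 8) batch; each source tensor
# is copied into the top-left corner (bottom-right when left_pad=True).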
def collate_tokens_coords(
values,
pad_idx,
left_pad=False,
pad_to_length=None,
pad_to_multiple=1,
):
"""Convert a list of 1d tensors into a padded 2d tensor."""
size = max(v.size(0) for v in values)
size = size if pad_to_length is None else max(size, pad_to_length)
if pad_to_multiple != 1 and size % pad_to_multiple != 0:
size = int(((size - 0.1) // pad_to_multiple + 1) * pad_to_multiple)
res = values[0].new(len(values), size, 3).fill_(pad_idx)
def copy_tensor(src, dst):
assert dst.numel() == src.numel()
dst.copy_(src)
for i, v in enumerate(values):
copy_tensor(v, res[i][size - len(v):, :]
if left_pad else res[i][: len(v), :])
return res
def collect_filtered(function, iterable, filtered):
"""
Similar to :func:`filter` but collects filtered elements in ``filtered``.
Args:
function (callable): function that returns ``False`` for elements that
should be filtered
iterable (iterable): iterable to filter
filtered (list): list to store filtered elements
"""
for el in iterable:
if function(el):
yield el
else:
filtered.append(el)
def batch_by_dynamic(
indices,
num_tokens_fn,
num_tokens_vec=None,
max_tokens=None,
max_sentences=None,
required_batch_size_multiple=1,
fixed_shapes=None,
):
"""
Yield mini-batches of indices bucketed by size. Batches may contain
sequences of different lengths.
Args:
indices (List[int]): ordered list of dataset indices
num_tokens_fn (callable): function that returns the number of tokens at
a given index
num_tokens_vec (List[int], optional): precomputed vector of the number
of tokens for each index in indices (to enable faster batch generation)
max_tokens (int, optional): max number of tokens in each batch
(default: None).
max_sentences (int, optional): max number of sentences in each
batch (default: None).
required_batch_size_multiple (int, optional): require batch size to
be less than N or a multiple of N (default: 1).
fixed_shapes (List[Tuple[int, int]], optional): if given, batches will
only be created with the given shapes. *max_sentences* and
*required_batch_size_multiple* will be ignored (default: None).
"""
# added int() to avoid TypeError: an integer is required
max_tokens = int(max_tokens) if max_tokens is not None else -1
max_sentences = max_sentences if max_sentences is not None else -1
bsz_mult = required_batch_size_multiple
if not isinstance(indices, np.ndarray):
indices = np.fromiter(indices, dtype=np.int64, count=-1)
if num_tokens_vec is not None and not isinstance(num_tokens_vec, np.ndarray):
num_tokens_vec = np.fromiter(num_tokens_vec, dtype=np.int64, count=-1)
if fixed_shapes is None:
if num_tokens_vec is None:
return batch_by_size_fn(
indices,
num_tokens_fn,
max_tokens,
max_sentences,
bsz_mult,
)
else:
return batch_by_size_vec(
indices,
num_tokens_vec,
max_tokens,
max_sentences,
bsz_mult,
)
else:
fixed_shapes = np.array(fixed_shapes, dtype=np.int64)
sort_order = np.lexsort(
[
fixed_shapes[:, 1].argsort(), # length
fixed_shapes[:, 0].argsort(), # bsz
]
)
fixed_shapes_sorted = fixed_shapes[sort_order]
return batch_fixed_shapes_fast(indices, num_tokens_fn, fixed_shapes_sorted)
def batch_by_size_vec(
indices,
num_tokens_vec,
max_tokens,
max_sentences,
bsz_mult,
):
if indices.shape[0] == 0:
return []
assert max_tokens <= 0 or np.max(num_tokens_vec) <= max_tokens, (
f"Sentences lengths should not exceed max_tokens={max_tokens}"
)
indices_len = indices.shape[0]
batches_ends = np.zeros(indices_len, dtype=np.int32)
batches_ends_view = batches_ends
num_tokens_view = num_tokens_vec
pos = 0
new_batch_end = 0
new_batch_max_tokens = 0
new_batch_sentences = 0
new_batch_num_tokens = 0
overflow = False
size_matches_with_bsz_mult = False
batches_count = 0
batch_start = 0
tail_max_tokens = 0
batch_max_tokens = 0
for pos in range(indices_len):
# At every pos we keep stats about the last complete batch [batch_start:batch_end),
# and tail [batch_end:pos].
# 1) Every time when (batch + tail) forms a valid batch
# (according to max_tokens, max_sentences and bsz_mult) we append tail to batch.
# 2) When (batch+tail) violates max_tokens or max_sentences constraints
# we finalize running batch, and tail becomes a new batch.
# 3) There is a corner case when tail also violates constraints.
# In that situation [batch_end:pos-1] (tail without the current pos)
# gets added to the finalized batches, while [pos:pos] becomes a new tail.
#
# Important: For the sake of performance try to avoid using function calls within this loop.
tail_max_tokens = tail_max_tokens \
if tail_max_tokens > num_tokens_view[pos] \
else num_tokens_view[pos]
new_batch_end = pos + 1
new_batch_max_tokens = batch_max_tokens \
if batch_max_tokens > tail_max_tokens \
else tail_max_tokens
new_batch_sentences = new_batch_end - batch_start
new_batch_num_tokens = new_batch_sentences * new_batch_max_tokens
overflow = (new_batch_sentences > max_sentences > 0 or
new_batch_num_tokens > max_tokens > 0)
size_matches_with_bsz_mult = (new_batch_sentences < bsz_mult or
new_batch_sentences % bsz_mult == 0)
if overflow:
tail_num_tokens = tail_max_tokens * \
(new_batch_end - batches_ends_view[batches_count])
tail_overflow = tail_num_tokens > max_tokens > 0
# In case of a tail overflow finalize two batches
if tail_overflow:
batches_count += 1
batches_ends_view[batches_count] = pos
tail_max_tokens = num_tokens_view[pos]
batch_start = batches_ends_view[batches_count]
batches_count += 1
new_batch_max_tokens = tail_max_tokens
if overflow or size_matches_with_bsz_mult:
batches_ends_view[batches_count] = new_batch_end
batch_max_tokens = new_batch_max_tokens
tail_max_tokens = 0
if batches_ends_view[batches_count] != indices_len:
batches_count += 1
# Memory and time-efficient split
return np.split(indices, batches_ends[:batches_count])
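# Worked example (hypothetical values): indices=[0, 1, 2, 3] with
# num_tokens_vec=[6, 6, 6, 10], max_tokens=20, max_sentences=-1 and bsz_mult=1
# yields batches [0, 1, 2] and [3]: a batch costs its size times its longest
# sample, so [0, 1, 2] costs 3 * 6 = 18 tokens, while adding index 3 would cost
# 4 * 10 = 40 > 20.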
# Ignore this part for now
def _filter_by_size_dynamic(indices, size_fn, max_positions, raise_exception=False):
def compare_leq(a, b):
return a <= b if not isinstance(a, tuple) else max(a) <= b
def check_size(idx):
if isinstance(max_positions, float) or isinstance(max_positions, int):
return size_fn(idx) <= max_positions
elif isinstance(max_positions, dict):
idx_size = size_fn(idx)
assert isinstance(idx_size, dict)
intersect_keys = set(max_positions.keys()) & set(idx_size.keys())
return all(
all(
a is None or b is None or a <= b
for a, b in zip(idx_size[key], max_positions[key])
)
for key in intersect_keys
)
else:
# For MultiCorpusSampledDataset, will generalize it later
if not isinstance(size_fn(idx), Iterable):
return all(size_fn(idx) <= b for b in max_positions)
return all(
a is None or b is None or a <= b
for a, b in zip(size_fn(idx), max_positions)
)
ignored = []
itr = collect_filtered(check_size, indices, ignored)
indices = np.fromiter(itr, dtype=np.int64, count=-1)
return indices, ignored
def batch_by_size_fn(
indices,
num_tokens_fn,
max_tokens,
max_sentences,
bsz_mult,
):
indices_len = indices.shape[0]
num_tokens_vec = np.zeros(indices_len, dtype=np.int64)
indices_view = indices
num_tokens_vec_view = num_tokens_vec
for pos in range(indices_len):
num_tokens_vec[pos] = num_tokens_fn(indices_view[pos])
return batch_by_size_vec(indices, num_tokens_vec, max_tokens,
max_sentences, bsz_mult,)
def _find_valid_shape(
shapes_view,
num_sentences,
num_tokens,
):
"""Return index of first valid shape of -1 if none is found."""
for i in range(shapes_view.shape[0]):
if num_sentences <= shapes_view[i][0] and num_tokens <= shapes_view[i][1]:
return i
return -1
def batch_fixed_shapes_fast(
indices,
num_tokens_fn,
fixed_shapes_sorted,
):
sample_len = 0
sample_lens = []
batch = []
batches = []
indices_view = indices
shapes_view = fixed_shapes_sorted
for i in range(len(indices_view)):
idx = indices_view[i]
num_tokens = num_tokens_fn(idx)
sample_lens.append(num_tokens)
sample_len = max(sample_len, num_tokens)
shape_idx = _find_valid_shape(shapes_view, len(batch) + 1, sample_len)
if shape_idx == -1:
batches.append(batch)
batch = []
sample_lens = []
sample_len = 0
shapes_view = fixed_shapes_sorted
elif shape_idx > 0:
# small optimization for the next call to _find_valid_shape
shapes_view = shapes_view[shape_idx:]
batch.append(idx)
if len(batch) > 0:
batches.append(batch)
return batches
| 11,227 | Python | .py | 283 | 31.045936 | 100 | 0.604463 | dptech-corp/NAG2G | 8 | 4 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,190 | pad_dataset_3d.py | dptech-corp_NAG2G/NAG2G/data/pad_dataset_3d.py | from unicore.data import BaseWrapperDataset
def collate_tokens_3d(
values,
pad_idx,
left_pad=False,
pad_to_length=None,
pad_to_multiple=1,
):
"""Convert a list of 1d tensors into a padded 2d tensor."""
size = max(v.size(0) for v in values)
size = size if pad_to_length is None else max(size, pad_to_length)
if pad_to_multiple != 1 and size % pad_to_multiple != 0:
size = int(((size - 0.1) // pad_to_multiple + 1) * pad_to_multiple)
res = values[0].new(len(values), size, size, values[0].size(-1)).fill_(pad_idx)
def copy_tensor(src, dst):
assert dst.numel() == src.numel()
dst.copy_(src)
for i, v in enumerate(values):
copy_tensor(
v,
res[i][size - len(v) :, size - len(v) :]
if left_pad
else res[i][: len(v), : len(v)],
)
return res
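# Shape sketch (hypothetical inputs): collating square feature maps of shapes
# (3, 3, c) and (5, 5, c) with pad_to_multiple=8 (as the collater below passes)
# yields a (2, 8, 8, c) batch padded with pad_idx.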
class RightPadDataset3D(BaseWrapperDataset):
def __init__(self, dataset, pad_idx, left_pad=False):
super().__init__(dataset)
self.pad_idx = pad_idx
self.left_pad = left_pad
def collater(self, samples):
return collate_tokens_3d(
samples, self.pad_idx, left_pad=self.left_pad, pad_to_multiple=8
)
| 1,236 | Python | .py | 34 | 29.264706 | 83 | 0.594979 | dptech-corp/NAG2G | 8 | 4 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,191 | random_smiles_dataset.py | dptech-corp_NAG2G/NAG2G/data/random_smiles_dataset.py | # Copyright (c) DP Technology, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from functools import lru_cache
from unicore.data import BaseWrapperDataset
from rdkit import Chem
import random
import numpy as np
import logging
logger = logging.getLogger(__name__)
class RandomSmilesDataset(BaseWrapperDataset):
def __init__(self, dataset, prob=1.0):
super().__init__(dataset)
self.prob = prob
def get_random_smiles(self, smi):
if self.prob == 0 or random.random() >= self.prob:
return smi
try:
mol = Chem.MolFromSmiles(smi)
if mol is None:
return smi
return Chem.MolToSmiles(mol, doRandom=True)
        except Exception:
return smi
@lru_cache(maxsize=16)
def __getitem__(self, idx):
result = self.dataset[idx]
if isinstance(result, list):
smi = ".".join([self.get_random_smiles(i) for i in self.dataset[idx]])
elif isinstance(result, str):
smi = [self.get_random_smiles(i) for i in result.split(".") if i != ""]
random.shuffle(smi)
smi = ".".join(smi)
else:
raise
return smi
class ReorderSmilesDataset(BaseWrapperDataset):
def __init__(self, product_dataset, reactant_dataset):
super().__init__(product_dataset)
self.reactant_dataset = reactant_dataset
self.set_epoch(None)
def set_epoch(self, epoch, **unused):
super().set_epoch(epoch)
self.dataset.set_epoch(epoch)
self.reactant_dataset.set_epoch(epoch)
self.epoch = epoch
def get_map(self, smi):
c_mol = Chem.MolFromSmiles(smi)
c_id_list = [atom.GetAtomMapNum() for atom in c_mol.GetAtoms()]
return c_id_list, c_mol
def get_list(self, atoms_map_product, atoms_map_reactant):
atoms_map_reactant_dict = {
atoms_map_reactant[i]: i for i in range(len(atoms_map_reactant))
}
tmp = np.array([atoms_map_reactant_dict[i] for i in atoms_map_product])
orders = np.array([i for i in range(len(atoms_map_reactant))])
mask = np.array(atoms_map_reactant) != 0
list_reactant = np.concatenate([tmp, orders[~mask]], 0).tolist()
return list_reactant
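        # Worked example (hypothetical map numbers): atoms_map_product=[2, 1]
        # and atoms_map_reactant=[1, 2, 0] give tmp=[1, 0] (reactant positions
        # of product maps 2 and 1); the unmapped reactant atom at position 2 is
        # appended, so the renumbering order is [1, 0, 2].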
def __getitem__(self, index: int):
return self.__getitem_cached__(self.epoch, index)
@lru_cache(maxsize=16)
def __getitem_cached__(self, epoch: int, index: int):
product = self.dataset[index]
reactant = self.reactant_dataset[index]
product_map, _ = self.get_map(product)
reactant_map, reactant_mol = self.get_map(reactant)
list_reactant = self.get_list(product_map, reactant_map)
nm = Chem.RenumberAtoms(reactant_mol, list_reactant)
return Chem.MolToSmiles(nm, canonical=False)
| 2,984 | Python | .py | 72 | 33.652778 | 83 | 0.639724 | dptech-corp/NAG2G | 8 | 4 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,192 | collator.py | dptech-corp_NAG2G/NAG2G/data/collator.py | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import torch
def pad_1d_unsqueeze(x, padlen):
x = x + 1 # pad id = 0
xlen = x.size(0)
if xlen < padlen:
new_x = x.new_zeros([padlen], dtype=x.dtype)
new_x[:xlen] = x
x = new_x
return x.unsqueeze(0)
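# e.g. pad_1d_unsqueeze(torch.tensor([3, 7]), padlen=4) -> tensor([[4, 8, 0, 0]]);
# values are shifted by +1 so that 0 can serve as the padding id.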
def pad_2d_unsqueeze(x, padlen):
x = x + 1 # pad id = 0
xlen, xdim = x.size()
if xlen < padlen:
new_x = x.new_zeros([padlen, xdim], dtype=x.dtype)
new_x[:xlen, :] = x
x = new_x
return x.unsqueeze(0)
def pad_attn_bias_unsqueeze(x, padlen):
xlen = x.size(0)
if xlen < padlen:
new_x = x.new_zeros([padlen, padlen], dtype=x.dtype).fill_(float("-inf"))
new_x[:xlen, :xlen] = x
new_x[xlen:, :xlen] = 0
x = new_x
return x.unsqueeze(0)
def pad_edge_type_unsqueeze(x, padlen):
xlen = x.size(0)
if xlen < padlen:
new_x = x.new_zeros([padlen, padlen, x.size(-1)], dtype=x.dtype)
new_x[:xlen, :xlen, :] = x
x = new_x
return x.unsqueeze(0)
def pad_spatial_pos_unsqueeze(x, padlen):
x = x + 1
xlen = x.size(0)
if xlen < padlen:
new_x = x.new_zeros([padlen, padlen], dtype=x.dtype)
new_x[:xlen, :xlen] = x
x = new_x
return x.unsqueeze(0)
def pad_3d_unsqueeze(x, padlen1, padlen2, padlen3):
x = x + 1
xlen1, xlen2, xlen3, xlen4 = x.size()
if xlen1 < padlen1 or xlen2 < padlen2 or xlen3 < padlen3:
new_x = x.new_zeros([padlen1, padlen2, padlen3, xlen4], dtype=x.dtype)
new_x[:xlen1, :xlen2, :xlen3, :] = x
x = new_x
return x.unsqueeze(0)
def pad_pos_unsqueeze(x, padlen):
xlen, xdim = x.size()
if xlen < padlen:
new_x = x.new_zeros([padlen, xdim], dtype=x.dtype)
new_x[:xlen, :] = x
x = new_x
return x.unsqueeze(0) | 1,861 | Python | .py | 57 | 26.561404 | 81 | 0.573743 | dptech-corp/NAG2G | 8 | 4 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,193 | pad_dataset.py | dptech-corp_NAG2G/NAG2G/data/pad_dataset.py | # Copyright (c) DP Technology, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from . import data_utils
from unicore.data import BaseWrapperDataset
class RightPadDatasetCoord(BaseWrapperDataset):
def __init__(self, dataset, pad_idx, left_pad=False):
super().__init__(dataset)
self.pad_idx = pad_idx
self.left_pad = left_pad
def collater(self, samples):
return data_utils.collate_tokens_coords(samples, self.pad_idx, left_pad=self.left_pad, pad_to_multiple=8)
class RightPadDatasetCross2D(BaseWrapperDataset):
def __init__(self, dataset, pad_idx, left_pad=False):
super().__init__(dataset)
self.pad_idx = pad_idx
self.left_pad = left_pad
def collater(self, samples):
return data_utils.collate_cross_2d(samples, self.pad_idx, left_pad=self.left_pad, pad_to_multiple=8)
| 959 | Python | .py | 20 | 42.55 | 113 | 0.711063 | dptech-corp/NAG2G | 8 | 4 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,194 | coord_noise_dataset.py | dptech-corp_NAG2G/NAG2G/data/coord_noise_dataset.py |
from functools import lru_cache
import numpy as np
import torch
from unicore.data import Dictionary
from unicore.data import BaseWrapperDataset
from . import data_utils
def kabsch_rotation(P, Q):
C = P.transpose(-1, -2) @ Q
V, _, W = np.linalg.svd(C)
d = (np.linalg.det(V) * np.linalg.det(W)) < 0.0
if d:
V[:, -1] = -V[:, -1]
U = V @ W
return U
def get_optimal_transform(src_atoms, tgt_atoms):
src_center = src_atoms.mean(-2)[None, :]
tgt_center = tgt_atoms.mean(-2)[None, :]
r = kabsch_rotation(src_atoms - src_center, tgt_atoms - tgt_center)
x = tgt_center - src_center @ r
return r, x
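# Minimal usage sketch (hypothetical (n, 3) coordinate arrays with matching atom
# order): least-squares superposition of src_atoms onto tgt_atoms via the Kabsch
# algorithm.
#     R, t = get_optimal_transform(src_atoms, tgt_atoms)
#     aligned = src_atoms @ R + t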
class CoordNoiseDataset(BaseWrapperDataset):
def __init__(
self,
dataset: torch.utils.data.Dataset,
tgt_dataset: torch.utils.data.Dataset,
coord_gen_prob: float,
coord_noise_prob: float,
src_noise: float = 1.0,
tgt_noise: float = 1.0,
seed: int = 1,
):
assert 0.0 <= coord_noise_prob <= 1.0
self.dataset = dataset
self.tgt_dataset = tgt_dataset
self.coord_gen_prob = coord_gen_prob
self.coord_noise_prob = coord_noise_prob
self.seed = seed
self.src_noise = src_noise
self.tgt_noise = tgt_noise
self.epoch = None
def set_epoch(self, epoch, **unused):
super().set_epoch(epoch)
self.dataset.set_epoch(epoch)
self.epoch = epoch
def __getitem__(self, index: int):
return self.__getitem_cached__(self.epoch, index)
@lru_cache(maxsize=16)
def __getitem_cached__(self, epoch: int, index: int):
src_coord = self.dataset[index]
tgt_coord = self.tgt_dataset[index]
num_atoms = src_coord.shape[0]
with data_utils.numpy_seed(self.seed, epoch, index):
if np.random.rand() < self.coord_gen_prob:
src_coord = np.copy(src_coord)
noise = self.src_noise
else:
src_coord = np.copy(tgt_coord)
noise = self.tgt_noise
if np.random.rand() < self.coord_noise_prob:
src_coord = src_coord + np.random.randn(num_atoms, 3) * noise
R, T = get_optimal_transform(src_coord, tgt_coord)
src_coord = src_coord @ R + T
return {"coordinates": src_coord.astype(np.float32)}
| 2,363 | Python | .py | 63 | 29.539683 | 77 | 0.599034 | dptech-corp/NAG2G | 8 | 4 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,195 | customized_unicore_dataset.py | dptech-corp_NAG2G/NAG2G/data/customized_unicore_dataset.py | from unicore.data import UnicoreDataset
from . import data_utils
import numpy as np
class CustomizedUnicoreDataset(UnicoreDataset):
@staticmethod
def get_batch_shapes(self):
"""
Return a list of valid batch shapes, for example::
[(8, 512), (16, 256), (32, 128)]
The first dimension of each tuple is the batch size and can be ``None``
to automatically infer the max batch size based on ``--max-tokens``.
The second dimension of each tuple is the max supported length as given
by :func:`fairseq.data.FairseqDataset.num_tokens`.
This will be used by :func:`fairseq.data.FairseqDataset.batch_by_size`
to restrict batch shapes. This is useful on TPUs to avoid too many
dynamic shapes (and recompilations).
"""
return None
@staticmethod
def filter_indices_by_size(self, indices, max_sizes):
"""
Filter a list of sample indices. Remove those that are longer than
specified in *max_sizes*.
WARNING: don't update, override method in child classes
Args:
indices (np.array): original array of sample indices
max_sizes (int or list[int] or tuple[int]): max sample size,
can be defined separately for src and tgt (then list or tuple)
Returns:
np.array: filtered sample array
list: list of removed indices
"""
if isinstance(max_sizes, float) or isinstance(max_sizes, int):
if hasattr(self, "sizes") and isinstance(self.sizes, np.ndarray):
ignored = indices[self.sizes[indices] > max_sizes].tolist()
indices = indices[self.sizes[indices] <= max_sizes]
elif (
hasattr(self, "sizes")
and isinstance(self.sizes, list)
and len(self.sizes) == 1
                # sizes stored as a one-element list of size arrays
):
ignored = indices[self.sizes[0][indices] > max_sizes].tolist()
indices = indices[self.sizes[0][indices] <= max_sizes]
else:
indices, ignored = data_utils._filter_by_size_dynamic(
indices, self.size, max_sizes
)
else:
indices, ignored = data_utils._filter_by_size_dynamic(
indices, self.size, max_sizes
)
return indices, ignored
@staticmethod
def batch_by_size_dynamic(
self,
indices,
num_tokens_fn,
num_tokens_vec,
max_tokens=None,
max_sentences=None,
required_batch_size_multiple=1,
):
"""
Given an ordered set of indices, return batches according to
*max_tokens*, *max_sentences* and *required_batch_size_multiple*.
"""
fixed_shapes = CustomizedUnicoreDataset.get_batch_shapes(self)
        # fixed_shapes is an optional list of (batch_size, max_len) buckets;
        # None means batches are formed dynamically.
if fixed_shapes is not None:
def adjust_bsz(bsz, num_tokens):
if bsz is None:
assert max_tokens is not None, "Must specify --max-tokens"
bsz = max_tokens // num_tokens
if max_sentences is not None:
bsz = min(bsz, max_sentences)
elif (
bsz >= required_batch_size_multiple
and bsz % required_batch_size_multiple != 0
):
bsz -= bsz % required_batch_size_multiple
return bsz
fixed_shapes = np.array(
[
[adjust_bsz(bsz, num_tokens), num_tokens]
for (bsz, num_tokens) in fixed_shapes
]
)
        try:
            num_tokens_vec = num_tokens_vec.astype("int64")
        except NotImplementedError:
            # the sizes container may not support astype; signal "unavailable"
            num_tokens_vec = None
return data_utils.batch_by_dynamic(
indices,
num_tokens_fn=num_tokens_fn,
num_tokens_vec=num_tokens_vec,
max_tokens=max_tokens,
max_sentences=max_sentences,
required_batch_size_multiple=required_batch_size_multiple,
fixed_shapes=fixed_shapes,
)
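# A quick, standalone check of the bucket adjustment above: with
# max_tokens=4096 a (None, 512) bucket resolves to 4096 // 512 = 8 sentences,
# and an explicit bucket size is rounded down to required_batch_size_multiple.
if __name__ == "__main__":
    def adjust_bsz(bsz, num_tokens, max_tokens=4096, max_sentences=None,
                   required_batch_size_multiple=8):
        if bsz is None:
            bsz = max_tokens // num_tokens
        if max_sentences is not None:
            bsz = min(bsz, max_sentences)
        elif (bsz >= required_batch_size_multiple
              and bsz % required_batch_size_multiple != 0):
            bsz -= bsz % required_batch_size_multiple
        return bsz

    print(adjust_bsz(None, 512))  # -> 8
    print(adjust_bsz(12, 256))    # -> 8 (rounded down to the multiple)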
| 4,176 | Python | .py | 101 | 29.356436 | 79 | 0.568512 | dptech-corp/NAG2G | 8 | 4 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,196 | bart_token_dataset.py | dptech-corp_NAG2G/NAG2G/data/bart_token_dataset.py | # Copyright (c) DP Technology, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import torch
from functools import lru_cache
from unicore.data import BaseWrapperDataset
from transformers import BartTokenizerFast
import logging
logger = logging.getLogger(__name__)
class BartTokenDataset(BaseWrapperDataset):
def __init__(self, dataset, dict_path, max_seq_len: int = 512):
self.dataset = dataset
self.tokenizer = BartTokenizerFast.from_pretrained(dict_path)
self.max_seq_len = max_seq_len
def __len__(self):
return len(self.dataset)
@lru_cache(maxsize=16)
def __getitem__(self, idx):
tmp = self.dataset[idx]
output = self.tokenizer(tmp)["input_ids"]
        assert 2 < len(output) < self.max_seq_len
return torch.LongTensor(output)
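# Hedged usage sketch (the tokenizer path below is an assumed example, not a
# value shipped with this repo):
#   ds = BartTokenDataset(smiles_dataset, "facebook/bart-base", max_seq_len=512)
#   ds[0]  # -> torch.LongTensor of input_ids with 2 < len < max_seq_len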
| 928 | Python | .py | 23 | 35.608696 | 69 | 0.717464 | dptech-corp/NAG2G | 8 | 4 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,197 | mask_points_dataset.py | dptech-corp_NAG2G/NAG2G/data/mask_points_dataset.py | from functools import lru_cache
import numpy as np
import torch
from unicore.data import Dictionary, data_utils
from unicore.data import BaseWrapperDataset
class MaskPointsDataset(BaseWrapperDataset):
def __init__(
self,
dataset: torch.utils.data.Dataset,
coord_dataset: torch.utils.data.Dataset,
vocab: Dictionary,
pad_idx: int,
mask_idx: int,
noise_type: str,
noise: float = 1.0,
seed: int = 1,
mask_prob: float = 0.15,
leave_unmasked_prob: float = 0.1,
random_token_prob: float = 0.1,
):
assert 0.0 < mask_prob < 1.0
assert 0.0 <= random_token_prob <= 1.0
assert 0.0 <= leave_unmasked_prob <= 1.0
assert random_token_prob + leave_unmasked_prob <= 1.0
self.dataset = dataset
self.coord_dataset = coord_dataset
self.vocab = vocab
self.pad_idx = pad_idx
self.mask_idx = mask_idx
self.noise_type = noise_type
self.noise = noise
self.seed = seed
self.mask_prob = mask_prob
self.leave_unmasked_prob = leave_unmasked_prob
self.random_token_prob = random_token_prob
if random_token_prob > 0.0:
weights = np.ones(len(self.vocab))
weights[vocab.special_index()] = 0
self.weights = weights / weights.sum()
self.epoch = None
if self.noise_type == 'trunc_normal':
self.noise_f = lambda num_mask: np.clip(np.random.randn(
num_mask, 3) * self.noise, a_min=-self.noise*2.0, a_max=self.noise*2.0)
elif self.noise_type == 'normal':
self.noise_f = lambda num_mask: np.random.randn(
num_mask, 3) * self.noise
elif self.noise_type == 'uniform':
self.noise_f = lambda num_mask: np.random.uniform(
low=-self.noise, high=self.noise, size=(num_mask, 3))
else:
self.noise_f = lambda num_mask: 0.0
def set_epoch(self, epoch, **unused):
super().set_epoch(epoch)
self.coord_dataset.set_epoch(epoch)
self.dataset.set_epoch(epoch)
self.epoch = epoch
def __getitem__(self, index: int):
return self.__getitem_cached__(self.epoch, index)
@lru_cache(maxsize=16)
def __getitem_cached__(self, epoch: int, index: int):
ret = {}
with data_utils.numpy_seed(self.seed, epoch, index):
item = self.dataset[index]
coord = self.coord_dataset[index]
sz = len(item)
# don't allow empty sequence
assert sz > 0
# decide elements to mask
num_mask = int(
# add a random number for probabilistic rounding
self.mask_prob * sz + np.random.rand()
)
mask_idc = np.random.choice(sz, num_mask, replace=False)
mask = np.full(sz, False)
mask[mask_idc] = True
ret['targets'] = np.full(len(mask), self.pad_idx)
ret['targets'][mask] = item[mask]
ret['targets'] = torch.from_numpy(ret['targets']).long()
# decide unmasking and random replacement
rand_or_unmask_prob = self.random_token_prob + self.leave_unmasked_prob
if rand_or_unmask_prob > 0.0:
rand_or_unmask = mask & (
np.random.rand(sz) < rand_or_unmask_prob)
if self.random_token_prob == 0.0:
unmask = rand_or_unmask
rand_mask = None
elif self.leave_unmasked_prob == 0.0:
unmask = None
rand_mask = rand_or_unmask
else:
unmask_prob = self.leave_unmasked_prob / rand_or_unmask_prob
decision = np.random.rand(sz) < unmask_prob
unmask = rand_or_unmask & decision
rand_mask = rand_or_unmask & (~decision)
else:
unmask = rand_mask = None
if unmask is not None:
mask = mask ^ unmask
new_item = np.copy(item)
new_item[mask] = self.mask_idx
num_mask = mask.astype(np.int32).sum()
new_coord = np.copy(coord)
new_coord[mask, :] += self.noise_f(num_mask)
if rand_mask is not None:
num_rand = rand_mask.sum()
if num_rand > 0:
new_item[rand_mask] = np.random.choice(
len(self.vocab),
num_rand,
p=self.weights,
)
ret['atoms'] = torch.from_numpy(new_item).long()
ret['coordinates'] = torch.from_numpy(new_coord).float()
return ret
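# Toy version of the three-way masking decision used above: masked positions
# are split into keep-[MASK] / leave-unmasked / random-token groups with the
# same probabilities as the dataset defaults.
if __name__ == "__main__":
    np.random.seed(0)
    sz, mask_prob = 10, 0.15
    leave_unmasked_prob = random_token_prob = 0.1
    num_mask = int(mask_prob * sz + np.random.rand())  # probabilistic rounding
    mask = np.full(sz, False)
    mask[np.random.choice(sz, num_mask, replace=False)] = True
    rand_or_unmask_prob = leave_unmasked_prob + random_token_prob
    rand_or_unmask = mask & (np.random.rand(sz) < rand_or_unmask_prob)
    decision = np.random.rand(sz) < leave_unmasked_prob / rand_or_unmask_prob
    unmask = rand_or_unmask & decision        # revert to the original token
    rand_mask = rand_or_unmask & (~decision)  # replace with a random token
    print(mask ^ unmask)                      # positions that stay [MASK]
    print(rand_mask)                          # positions replaced at random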
class MaskPointsPocketDataset(BaseWrapperDataset):
def __init__(
self,
dataset: torch.utils.data.Dataset,
coord_dataset: torch.utils.data.Dataset,
residue_dataset: torch.utils.data.Dataset,
vocab: Dictionary,
pad_idx: int,
mask_idx: int,
noise_type: str,
noise: float = 1.0,
seed: int = 1,
mask_prob: float = 0.15,
leave_unmasked_prob: float = 0.1,
random_token_prob: float = 0.1,
):
assert 0.0 < mask_prob < 1.0
assert 0.0 <= random_token_prob <= 1.0
assert 0.0 <= leave_unmasked_prob <= 1.0
assert random_token_prob + leave_unmasked_prob <= 1.0
self.dataset = dataset
self.coord_dataset = coord_dataset
self.residue_dataset = residue_dataset
self.vocab = vocab
self.pad_idx = pad_idx
self.mask_idx = mask_idx
self.noise_type = noise_type
self.noise = noise
self.seed = seed
self.mask_prob = mask_prob
self.leave_unmasked_prob = leave_unmasked_prob
self.random_token_prob = random_token_prob
if random_token_prob > 0.0:
weights = np.ones(len(self.vocab))
weights[vocab.special_index()] = 0
self.weights = weights / weights.sum()
self.epoch = None
if self.noise_type == 'trunc_normal':
self.noise_f = lambda num_mask: np.clip(np.random.randn(
num_mask, 3) * self.noise, a_min=-self.noise*2.0, a_max=self.noise*2.0)
elif self.noise_type == 'normal':
self.noise_f = lambda num_mask: np.random.randn(
num_mask, 3) * self.noise
elif self.noise_type == 'uniform':
self.noise_f = lambda num_mask: np.random.uniform(
low=-self.noise, high=self.noise, size=(num_mask, 3))
else:
self.noise_f = lambda num_mask: 0.0
def set_epoch(self, epoch, **unused):
super().set_epoch(epoch)
self.coord_dataset.set_epoch(epoch)
self.dataset.set_epoch(epoch)
self.epoch = epoch
def __getitem__(self, index: int):
return self.__getitem_cached__(self.epoch, index)
@lru_cache(maxsize=16)
def __getitem_cached__(self, epoch: int, index: int):
ret = {}
with data_utils.numpy_seed(self.seed, epoch, index):
item = self.dataset[index]
coord = self.coord_dataset[index]
residue = self.residue_dataset[index]
sz = len(item)
# don't allow empty sequence
assert sz > 0
# mask on the level of residues
res_list = list(set(residue))
res_sz = len(res_list)
# decide elements to mask
num_mask = int(
# add a random number for probabilistic rounding
self.mask_prob * res_sz + np.random.rand()
)
mask_res = np.random.choice(
res_list, num_mask, replace=False).tolist()
mask = np.isin(residue, mask_res)
ret['targets'] = np.full(len(mask), self.pad_idx)
ret['targets'][mask] = item[mask]
ret['targets'] = torch.from_numpy(ret['targets']).long()
# decide unmasking and random replacement
rand_or_unmask_prob = self.random_token_prob + self.leave_unmasked_prob
if rand_or_unmask_prob > 0.0:
rand_or_unmask = mask & (
np.random.rand(sz) < rand_or_unmask_prob)
if self.random_token_prob == 0.0:
unmask = rand_or_unmask
rand_mask = None
elif self.leave_unmasked_prob == 0.0:
unmask = None
rand_mask = rand_or_unmask
else:
unmask_prob = self.leave_unmasked_prob / rand_or_unmask_prob
decision = np.random.rand(sz) < unmask_prob
unmask = rand_or_unmask & decision
rand_mask = rand_or_unmask & (~decision)
else:
unmask = rand_mask = None
if unmask is not None:
mask = mask ^ unmask
new_item = np.copy(item)
new_item[mask] = self.mask_idx
num_mask = mask.astype(np.int32).sum()
new_coord = np.copy(coord)
new_coord[mask, :] += self.noise_f(num_mask)
if rand_mask is not None:
num_rand = rand_mask.sum()
if num_rand > 0:
new_item[rand_mask] = np.random.choice(
len(self.vocab),
num_rand,
p=self.weights,
)
ret['atoms'] = torch.from_numpy(new_item).long()
ret['coordinates'] = torch.from_numpy(new_coord).float()
return ret
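# Residue-level masking in a nutshell: whole residues are drawn first, then
# np.isin marks every atom belonging to a drawn residue.
if __name__ == "__main__":
    residue_demo = np.array([0, 0, 1, 1, 1, 2])  # residue id per atom
    mask_res_demo = [1]                          # residues chosen for masking
    print(np.isin(residue_demo, mask_res_demo))  # [False False True True True False]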
| 10,124 | Python | .py | 237 | 30.278481 | 87 | 0.539164 | dptech-corp/NAG2G | 8 | 4 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,198 | bpe_tokenize_dataset.py | dptech-corp_NAG2G/NAG2G/data/bpe_tokenize_dataset.py | import os
import torch
from functools import lru_cache
from unicore.data import BaseWrapperDataset
from tokenizers import Tokenizer
from tokenizers.models import BPE
import logging
logger = logging.getLogger(__name__)
def tostring(tmp, shift=30000):
tmp = tmp.detach().cpu().numpy().tolist()
return "".join([chr(i + shift) for i in tmp])
def tostringaftsep2(tmp_origin, sep2_ids, shift=30000):
tmp = tmp_origin.detach().cpu().numpy().tolist()
if sep2_ids not in tmp:
return tmp_origin, None
else:
idx = tmp.index(sep2_ids)
if len(tmp) - 1 == idx:
            # TODO: using both the SEP2 token and BPE tokens is not supported
return tmp_origin[:-1], None
else:
return tmp_origin[:idx], "".join([chr(i + shift) for i in tmp[idx + 1 :]])
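# The chr-shift trick above maps every token id to a single unicode character
# (offset 30000), so the BPE tokenizer can merge ids as if they were text.
if __name__ == "__main__":
    demo_ids = torch.tensor([0, 1, 2])
    shifted = tostring(demo_ids)  # three characters at code points 30000..30002
    assert [ord(c) - 30000 for c in shifted] == [0, 1, 2]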
class BpeTokenizeDataset(BaseWrapperDataset):
def __init__(self, dataset, tokenizer_path, flag_aftsep2):
super().__init__(dataset)
self.tokenizer = Tokenizer(
BPE.from_file(
os.path.join(tokenizer_path, "vocab.json"),
os.path.join(tokenizer_path, "merges.txt"),
)
)
self.flag_aftsep2 = flag_aftsep2
        # look up the id of the SEP2 symbol in its chr-shifted form (see tostring)
        self.sep2_token_id = self.tokenizer.encode("ç˜").ids[0]
self.set_epoch(None)
def set_epoch(self, epoch, **unused):
super().set_epoch(epoch)
self.dataset.set_epoch(epoch)
self.epoch = epoch
def __getitem__(self, index: int):
return self.__getitem_cached__(self.epoch, index)
@lru_cache(maxsize=16)
def __getitem_cached__(self, epoch: int, index: int):
if self.flag_aftsep2:
return self.forward_aftsep2(index)
else:
return self.forward_normal(index)
def forward_normal(self, index):
idx = self.dataset[index]
result = tostring(idx)
result = self.tokenizer.encode(result).ids
result = torch.tensor(result).long()
return result
def forward_aftsep2(self, index):
idx = self.dataset[index]
origin_tensor, result = tostringaftsep2(idx, self.sep2_token_id)
if result is None:
return origin_tensor
result = self.tokenizer.encode(result).ids
result = torch.tensor(result).long()
return torch.cat([origin_tensor, result], 0)
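# Hedged usage sketch (the tokenizer directory below is an assumed path; it
# must contain vocab.json and merges.txt trained on chr-shifted sequences):
#   ds = BpeTokenizeDataset(id_dataset, "/path/to/bpe_dir", flag_aftsep2=False)
#   ds.set_epoch(0)
#   ds[0]  # -> torch.LongTensor of BPE token ids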
| 2,329 | Python | .py | 60 | 31.066667 | 86 | 0.628989 | dptech-corp/NAG2G | 8 | 4 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |
2,287,199 | graphormer_dataset.py | dptech-corp_NAG2G/NAG2G/data/graphormer_dataset.py | import random
import pandas as pd
from NAG2G.utils.graph_process import (
process_one,
shuffle_graph_process,
graph2seq_process,
)
from unicore.data import UnicoreDataset, BaseWrapperDataset
from functools import lru_cache
import numpy as np
class CsvGraphormerDataset(UnicoreDataset):
def __init__(self, path):
self.path = path
self.csv_file = pd.read_csv(self.path)
def __len__(self):
return len(self.csv_file)
@lru_cache(maxsize=16)
def __getitem__(self, idx):
return dict(self.csv_file.iloc[idx])
class SmilesDataset(BaseWrapperDataset):
def __init__(self, dataset):
super().__init__(dataset)
@lru_cache(maxsize=16)
def __getitem__(self, idx):
tmp_smiles = self.dataset[idx]
tmp_smiles = tmp_smiles.split(">")
reactant_smiles = tmp_smiles[0]
product_smiles = tmp_smiles[2]
return {
"reactant_smiles": reactant_smiles,
"product_smiles": product_smiles,
}
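# Reaction SMILES follow the "reactants>agents>products" convention, e.g.
# "CCO>[H+]>CC=O".split(">") -> ["CCO", "[H+]", "CC=O"], so index 0 holds the
# reactants and index 2 the products.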
class SmilesDataset_2(BaseWrapperDataset):
def __init__(self, dataset):
super().__init__(dataset)
@lru_cache(maxsize=16)
def __getitem__(self, idx):
result = self.dataset[idx]
return {
"reactant_smiles": result["target"],
"product_smiles": result["input"],
}
class GraphormerDataset(BaseWrapperDataset):
def __init__(self, dataset):
super().__init__(dataset)
self.set_epoch(None)
def set_epoch(self, epoch, **unused):
super().set_epoch(epoch)
self.dataset.set_epoch(epoch)
self.epoch = epoch
def __getitem__(self, index: int):
return self.__getitem_cached__(self.epoch, index)
@lru_cache(maxsize=16)
def __getitem_cached__(self, epoch: int, index: int):
smiles = self.dataset[index]
result = process_one(smiles)
return result
class ShuffleGraphormerDataset(BaseWrapperDataset):
def __init__(self, dataset):
super().__init__(dataset)
self.set_epoch(None)
def set_epoch(self, epoch, **unused):
super().set_epoch(epoch)
self.dataset.set_epoch(epoch)
self.epoch = epoch
def __getitem__(self, index: int):
return self.__getitem_cached__(self.epoch, index)
@lru_cache(maxsize=16)
def __getitem_cached__(self, epoch: int, index: int):
result = self.dataset[index]
result = shuffle_graph_process(result)
return result
class ReorderGraphormerDataset(BaseWrapperDataset):
def __init__(self, product_dataset, reactant_dataset, align_base="product"):
super().__init__(product_dataset)
self.reactant_dataset = reactant_dataset
self.align_base = align_base
self.set_epoch(None)
def get_list(self, atoms_map_product, atoms_map_reactant):
if self.align_base == "reactant":
mask = atoms_map_reactant != 0
orders = np.array([i for i in range(len(atoms_map_reactant))])
list_reactant = np.concatenate([orders[mask], orders[~mask]], 0)
atoms_map_product_dict = {
atoms_map_product[i]: i for i in range(len(atoms_map_product))
}
list_product = [atoms_map_product_dict[i] for i in atoms_map_reactant[mask]]
elif self.align_base == "product":
list_product = None
atoms_map_reactant_dict = {
atoms_map_reactant[i]: i for i in range(len(atoms_map_reactant))
}
tmp = [atoms_map_reactant_dict[i] for i in atoms_map_product]
orders = np.array([i for i in range(len(atoms_map_reactant))])
mask = atoms_map_reactant != 0
list_reactant = np.concatenate([tmp, orders[~mask]], 0)
        else:
            raise ValueError(f"unknown align_base: {self.align_base}")
return list_product, list_reactant
def set_epoch(self, epoch, **unused):
super().set_epoch(epoch)
self.dataset.set_epoch(epoch)
self.reactant_dataset.set_epoch(epoch)
self.epoch = epoch
def __getitem__(self, index: int):
return self.__getitem_cached__(self.epoch, index)
@lru_cache(maxsize=16)
def __getitem_cached__(self, epoch: int, index: int):
product = self.dataset[index]
reactant = self.reactant_dataset[index]
        list_product, list_reactant = self.get_list(
            product["atoms_map"], reactant["atoms_map"]
        )
if list_product is not None:
product = shuffle_graph_process(product, list_=list_product)
if list_reactant is not None:
reactant = shuffle_graph_process(reactant, list_=list_reactant)
return {"reactant": reactant, "product": product}
class SeqGraphormerDataset(BaseWrapperDataset):
def __init__(
self,
dataset,
class_dataset,
min_node,
want_attn,
want_charge_h,
max_seq_len=None,
sumto2=True,
use_sep2=False,
want_h_degree=False,
idx_type=0,
charge_h_last=False,
):
super().__init__(dataset)
self.dataset = dataset
self.class_dataset = class_dataset
self.min_node = min_node
self.want_attn = want_attn
self.want_charge_h = want_charge_h
self.max_seq_len = max_seq_len
self.epoch = None
self.sumto2 = sumto2
self.use_sep2 = use_sep2
self.want_h_degree = want_h_degree
self.idx_type = idx_type
self.charge_h_last = charge_h_last
self.set_epoch(None)
def set_epoch(self, epoch, **unused):
super().set_epoch(epoch)
self.dataset.set_epoch(epoch)
if self.class_dataset is not None:
self.class_dataset.set_epoch(epoch)
self.epoch = epoch
def __getitem__(self, index: int):
return self.__getitem_cached__(self.epoch, index)
@lru_cache(maxsize=16)
def __getitem_cached__(self, epoch: int, index: int):
result = self.dataset[index]
if self.class_dataset is not None:
class_idx = self.class_dataset[index]
else:
class_idx = None
result = graph2seq_process(
result,
class_idx,
self.min_node,
want_attn=self.want_attn,
want_charge_h=self.want_charge_h,
max_seq_len=self.max_seq_len,
sumto2=self.sumto2,
use_sep2=self.use_sep2,
want_h_degree=self.want_h_degree,
idx_type=self.idx_type,
charge_h_last=self.charge_h_last,
)
return result
class ReorderCoordDataset(BaseWrapperDataset):
def __init__(
self,
raw_coord_dataset,
map_coord_dataset,
product_dataset,
):
super().__init__(raw_coord_dataset)
self.product_dataset = product_dataset
self.map_coord_dataset = map_coord_dataset
self.set_epoch(None)
def set_epoch(self, epoch, **unused):
super().set_epoch(epoch)
self.dataset.set_epoch(epoch)
self.product_dataset.set_epoch(epoch)
self.map_coord_dataset.set_epoch(epoch)
self.epoch = epoch
def __getitem__(self, index: int):
return self.__getitem_cached__(self.epoch, index)
@lru_cache(maxsize=16)
def __getitem_cached__(self, epoch: int, index: int):
result = self.dataset[index]
old_map = self.map_coord_dataset[index]
new_map = self.product_dataset[index]["atoms_map"]
old_map_dict = {old_map[i]: i for i in range(len(old_map))}
orders = [old_map_dict[i] for i in new_map]
return result[orders]
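# The reorder above permutes stored coordinate rows so they follow the
# product's atom-map order; a minimal standalone equivalent:
if __name__ == "__main__":
    coords = np.arange(9).reshape(3, 3)  # one row of xyz per atom
    old_map = [2, 3, 1]                  # atom-map id of each stored row
    new_map = [1, 2, 3]                  # desired atom-map order
    lut = {m: i for i, m in enumerate(old_map)}
    print(coords[[lut[m] for m in new_map]])  # rows reordered to ids 1, 2, 3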
| 7,755 | Python | .py | 207 | 28.806763 | 88 | 0.605512 | dptech-corp/NAG2G | 8 | 4 | 2 | GPL-3.0 | 9/5/2024, 10:48:09 PM (Europe/Amsterdam) |