code
stringlengths
501
5.19M
package
stringlengths
2
81
path
stringlengths
9
304
filename
stringlengths
4
145
class WorkFlowMethods(object):
    """Helper methods for building and inspecting celery canvas workflows."""

    @staticmethod
    def get_task_sig_queue_name(task_sig):
        """
        Return the "queue_flag" option stored on a signature.

        Recurses into chord/group/chain containers (first child) until a
        plain Signature is reached; returns None for unknown objects.
        """
        queue_flag = "queue_flag"
        from celery import chord
        from celery import group
        from celery.canvas import Signature
        from celery.canvas import _chain

        if isinstance(task_sig, chord):
            # chord: the flag lives on the first header task
            queue_name = task_sig.tasks[0].options.get(queue_flag)
        elif isinstance(task_sig, (group, _chain)):
            # group / chain: recurse into the first child
            sig = task_sig.tasks[0]
            return WorkFlowMethods.get_task_sig_queue_name(sig)
        elif isinstance(task_sig, Signature):
            # plain signature
            queue_name = task_sig.options.get(queue_flag)
        else:
            return None
        return queue_name

    @staticmethod
    def merge_sig_list(sig_list):
        """
        Merge a list of signatures into one runnable unit.

        :return: None for an empty list, the lone signature for a
                 single-element list, otherwise a celery group.
        """
        if not sig_list:
            return None
        if len(sig_list) > 1:
            from celery import group

            return group(*sig_list)
        return sig_list[0]

    @staticmethod
    def fill_res_task_id_list(res, task_id_list):
        """
        Append every task id reachable from `res` to `task_id_list`,
        walking `res.parent` recursively.

        :param res: celery AsyncResult or GroupResult
        :param task_id_list: output list, mutated in place
        :return: True on success, False for falsy/unknown result objects
        """
        if not res:
            return False
        from celery.result import GroupResult
        from celery.result import AsyncResult

        if isinstance(res, GroupResult):
            for res_ in res.results:
                task_id_list.append(res_.task_id)
        elif isinstance(res, AsyncResult):
            task_id_list.append(res.task_id)
        else:
            return False
        if res.parent is not None:
            WorkFlowMethods.fill_res_task_id_list(res.parent, task_id_list)
        return True

    # region Signature Statistic
    max_sig_cnt = 0  # widest parallel section seen by calculate_sig_cnt

    @staticmethod
    def reset_max_sig_cnt():
        WorkFlowMethods.max_sig_cnt = 0

    @staticmethod
    def get_max_sig_cnt():
        return WorkFlowMethods.max_sig_cnt

    @staticmethod
    def update_max_sig_cnt(cnt):
        WorkFlowMethods.max_sig_cnt = max(WorkFlowMethods.max_sig_cnt, cnt)

    @staticmethod
    def calculate_sig_cnt(sig):
        """
        Count the leaf signatures inside a canvas object, tracking the
        widest parallel section in max_sig_cnt.

        (The original body started with a dead `pass` statement; removed.)
        """
        from celery import chord
        from celery.canvas import _chain
        from celery import group

        if isinstance(sig, chord):
            body_cnt = WorkFlowMethods.calculate_sig_cnt(sig.body)
            WorkFlowMethods.update_max_sig_cnt(body_cnt)
            return WorkFlowMethods.calculate_sig_cnt(sig.tasks) + body_cnt
        if isinstance(sig, _chain):
            return WorkFlowMethods.calculate_sig_cnt(sig.tasks)
        if isinstance(sig, group):
            cnt = WorkFlowMethods.calculate_sig_cnt(sig.tasks)
            WorkFlowMethods.update_max_sig_cnt(cnt)
            return cnt
        if isinstance(sig, tuple):
            # a tuple of tasks runs in parallel -> update the maximum
            cnt = sum(WorkFlowMethods.calculate_sig_cnt(s) for s in sig)
            WorkFlowMethods.update_max_sig_cnt(cnt)
            return cnt
        if isinstance(sig, list):
            return sum(WorkFlowMethods.calculate_sig_cnt(s) for s in sig)
        return 1

    # endregion

    # region make sig
    @staticmethod
    def link_signatures(sig_list):
        """Chain the given signatures so they run sequentially."""
        from celery import chain

        return chain(tuple(sig_list))

    @staticmethod
    def make_signature_batch(
        task_path, business_inst_name, kwargs_list=None, queue_name=None
    ):
        """
        Build one signature per kwargs dict and stamp each with the queue.

        :param task_path: dotted module path of the task
        :param business_inst_name: task attribute name inside that module
        :param kwargs_list: list of kwargs dicts, one signature per entry
        :param queue_name: logical queue name (must not be None when
                           kwargs_list is non-empty)
        :return: None if nothing was built, a single signature for one
                 kwargs dict, otherwise a celery group
        """
        sig = None
        s_container = []
        if kwargs_list:
            for kwargs in kwargs_list:
                s = WorkFlowMethods._make_signature(
                    task_path, business_inst_name, **kwargs
                )
                s_container.append(s)
            # fill in the queue name on every signature
            WorkFlowMethods._fill_sig_queue_name(s_container, queue_name)
        if not s_container:
            return sig
        if len(s_container) > 1:
            from celery import group

            # consistent with merge_sig_list: unpack into group(*tasks)
            sig = group(*s_container)
            WorkFlowMethods._fill_sig_queue_name(sig, queue_name)
        else:
            sig = s_container[0]
        return sig

    @staticmethod
    def _make_signature(task_path, business_inst_name, *args, **kwargs):
        """
        Import `business_inst_name` from `task_path` and build a signature.

        Uses importlib/getattr instead of the original exec/eval pair:
        same behavior, no dynamic code execution.
        """
        import importlib

        module = importlib.import_module(task_path)
        task_inst = getattr(module, business_inst_name)
        return task_inst.s(*args, **kwargs)

    @staticmethod
    def _fill_sig_queue_name(sig_list, queue_name):
        """
        Stamp "queue" (resolved name) and "queue_flag" (logical name)
        options on a list of signatures or on a group.
        """
        from celery import group

        if queue_name is None:
            # was `assert False`, which vanishes under `python -O`
            raise ValueError("queue_name must not be None")
        from yyxx_game_pkg.stat.dispatch.common.common import get_queue_name

        real_queue_name = get_queue_name(queue_name)
        if isinstance(sig_list, list):
            for s in sig_list:
                s.options["queue"] = real_queue_name
                s.options["queue_flag"] = queue_name
        elif isinstance(sig_list, group):
            sig_list.options["queue"] = real_queue_name
            sig_list.options["queue_flag"] = queue_name
    # endregion
yyxx-game-pkg-compat
/yyxx_game_pkg_compat-2023.8.31.2-py3-none-any.whl/yyxx_game_pkg/stat/dispatch/core/workflows.py
workflows.py
from yyxx_game_pkg.utils.xdate import split_date_str_by_day
from yyxx_game_pkg.logger.log import root_log

# configured via set_config()
__schedule_file_path = "None"
__api_addr = "http://localhost:8080"


# region internal helpers
def _to_protocol_by_schedule(
    schedule, is_work_flow=False, custom_content=None, custom_queue=None
):
    """
    Convert a schedule module into a protocol dict.

    :param schedule: imported schedule module (see _get_schedule)
    :param is_work_flow: expand SCHEDULE_CONTENT into {group: {step: [...]}}
    :param custom_content: dict merged into each content entry
    :param custom_queue: overrides the schedule's SCHEDULE_QUEUE_NAME
    :return: protocol dict
    """
    proto_dict = dict()
    schedule_name = schedule.SCHEDULE_NAME
    instance_name = schedule.SCHEDULE_DISPATCH_RULE_INSTANCE_NAME
    queue_name = (
        schedule.SCHEDULE_QUEUE_NAME
        if hasattr(schedule, "SCHEDULE_QUEUE_NAME")
        else None
    )
    if custom_queue is not None:
        queue_name = custom_queue
    import copy

    # deep copy: SCHEDULE_CONTENT is module state and must not be mutated
    content = copy.deepcopy(schedule.SCHEDULE_CONTENT)
    if custom_content:
        for c in content:
            if not isinstance(c, dict) or not isinstance(custom_content, dict):
                continue
            content_dict = c.get("custom_content")
            if not content_dict:
                c.update(custom_content)
            else:
                data_dict = custom_content.get("custom_content")
                content_dict.update(data_dict)
    proto_dict["SCHEDULE_NAME"] = schedule_name
    proto_dict["SCHEDULE_DISPATCH_RULE_INSTANCE_NAME"] = instance_name
    if queue_name:
        proto_dict["SCHEDULE_QUEUE_NAME"] = queue_name
    if is_work_flow:
        # expand sub-schedules into {group: {step: [sub-proto, ...]}}
        dict_rule = dict()
        for schedule_param in content:
            group = schedule_param.get("group")
            step = schedule_param.get("step")
            custom_content = schedule_param.get("custom_content")
            sub_schedule_name = schedule_param.get("schedule")
            if step is None or sub_schedule_name is None:
                continue
            if group is None:
                group = 1
            if not dict_rule.get(group):
                dict_rule[group] = dict()
            if not dict_rule[group].get(step):
                dict_rule[group][step] = []
            schedule_str = to_protocol(
                sub_schedule_name,
                custom_content=custom_content,
                custom_queue=queue_name,
            )
            dict_rule[group][step].append(schedule_str)
        content = dict_rule
    proto_dict["SCHEDULE_CONTENT"] = content
    return proto_dict


def _get_schedule(schedule_name):
    """
    Import a schedule module by name.

    "name@dir" selects a schedule directory other than the default
    "schedule".  Tries <dir>.statistic_task.<name> first, then
    <dir>.work_flow.<name>.

    :return: (module or None, is_work_flow flag)
    """
    import importlib

    schedule_dir = "schedule"
    if schedule_name.find("@") > -1:
        schedule_name, schedule_dir = schedule_name.split("@")
    schedule = None
    is_work_flow = False
    try:
        module = (
            f"{__schedule_file_path}.{schedule_dir}.statistic_task.{schedule_name}"
        )
        schedule = importlib.import_module(module)
        is_work_flow = (
            schedule.SCHEDULE_DISPATCH_RULE_INSTANCE_NAME.find("work_flow") >= 0
        )
    except Exception:
        # fall back to the work_flow package
        try:
            module = f"{__schedule_file_path}.{schedule_dir}.work_flow.{schedule_name}"
            schedule = importlib.import_module(module)
            is_work_flow = True
        except Exception as e:
            root_log(e)
    return schedule, is_work_flow


def _parse_proto_dict(proto_dict):
    """
    Split a workflow protocol into one protocol per day slice when its
    first step requests SPLIT_DATE_BY_DAY; otherwise return it unchanged.

    :return: list of protocol dicts
    """
    process_proto_list = []
    schedule_name = proto_dict.get("SCHEDULE_DISPATCH_RULE_INSTANCE_NAME")
    if schedule_name.find("work_flow") >= 0:
        schedule_content = proto_dict.get("SCHEDULE_CONTENT")
        step_schedule_content = schedule_content[1][1][0].get("SCHEDULE_CONTENT")[0]
        date_interval = step_schedule_content.get("day_interval")
        if date_interval and date_interval == "SPLIT_DATE_BY_DAY":
            import copy

            date_appoint = step_schedule_content["date_appoint"]
            date_list = split_date_str_by_day(date_appoint[0], date_appoint[1])
            for date_offset in date_list:
                # BUG FIX: the original appended the same (repeatedly
                # mutated) proto_dict object for every slice, so all list
                # entries ended up identical; deep-copy before modifying.
                proto_copy = copy.deepcopy(proto_dict)
                content_k_v = {"date_appoint": "", "day_interval": date_offset}
                _modify_proto_content(
                    proto_copy.get("SCHEDULE_CONTENT"), content_k_v
                )
                process_proto_list.append(proto_copy)
        else:
            process_proto_list.append(proto_dict)
    else:
        process_proto_list.append(proto_dict)
    return process_proto_list


def _modify_proto_content(schedule_content, content_key_value):
    """
    In-place key/value replacement inside a workflow SCHEDULE_CONTENT
    ({group: {step: [proto, ...]}}).

    SCHEDULE_QUEUE_NAME is written on the proto itself; every other key
    is written into each proto's SCHEDULE_CONTENT entries.
    """
    if not isinstance(schedule_content, dict) or not isinstance(
        content_key_value, dict
    ):
        return
    for content_dict in schedule_content.values():
        for key, content_list in content_dict.items():
            temp_list = []
            for content in content_list:
                for con_key, con_value in content_key_value.items():
                    if con_key == "SCHEDULE_QUEUE_NAME":
                        content[con_key] = con_value
                    else:
                        s_content = content["SCHEDULE_CONTENT"]
                        for c in s_content:
                            c[con_key] = con_value
                temp_list.append(content)
            content_dict[key] = temp_list


# endregion


# region public API
def set_config(path: str, api_addr: str):
    """Configure the schedule package path and the submit API address."""
    global __schedule_file_path, __api_addr
    __schedule_file_path = path
    __api_addr = api_addr


def to_protocol(schedule_name, custom_content=None, custom_queue=None):
    """Load a schedule by name and convert it to a protocol dict (or None)."""
    schedule, is_work_flow = _get_schedule(schedule_name)
    if not schedule:
        return None
    return _to_protocol_by_schedule(
        schedule, is_work_flow, custom_content, custom_queue
    )


def process_proto(proto_dict):
    """Split a protocol (workflow day-slicing) into the list to submit."""
    return list(_parse_proto_dict(proto_dict))


def send(proto):
    """POST the protocol to the dispatch service's /submit endpoint."""
    import requests

    url = f"{__api_addr}/submit"
    post_data = {"content": proto}
    res = requests.post(json=post_data, url=url, timeout=600)
    return res
# endregion
yyxx-game-pkg-compat
/yyxx_game_pkg_compat-2023.8.31.2-py3-none-any.whl/yyxx_game_pkg/stat/submit/logic/submit_logic.py
submit_logic.py
from yyxx_game_pkg.center_api.sdk.recharge import BaseRecharge
from .map_factor import MapRecharge


class Recharge(MapRecharge, BaseRecharge):
    """Channel recharge (payment callback) template; core flow lives in BaseRecharge."""

    # Core logic lives in the parent class; this commented copy may be deleted.
    # def get_params(self, data) -> dict:
    #     self.modify_params()
    #     extra = data.get(self.params.extra, "")
    #     if not extra:
    #         return {}
    #
    #     ext_ary = extra.split(",")
    #     data_ary = {"extra": extra}
    #     self.get_params_core(data, data_ary, ext_ary)
    #     self.get_params_helper(data, data_ary)
    #
    #     return data_ary

    def modify_params(self):
        """
        Adjust attributes of self.params here.

        Defaults:
            extra: str = "extra"
            cp_order_id: str = "billno"
            channel_order_id: str = "order_id"
            player_id: str = "role_id"
            channel_username: str = "openid"
            is_check_username: int = 1
            is_test: int = 0

        Example:
            self.params.cp_order_id = "xxx"
        """
        pass

    # Generic project method; the parent implementation is normally used
    # as-is, so this override may be deleted.
    def get_params_core(self, data, data_ary, ext_ary) -> None:
        """
        --------------------------------
        The parent collects the following data by default:

        data_ary["cp_order_id"] = data.get(self.params.cp_order_id, "")
        data_ary["channel_order_id"] = data.get(self.params.channel_order_id, "")
        data_ary["player_id"] = data.get(self.params.player_id)
        data_ary["is_check_username"] = self.params.is_check_username
        data_ary["channel_username"] = data.get(self.params.channel_username, "")
        if len(ext_ary) > 6:
            data_ary["recharge_id"] = int(ext_ary[5])
        --------------------------------
        """
        super().get_params_core(data, data_ary, ext_ary)

    def get_params_helper(self, data, data_ary) -> None:
        """
        Supplement data_ary built by get_params with extra parameters.

        Handle here any parameter that cannot be obtained generically in
        get_params_core.
        --------------------------------
        money        amount
        real_money   actually paid amount
        extra_gold   bonus gold (channel rebate)
        extra_gold_bind  bonus bound-gold (channel rebate)
        pay_dt       recharge time (seconds)
        --------------------------------
        """
        super().get_params_helper(data, data_ary)

    def make_sign_helper(self, values) -> (dict, str):
        """
        Parent implementation:
            ext_ary = values[self.extra_key].split(",")
            plat_code = ext_ary[0]
            game_channel_id = ext_ary[1]
            sdk_data = self.operator.get_key(plat_code, game_channel_id)
            pay_key = sdk_data.get("pay_key", "")
            return values, pay_key

        :param values:
        :return: values, pay_key
        --------------------------------
        To adjust values or pay_key, do it here:
            values, pay_key = super().make_sign_helper(values)
            ... (specific adjustments)
            return values, pay_key
        --------------------------------
        """
        return super().make_sign_helper(values)

    # Signing core method; this commented copy may be deleted.
    # def make_sign(self, values, sign_key=None) -> str:
    #     values, pay_key = self.make_sign_helper(values)
    #     return self.channel_make_sign(values, pay_key)

    def channel_make_sign(self, values, sign_key) -> str:
        """
        Default signing is md5 (yyxx_game_pkg.crypto.basic.md5) with the
        post_data keys sorted in ascending alphabetical order.
        The methods in MapRecharge / MapFactor may also be inherited or
        overridden instead.

        :return: signature string
        """
        return super().channel_make_sign(values, sign_key)

    def feedback(self, error_code, data: dict = None, msg="", *args, **kwargs):
        """
        Return the response data required by the channel.
        """
        return error_code
yyxx-game-pkg-compat
/yyxx_game_pkg_compat-2023.8.31.2-py3-none-any.whl/yyxx_game_pkg/template/sdk/sdk_{{cookiecutter.sdk_name}}/recharge.py
recharge.py
import time

from yyxx_game_pkg.center_api.sdk.check_token import BaseCheckToken
from yyxx_game_pkg.utils.error_code import ErrorCode
from .map_factor import MapFactor


class Check_Token(MapFactor, BaseCheckToken):
    """Channel login-token verification template; core flow lives in BaseCheckToken."""

    # is_https = True
    # method = "POST"
    # time_param = ("time", int(time.time())

    # Fill the params mapping from the channel's API documentation:
    #   params key:   key written into post_data
    #   params value: key read from kwargs
    #   --> post_data[key] = kwargs[value]
    params = {}
    # parameter names excluded from the generic sdk_helper handling
    sdk_exclude = ()

    # Core logic lives in the parent class; this commented copy may be deleted.
    # def run_check_token(self, *args, **kwargs) -> dict:
    #     """
    #     run check token
    #     """
    #     sdk_helper, response_helper = self.sdk_version_choice(**kwargs)
    #     if sdk_helper is None:
    #         return self.sdk_rechfeed(ErrorCode.ERROR_INVALID_PARAM)
    #
    #     channel_data, post_data = sdk_helper(**kwargs)
    #     response = self.sdk_check_token(channel_data, post_data)
    #
    #     return response_helper(response, **kwargs)

    def sdk_helper(self, sdk_exclude=(), **kwargs) -> (dict, dict):
        """
        Build (channel_data, post_data) for the token-check request.

        Parent implementation:
            channel_data = kwargs.get("channel_data", {})
            post_data = {}
            for k, v in self._params.items():
                post_data[k] = kwargs.get(v)
            if self.Time not in sdk_exclude:
                post_data[self.Time] = int(time.time())
            if self.Flag not in sdk_exclude:
                post_data[self.Flag] = self.channel_make_sign(
                    post_data, channel_data.get("app_key", "")
                )
            return channel_data, post_data

        :param sdk_exclude: exclude parameters
        :param kwargs: request parameters
        :return: channel_data, post_data
        --------------------------------
        To adjust or extend post_data, do it here:
            channel_data, post_data = super().sdk_helper(**kwargs)
            post_data["key to change or add"] = "value to change or add"
            return channel_data, post_data
        --------------------------------
        """
        return super().sdk_helper(**kwargs)

    def channel_make_sign(self, values, sign_key) -> str:
        """
        Override here when only the signing step of sdk_helper differs.

        :param values: post_data from sdk_helper
        :param sign_key: channel_data.get("app_key", "") from sdk_helper
        :return: signature string
        """
        return super().channel_make_sign(values, sign_key)

    def sdk_check_token(self, channel_data, post_data):
        """
        By default the login-verification URL performs the second check.
        If the channel verifies tokens differently, override this method
        (sdk_helper and channel_make_sign may then be deleted).
        The return value is consumed by response_helper; its exact shape
        depends on the channel.
        """
        return super().sdk_check_token(channel_data, post_data)

    def response_helper(self, response: dict, **kwargs) -> dict:
        """
        Shape the returned data according to the channel documentation.
        """
        if response and response["code"] == 0:
            data = {
                # ----- ret and user_id are required ------
                "ret": 1,
                "user_id": kwargs["?"],  # fill "?" with the real parameter key
                # --------------------------------
                # add further fields as needed
            }
            return data
        return super().response_helper(response, **kwargs)

    # sdk version map; normally unchanged, this commented copy may be deleted.
    # @property
    # def sdk_version_map(self) -> dict:
    #     """
    #     sdk version map
    #     Add version mappings when multiple sdk versions exist.
    #     """
    #     return super().sdk_version_map
yyxx-game-pkg-compat
/yyxx_game_pkg_compat-2023.8.31.2-py3-none-any.whl/yyxx_game_pkg/template/sdk/sdk_{{cookiecutter.sdk_name}}/check_token.py
check_token.py
import socket
import struct
import io
import sys

# xdb format defaults
HeaderInfoLength = 256
VectorIndexRows = 256
VectorIndexCols = 256
VectorIndexSize = 8
SegmentIndexSize = 14


class XdbSearcher(object):
    """Searcher over an ip2region .xdb database.

    Supports three modes: file-handle only, vector index cached in
    memory, or the whole xdb buffered in memory (fastest).
    """

    __f = None

    # the minimal memory allocation: just the vector index
    vectorIndex = None

    # whole xdb loaded in memory
    contentBuff = None

    @staticmethod
    def loadVectorIndexFromFile(dbfile):
        """Read only the vector index section (right after the 256-byte header)."""
        try:
            f = io.open(dbfile, "rb")
            f.seek(HeaderInfoLength)
            vi_len = VectorIndexRows * VectorIndexCols * SegmentIndexSize
            vector_data = f.read(vi_len)
            f.close()
            return vector_data
        except IOError as e:
            print("[Error]: %s" % e)

    @staticmethod
    def loadContentFromFile(dbfile):
        """Read the whole xdb file into memory."""
        try:
            f = io.open(dbfile, "rb")
            all_data = f.read()
            f.close()
            return all_data
        except IOError as e:
            print("[Error]: %s" % e)

    def __init__(self, dbfile=None, vectorIndex=None, contentBuff=None):
        self.initDatabase(dbfile, vectorIndex, contentBuff)

    def search(self, ip):
        """Search by dotted-quad string, numeric string, or integer ip."""
        if isinstance(ip, str):
            if not ip.isdigit():
                ip = self.ip2long(ip)
            else:
                # BUG FIX: a numeric string was previously passed through
                # unconverted and crashed on the shift operators below.
                ip = int(ip)
        return self.searchByIPLong(ip)

    def searchByIPStr(self, ip):
        """Search by ip string (dotted quad or decimal)."""
        if not ip.isdigit():
            ip = self.ip2long(ip)
        else:
            ip = int(ip)  # see search(): numeric strings must become ints
        return self.searchByIPLong(ip)

    def searchByIPLong(self, ip):
        """Search by integer ip; returns the region string ("" if not found)."""
        # locate the segment index block based on the vector index
        sPtr = ePtr = 0
        il0 = (int)((ip >> 24) & 0xFF)
        il1 = (int)((ip >> 16) & 0xFF)
        idx = il0 * VectorIndexCols * VectorIndexSize + il1 * VectorIndexSize

        if self.vectorIndex is not None:
            sPtr = self.getLong(self.vectorIndex, idx)
            ePtr = self.getLong(self.vectorIndex, idx + 4)
        elif self.contentBuff is not None:
            sPtr = self.getLong(self.contentBuff, HeaderInfoLength + idx)
            ePtr = self.getLong(self.contentBuff, HeaderInfoLength + idx + 4)
        else:
            self.__f.seek(HeaderInfoLength + idx)
            buffer_ptr = self.__f.read(8)
            sPtr = self.getLong(buffer_ptr, 0)
            ePtr = self.getLong(buffer_ptr, 4)

        # binary search the segment index block to get the region info
        dataLen = dataPtr = int(-1)
        l = int(0)
        h = int((ePtr - sPtr) / SegmentIndexSize)
        while l <= h:
            m = int((l + h) >> 1)
            p = int(sPtr + m * SegmentIndexSize)
            # read the segment index: [sip:4][eip:4][len:2][ptr:4]
            buffer_sip = self.readBuffer(p, SegmentIndexSize)
            sip = self.getLong(buffer_sip, 0)
            if ip < sip:
                h = m - 1
            else:
                eip = self.getLong(buffer_sip, 4)
                if ip > eip:
                    l = m + 1
                else:
                    dataLen = self.getInt2(buffer_sip, 8)
                    dataPtr = self.getLong(buffer_sip, 10)
                    break

        # empty match interception
        if dataPtr < 0:
            return ""

        buffer_string = self.readBuffer(dataPtr, dataLen)
        return_string = buffer_string.decode("utf-8")
        return return_string

    def readBuffer(self, offset, length):
        """Read `length` bytes at `offset` from the memory buffer or the file."""
        buffer = None
        # check the in-memory buffer first
        if self.contentBuff is not None:
            buffer = self.contentBuff[offset:offset + length]
            return buffer
        # read from the file handle
        if self.__f is not None:
            self.__f.seek(offset)
            buffer = self.__f.read(length)
        return buffer

    def initDatabase(self, dbfile, vi, cb):
        """
        Initialize the database for search.

        :param dbfile: path of the xdb file (unused when cb is given)
        :param vi: optional cached vector index bytes
        :param cb: optional whole-file buffer; when given, no file is opened
        """
        try:
            if cb is not None:
                self.__f = None
                self.vectorIndex = None
                self.contentBuff = cb
            else:
                self.__f = io.open(dbfile, "rb")
                self.vectorIndex = vi
        except IOError as e:
            print("[Error]: %s" % e)
            sys.exit()

    def ip2long(self, ip):
        """Convert a dotted-quad string to its 32-bit integer value."""
        _ip = socket.inet_aton(ip)
        return struct.unpack("!L", _ip)[0]

    def isip(self, ip):
        """Loose validity check for a dotted-quad IPv4 string."""
        p = ip.split(".")
        if len(p) != 4:
            return False
        for pp in p:
            if not pp.isdigit():
                return False
            if len(pp) > 3:
                return False
            if int(pp) > 255:
                return False
        return True

    def getLong(self, b, offset):
        """Read a 4-byte little-endian unsigned int (0 when out of range)."""
        if len(b[offset:offset + 4]) == 4:
            # '<I': the xdb format is little-endian; the original native
            # 'I' happened to work only on little-endian hosts.
            return struct.unpack('<I', b[offset:offset + 4])[0]
        return 0

    def getInt2(self, b, offset):
        """Read a 2-byte little-endian unsigned int."""
        # BUG FIX: the original did `b[offset+1] & 0x0000FF00`, which is
        # always 0 for a byte value -- the high byte of the length was
        # silently dropped. Shift it into place instead.
        return (b[offset] & 0xFF) | ((b[offset + 1] << 8) & 0xFF00)

    def close(self):
        """Release the file handle and drop cached buffers."""
        if self.__f is not None:
            self.__f.close()
        self.vectorIndex = None
        self.contentBuff = None


if __name__ == '__main__':
    ip_array = ["1.2.3.4", "117.136.122.164"]
    # 1. load the whole xdb into memory
    dbPath = "./data/ip2region.xdb"
    cb = XdbSearcher.loadContentFromFile(dbfile=dbPath)
    # 2. create the searcher over the buffer
    searcher = XdbSearcher(contentBuff=cb)
    # 3. run the queries
    for ip in ip_array:
        region_str = searcher.searchByIPStr(ip)
        print(region_str)
    searcher.close()
yyxx-game-pkg-compat
/yyxx_game_pkg_compat-2023.8.31.2-py3-none-any.whl/yyxx_game_pkg/ip2region/xdbSearcher.py
xdbSearcher.py
import io
import time
import cProfile
from functools import wraps

from line_profiler import LineProfiler


class Profiler:
    """Line-by-line profiling session built on line_profiler.LineProfiler."""

    def __init__(self):
        self.__lp = LineProfiler()

    def execute(self, main_func_inst, *args, assist_func_list=None, **kwargs):
        """
        Run the profiling session and print the per-line report.

        :param main_func_inst: entry function to profile
        :param assist_func_list: extra functions to include in the report
        :param args: positional args forwarded to main_func_inst
        :param kwargs: keyword args forwarded to main_func_inst
        :return:
        """
        if assist_func_list is not None:
            for func in assist_func_list:
                self.__lp.add_function(func)
        lp_wrapper = self.__lp(main_func_inst)
        lp_wrapper(*args, **kwargs)
        self.__lp.print_stats()


def func_time(func):
    """
    Decorator: print the wall-clock time a call took.

    :param func:
    :return:
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        start = time.time()
        result = func(*args, **kwargs)
        end = time.time()
        print(func.__name__, args, kwargs, 'took', end - start, 'seconds')
        return result

    return wrapper


def func_cprofile(func):
    """
    Decorator: profile the call with the built-in cProfile and print
    stats sorted by time.
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        profile = cProfile.Profile()
        profile.enable()
        try:
            return func(*args, **kwargs)
        finally:
            # BUG FIX: disable() previously ran only on the success path,
            # leaving the profiler enabled when func raised.
            profile.disable()
            profile.print_stats(sort='time')

    return wrapper


def func_line_time(follow=()):
    """
    Decorator factory: detailed per-line timing report for the wrapped
    function (and the callees listed in `follow`).

    :param follow: inner functions to include in the report
    :return:
    """

    def decorate(func):
        @wraps(func)
        def profiled_func(*args, **kwargs):
            profiler = LineProfiler()
            try:
                profiler.add_function(func)
                for _f in follow:
                    profiler.add_function(_f)
                profiler.enable_by_count()
                return func(*args, **kwargs)
            finally:
                _s = io.StringIO()
                profiler.print_stats(stream=_s)
                print(f"<line_profiler> {_s.getvalue()}")

        return profiled_func

    return decorate


"""
# example
def do_stuff(numbers):
    do_other_stuff(numbers)
    s = sum(numbers)
    l = [numbers[i]/43 for i in range(len(numbers))]
    m = ['hello'+str(numbers[i]) for i in range(len(numbers))]


@func_line_time()
def do_other_stuff(numbers):
    s = sum(numbers)


def main_instance():
    import random
    numbers = [random.randint(1, 100) for i in range(1000)]
    do_stuff(numbers)
    do_other_stuff(numbers)


if __name__ == '__main__':
    main_instance()
    # profile = Profiler()
    # profile.execute(main_func_inst=main_instance, assist_func_list=[do_stuff, do_other_stuff])
"""
yyxx-game-pkg-compat
/yyxx_game_pkg_compat-2023.8.31.2-py3-none-any.whl/yyxx_game_pkg/utils/profiler.py
profiler.py
import functools
import pickle
import random
import time
import traceback
from concurrent import futures

from yyxx_game_pkg.logger.log import root_log
from yyxx_game_pkg.xtrace.helper import get_current_trace_id


def fix_str(obj, max_len=5000):
    """
    Stringify obj and cap the length to avoid flooding the logs.
    """
    return str(obj)[:max_len]


def log_execute_time_monitor(exec_lmt_time=20):
    """
    Decorator factory: log a warning when the wrapped call exceeds
    exec_lmt_time seconds.

    :param exec_lmt_time: threshold in seconds
    :return:
    """

    def decorator(func):
        @functools.wraps(func)
        def inner(*args, **kwargs):
            begin_dt = time.time()
            res = func(*args, **kwargs)
            end_dt = time.time()
            offset = end_dt - begin_dt
            if offset >= exec_lmt_time:
                ex_info = None
                if kwargs.get("connection") is not None:
                    # best-effort host extraction from a pooled DB connection
                    ex_info = kwargs.get("connection")._con._kwargs.get("host")
                _args = [fix_str(_arg, 100) for _arg in args]
                for k, _v in kwargs.items():
                    kwargs[k] = fix_str(_v, 100)
                trace_id = get_current_trace_id()
                # BUG FIX: log the truncated _args (the original built
                # them but printed the raw args) and add the missing
                # separator before ex_info.
                root_log(
                    f"<log_execute_time_monitor> trace_id: {trace_id} "
                    f"func <<{func.__name__}>> deal over time "
                    f"begin_at: {begin_dt} end_at: {end_dt}, sec: {offset} "
                    f"ex_info: {ex_info}, params: {str(_args)}, {str(kwargs)}"
                )
            return res

        return inner

    return decorator


def except_monitor(func):
    """
    Decorator: catch any exception, log it (with truncated arguments)
    and return None instead of raising.

    :return:
    """

    @functools.wraps(func)
    def inner(*args, **kwargs):
        res = None
        try:
            res = func(*args, **kwargs)
        except Exception as e:
            _args = [fix_str(_arg, 100) for _arg in args]
            for k, _v in kwargs.items():
                kwargs[k] = fix_str(_v, 100)
            root_log(
                "<except_monitor>"
                f"func:{func.__module__}.{func.__name__}, args:{str(_args)}, kwargs:{str(kwargs)}, "
                f"exc: {traceback.format_exc()} {e}",
                level="error",
            )
        return res

    return inner


def except_return(default=None, echo_raise=True):
    """
    Decorator factory: on exception return `default` instead of raising.

    :param default: value to return (or a callable invoked with the exception)
    :param echo_raise: whether to log the error
    :return:
    """

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Exception as e:
                if echo_raise:
                    _args = [fix_str(_arg, 100) for _arg in args]
                    for k, _v in kwargs.items():
                        kwargs[k] = fix_str(_v, 100)
                    root_log(
                        "<except_return>"
                        f"func:{func.__module__}.{func.__name__}, args:{str(_args)}, kwargs:{str(kwargs)}, "
                        f"exc: {traceback.format_exc()} {e}",
                        level="error",
                    )
                return default(e) if callable(default) else default

        return wrapper

    return decorator


def singleton(cls):
    """Class decorator: one shared instance per class."""
    instances = {}

    @functools.wraps(cls)
    def get_instance(*args, **kw):
        if cls not in instances:
            instances[cls] = cls(*args, **kw)
        return instances[cls]

    return get_instance


def singleton_unique(cls):
    """Class decorator: one instance per (class, args, kwargs) combination."""
    instances = {}

    @functools.wraps(cls)
    def get_instance(*args, **kw):
        unique_key = f"{cls}_{args}_{kw}"
        if unique_key not in instances:
            instances[unique_key] = cls(*args, **kw)
        return instances[unique_key]

    return get_instance


def singleton_unique_obj_args(cls):
    """Like singleton_unique, keyed via str(arg); objects must define __str__."""
    instances = {}

    @functools.wraps(cls)
    def get_instance(*args, **kw):
        unique_key = f"{cls}_{list(map(str, args))}_{kw}"
        if unique_key not in instances:
            instances[unique_key] = cls(*args, **kw)
        return instances[unique_key]

    return get_instance


def timeout_run(timeout=2, default=None):
    """
    Decorator factory: run the call in a worker thread and return
    `default` if it does not finish within `timeout` seconds.
    """

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kw):
            executor = futures.ThreadPoolExecutor(1)
            try:
                future = executor.submit(func, *args, **kw)
                return future.result(timeout=timeout)
            except Exception as e:
                root_log(f"timeout_run {func} error {e} args:{args} kw:{kw}")
                return default
            finally:
                # FIX: the executor was never shut down, leaking one
                # thread per call; wait=False keeps the timeout semantics.
                executor.shutdown(wait=False)

        return wrapper

    return decorator


# caching decorator [picklable return values only]
# todo cache is cleared on service restart
def redis_cache_result(handle, redis_key=None, prefix="_fix", sec=3600):
    """
    :param handle: redis connection
    :param redis_key: must be unique; defaults to the function name
    :param prefix: key prefix to avoid collisions
    :param sec: expiry (seconds) + random 0-30 jitter
    :return:
    """

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                _arg = pickle.dumps(args)
            except Exception:
                # silently skip unpicklable first arg (typically `self`)
                _arg = pickle.dumps(args[1:])
            _kwargs = pickle.dumps(kwargs)
            # without redis_key, default to func.__name__
            cache_key = redis_key if redis_key else func.__name__
            # prefix guards against key collisions with other modules
            cache_key = f"{prefix}_{cache_key}_{_arg}_{_kwargs}"
            cache_data = handle.get_data(cache_key)
            if cache_data:
                res = pickle.loads(cache_data)
                return res
            res = func(*args, **kwargs)
            handle.set_data(
                cache_key, pickle.dumps(res), ex=sec + random.randint(0, 30)
            )
            return res

        return wrapper

    return decorator
yyxx-game-pkg-compat
/yyxx_game_pkg_compat-2023.8.31.2-py3-none-any.whl/yyxx_game_pkg/utils/decorator.py
decorator.py
import re
import time
import datetime
from enum import Enum

# period types used by date_type_trans
DAY = 1
WEEK = 2
MONTH = 3
VERSION = 4


# -- conversions --
def str2date(date_str):
    """
    Parse a date value into a datetime object.

    Accepts bytes, int/float timestamps, numeric strings, "YYYYMMDD",
    "YYYY-MM-DD", "YYYY-MM-DD HH:MM:SS", ISO-8601 strings (with optional
    "Z" / offset) and fractional-second strings.
    """
    if isinstance(date_str, bytes):
        date_str = date_str.decode(encoding="utf8")
    if isinstance(date_str, (int, float)) or date_str.isnumeric():
        # numeric value: either a YYYYMMDD day or a unix timestamp
        if len(str(date_str)) == 8:  # 20230101
            return datetime.datetime.strptime(str(date_str), "%Y%m%d")
        # 1672502400 or 1672502400000
        # BUG FIX: fromtimestamp() rejects strings -- convert first; also
        # scale millisecond timestamps (as documented) into seconds.
        ts = float(date_str)
        if ts >= 1e12:
            ts /= 1000.0
        return datetime.datetime.fromtimestamp(ts)
    if len(date_str) == 19:
        # common format 2023-01-01 00:00:00
        return datetime.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S")
    iso_regex = (
        r"^\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2}:\d{2}(\.\d{1,6})?([+-]\d{2}:\d{2})?$"
    )
    iso_candidate = date_str.replace("Z", "+00:00")
    if re.match(iso_regex, iso_candidate):
        # ISO strings: 2022-03-08T16:30:00.000Z or 2023-03-08T20:45:17+08:00
        # BUG FIX: parse the Z-replaced string -- fromisoformat() does not
        # accept a trailing "Z" before Python 3.11.
        return datetime.datetime.fromisoformat(iso_candidate)
    millisecond_regex = r".*(\.\d{1,6})$"
    if re.match(millisecond_regex, date_str):
        # time with fractional seconds
        return datetime.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S.%f")
    # common format 2023-01-01
    return datetime.datetime.strptime(date_str, "%Y-%m-%d")


def str2date_str(date_str, fmt="%Y-%m-%d %H:%M:%S"):
    """Convert a date string into another format."""
    return str2date(date_str).strftime(fmt)


def str2day(date_str):
    """Parse a "YYYYMMDD" string (e.g. "20210531") into a datetime obj."""
    return datetime.datetime.strptime(date_str, "%Y%m%d")


def date2dt_day(date, _h=0, _m=0, _s=0):
    """
    "2021-05-31 12:23:40" to "2021-05-31 00:00:00"
    :param date: datetime obj
    :param _h: hour
    :param _m: minute
    :param _s: second
    :return: datetime obj
    """
    return datetime.datetime(date.year, date.month, date.day, int(_h), int(_m), int(_s))


def date2dt_day_end(date) -> datetime.datetime:
    """
    "2021-05-31 12:23:40" to "2021-05-31 23:59:59"
    :param date: datetime obj
    :return: datetime obj
    """
    return date2dt_day(date, 23, 59, 59)


def day2date(day, fmt="%Y%m%d", end=0) -> datetime.datetime:
    """
    "20210531" to "2021-05-31 00:00:00"
    :param day: date string
    :param fmt: format of the date string, default "%Y%m%d"
    :param end: 0: 00:00:00; 1: 23:59:59
    :return: datetime obj
    """
    date = datetime.datetime.strptime(str(day), fmt)
    if end:
        return datetime.datetime(date.year, date.month, date.day, 23, 59, 59)
    return date


def day2str_date(day) -> str:
    """'20220301' -> '2022-03-01' (plain slicing: faster than strptime)."""
    day_s = str(day)
    return day_s[:4] + '-' + day_s[4:6] + '-' + day_s[6:]


def date2day(date):
    """"2021-05-31 12:23:40" to "20210531"."""
    return date.strftime("%Y%m%d")


def date2date(date, _h=0, _m=0, _s=0, end=0):
    """
    "2021-05-31 12:23:40" to "2021-05-31 00:00:00"
    :param date: datetime obj
    :param _h: hour
    :param _m: minute
    :param _s: second
    :param end: 0: 00:00:00; 1: 23:59:59
    :return: datetime obj
    """
    if end:
        return datetime.datetime(date.year, date.month, date.day, 23, 59, 59)
    return datetime.datetime(date.year, date.month, date.day, int(_h), int(_m), int(_s))


def day_diff(day1, day2):
    """
    Days from day1 to day2 (day2 - day1).

    day_diff(20210529, 20210531) -> 2
    (the original docstring example had the arguments reversed)
    """
    return (day2date(day2) - day2date(day1)).days


# get the datetime `delta` days away from `date`
def delta_dt_day(date, delta=0, end=0):
    """
    :param date: start datetime
    :param delta: number of days to add
    :param end: 0: 00:00:00 / 1: 23:59:59
    :return:
    """
    if end:
        return date2dt_day_end(date) + datetime.timedelta(days=delta)
    return date2dt_day(date) + datetime.timedelta(days=delta)


def add_days(date, delta, end=0):
    """Readable alias for delta_dt_day."""
    return delta_dt_day(date, delta, end)


def date2stamp(dt_date):
    """datetime -> unix timestamp (float seconds)."""
    return time.mktime(dt_date.timetuple())


def stamp2str(t_stamp, fmt="%Y-%m-%d %H:%M:%S"):
    """
    Unix timestamp -> date string.
    :param t_stamp: timestamp (falsy values yield "")
    :param fmt: output format, default %Y-%m-%d %H:%M:%S
    :return: date string
    """
    if not t_stamp:
        return ""
    time_array = time.localtime(t_stamp)
    return time.strftime(fmt, time_array)


def get_week_str(date, fmt="%Y%m%d"):
    """
    Monday-to-Sunday range of the ISO week containing `date`.
    get_week_str(20230309) -> "2023-03-06~2023-03-12"
    :param date: date string
    :param fmt: format of the date string, default "%Y%m%d"
    :return: period string
    """
    sdate = datetime.datetime.strptime(str(date), fmt)
    _, _, s_week_day = sdate.isocalendar()
    sday = (sdate - datetime.timedelta(days=s_week_day - 1)).strftime("%Y-%m-%d")
    eday = (sdate - datetime.timedelta(days=s_week_day - 7)).strftime("%Y-%m-%d")
    return f"{sday}~{eday}"


def date_type_trans(day, date_type=DAY, fmt="%Y%m%d", version_configs=None):
    """
    Format a day for a reporting period.
    :param day: date string, e.g. 20230201
    :param date_type: period type (1: day, 2: week, 3: month, 4: version)
    :param fmt: format of the date string, default "%Y%m%d"
    :param version_configs: list of [version, start_day, end_day]
    :return: period string
    """
    if date_type == DAY:
        return datetime.datetime.strptime(str(day), fmt).strftime("%Y-%m-%d")
    if date_type == WEEK:
        return get_week_str(day, fmt)
    if date_type == MONTH:
        return datetime.datetime.strptime(str(day), fmt).strftime("%Y年%m月")
    if date_type == VERSION:
        if version_configs is None:
            return day
        for version, start_day, end_day in version_configs:
            if start_day <= day <= end_day:
                return f"{version}版本"
        return "未配置版本"
    return day


def to_start_of_interval(_t: datetime.datetime, unit="minute", interval=5):
    """
    Truncate a datetime to the start of its interval.
    to_start_of_interval("...11:16:20", 'minute', 5) -> ...11:15:00
    to_start_of_interval("...11:16:20", 'hour', 1)   -> ...11:00:00
    """
    if unit == "minute":
        fix = _t.minute - _t.minute % interval
        _t = _t.replace(minute=fix, second=0, microsecond=0)
    elif unit == "hour":
        fix = _t.hour - _t.hour % interval
        _t = _t.replace(hour=fix, minute=0, second=0, microsecond=0)
    return _t


def split_date_str_by_day(sdate_str, edate_str, day_slice=1):
    """
    Split [sdate_str, edate_str] into day_slice-day {"sdate", "edate"}
    chunks (edate of each chunk ends at :59:59 via a 1-second offset).
    """
    res_list = []
    if not sdate_str or not edate_str:
        return res_list
    # slice by a number of days
    interval = datetime.timedelta(days=day_slice)
    start_dt = datetime.datetime.strptime(sdate_str, "%Y-%m-%d %H:%M:%S")
    edate_str = edate_str.replace("00:00:00", "23:59:59")
    end_dt = datetime.datetime.strptime(edate_str, "%Y-%m-%d %H:%M:%S")
    offset = datetime.timedelta(seconds=1)
    while start_dt < end_dt:
        next_dt = min((start_dt + interval - offset), end_dt)
        res_list.append(
            {
                "sdate": start_dt.strftime("%Y-%m-%d %H:%M:%S"),
                "edate": next_dt.strftime("%Y-%m-%d %H:%M:%S"),
            }
        )
        start_dt = next_dt + offset
    return res_list
yyxx-game-pkg-compat
/yyxx_game_pkg_compat-2023.8.31.2-py3-none-any.whl/yyxx_game_pkg/utils/xdate.py
xdate.py
class ErrorCode:
    """Central registry of API error codes.

    Each constant is a dict of the shape ``{"code": int, "msg": str}`` (or
    ``{"errno": ..., "errmsg": ...}`` for the channel-specific codes at the
    bottom). The message strings are user-facing and must not be altered.
    """

    # Basic error codes (generic request validation)
    ZERO = {"code": 0, "msg": "无"}
    SUCCESS = {"code": 1, "msg": "ok"}
    ERROR_INVALID_PARAM = {"code": -1, "msg": "参数无效"}
    ERROR_RESTFUL_ERROR = {"code": -2, "msg": "提交类型错误"}
    ERROR_SIGNATURE_ERROR = {"code": -3, "msg": "签名错误"}
    ERROR_TIME_OVERTIME = {"code": -4, "msg": "时间超时"}
    ERROR_PLATFORM_FUNCTION_ERROR = {"code": -5, "msg": "运营商映射方法不存在"}
    ERROR_CHECK_PUBLIC = {"code": -6, "msg": "公共校验错误"}
    ERROR_ROUTING_ERROR = {"code": -7, "msg": "请求错误"}
    ERROR_IP_ACCESS_RESTRICTION = {"code": -9, "msg": "限制访问"}

    # In-method error codes (business-logic failures, -1001..-1020)
    ERROR_PARAMS_ERROR = {"code": -1001, "msg": "参数错误"}
    ERROR_IP_WHITE_LIST_ERROR = {"code": -1002, "msg": "IP白名单错误"}
    ERROR_SERVER_API_URL_EMPTY = {"code": -1003, "msg": "单服接口不存在"}
    ERROR_REDIS_SET_ERROR = {"code": -1004, "msg": "REDIS设置失败"}
    ERROR_REDIS_PUSH_ERROR = {"code": -1005, "msg": "REDIS入队列失败"}
    ERROR_REQUEST_OFTEN = {"code": -1006, "msg": "请求过于频繁"}
    ERROR_GIFT_LOG_SET_ERROR = {"code": -1007, "msg": "媒体卡记录出错"}
    ERROR_GIFT_CODE_SET_ERROR = {"code": -1008, "msg": "媒体卡设置状态出错"}
    ERROR_NOTICE_VERSION_ERROR = {"code": -1009, "msg": "更新公告版本号错误"}
    ERROR_NOTICE_REWARDS_ERROR = {"code": -1010, "msg": "公告奖励错误"}
    ERROR_PARAMS_ERROR_NULL = {"code": -1011, "msg": "错误没数据返回"}
    ERROR_GIFT_LOG_SET_OFTEN = {"code": -1012, "msg": "记录错误"}
    ERROR_GIFT_CODE_SET_OFTEN = {"code": -1013, "msg": "改状态错误"}
    ERROR_UPDATE_DATA_ERROR = {"code": -1014, "msg": "更新数据错误"}
    ERROR_CERTIFICATION_OFTEN = {"code": -1015, "msg": "该身份证号认证次数过多"}
    ERROR_REPEAT_SUBMISSION = {"code": -1016, "msg": "重复提交"}
    # NOTE(review): -1017 is unused here — confirm whether it is reserved elsewhere.
    ERROR_REWARD_ERROR = {"code": -1018, "msg": "奖励配置错误"}
    ERROR_ORDER_INFO_ERROR = {"code": -1019, "msg": "查询订单错误"}
    ERROR_CREATE_ORDER_ERROR = {"code": -1020, "msg": "创建订单错误"}

    # Error codes relayed from downstream interface calls (-2001..-2004)
    ERROR_SERVER_API_URL_ERROR = {"code": -2001, "msg": "单服接口错误"}
    ERROR_RECHARGE_ERROR = {"code": -2002, "msg": "充值失败"}
    ERROR_API_PLAYER_ERROR = {"code": -2003, "msg": "单服接口玩家错误"}
    ERROR_API_DATA_EMPTY = {"code": -2004, "msg": "数据为空"}

    # Server-side verification error codes (-3001..-3005)
    ERROR_SERVER_ERROR = {"code": -3001, "msg": "服务器错误"}
    ERROR_SERVER_CONN_ERROR = {"code": -3002, "msg": "服务器链接错误"}
    ERROR_MYSQL_CONN_ERROR = {"code": -3003, "msg": "MySQL链接错误"}
    ERROR_REDIS_CONN_ERROR = {"code": -3004, "msg": "Redis链接错误"}
    ERROR_MYSQL_REDIS_CONN_ERROR = {"code": -3005, "msg": "MySQL和Redis链接错误"}

    # Return codes in the shape required by the distribution channel
    ERROR_SIGN_ERROR = {"errno": -3, "errmsg": "签名错误"}
    API_SUCCESS = {"errno": 0, "errmsg": "成功"}
yyxx-game-pkg-compat
/yyxx_game_pkg_compat-2023.8.31.2-py3-none-any.whl/yyxx_game_pkg/utils/error_code.py
error_code.py
try:
    # ujson is preferred for speed, but fall back to the stdlib json module
    # (drop-in compatible for the loads/dumps used here) so this module also
    # works in environments where ujson is not installed.
    import ujson as json
except ImportError:  # pragma: no cover
    import json


def lst2str(lst, isdigit=True, symbol=",", warp="'") -> str:
    """
    list转字符串 — join a list into a single string.

    lst2str(['a', 'b', 'c']) -> "'a','b','c'"

    :param lst: source list; a bare int is wrapped, other iterables list()-ed
    :param isdigit: join raw str() values; automatically downgraded to False
        when the first element is not numeric
    :param symbol: separator between items
    :param warp: wrapping character for non-digit items (default single quote)
    :return: joined string; "" for a falsy input
    """
    if not lst:
        return ""
    if isinstance(lst, int):
        lst = [lst]
    if not isinstance(lst, list):
        lst = list(lst)
    # Simple auto-detection: quote values when the first item is non-numeric.
    if not str(lst[0]).isdigit():
        isdigit = False

    def _wrap(_s):
        return f"{warp}{_s}{warp}"

    items = list(map(str, lst)) if isdigit else list(map(_wrap, lst))
    return symbol.join(items)


def load_js_str_keys(js_str, keys, default=None) -> dict:
    """
    load json字符串中指定key列表 — pick selected keys out of a JSON string.

    :param js_str: JSON text; empty/None yields {}
    :param keys: iterable of keys to extract
    :param default: value used for missing keys (defaults to {})
    :return: dict mapping each requested key to its value
    """
    if default is None:
        default = {}
    if not js_str:
        return {}
    js_dict = json.loads(js_str)
    return {key: js_dict.get(key, default) for key in keys}


def str2list(list_str, split_symbol) -> list:
    """
    str转list, 去除空项 — split a string and drop empty items.

    str2list("#1#2##", "#") -> ['1', '2']

    :param list_str: string to split
    :param split_symbol: separator
    :return: list of non-empty fragments
    """
    return [item for item in list_str.split(split_symbol) if item]


def split_list(pending_lst, split_size=50000) -> list:
    """
    列表切分 — chunk a flat list into contiguous slices.

    split_list([[1, 2, 3, 4, 5]], 3) -> [[1, 2, 3], [4, 5]]
    split_list([1, 2, 3, 4, 5], 3) -> [[1, 2, 3], [4, 5]]

    A nested input uses only its first sub-list; split_size == -1 disables
    splitting and returns the (wrapped) input unchanged.

    :param pending_lst: flat list, or a list whose first item is the list
    :param split_size: max chunk length (-1 for no splitting)
    :return: list of chunks
    """
    if not isinstance(pending_lst, (list, tuple)):
        return pending_lst
    if not isinstance(pending_lst[0], (list, tuple)):
        pending_lst = [pending_lst]
    if split_size == -1:
        return pending_lst
    result = pending_lst[0]
    # ceil(len / split_size) chunks, each at most split_size long
    chunk_cnt = len(result) // split_size + (1 if len(result) % split_size else 0)
    return [result[i * split_size:(i + 1) * split_size] for i in range(chunk_cnt)]


def split_list_ex(target_list, res_len):
    """
    把target_list分割成若干个小list(间隔切割) — interleaved split.

    Elements are dealt round-robin into ceil(len/res_len) sub-lists, so each
    sub-list holds roughly res_len items:

    split_list_ex([1, 2, 3, 4, 5, 6], 3) -> [[1, 3, 5], [2, 4, 6]]

    (The previous docstring example did not match the implementation.)

    :param target_list: source list; non-list input returns []
    :param res_len: target length of each sub-list; <= 0 returns [[]]
    :return: list of interleaved sub-lists
    """
    if not isinstance(target_list, list):
        return []
    if res_len <= 0:
        return [[]]
    total = len(target_list)
    if res_len >= total:
        return [target_list]
    # BUG FIX: the original used true division ("/"), which yields a float in
    # Python 3 and made range(split_parts_len) raise TypeError; use floor
    # division plus a remainder bump (ceil division).
    parts = total // res_len + (1 if total % res_len else 0)
    res_list = [[] for _ in range(parts)]
    for idx, val in enumerate(target_list):
        res_list[idx % parts].append(val)
    return res_list
yyxx-game-pkg-compat
/yyxx_game_pkg_compat-2023.8.31.2-py3-none-any.whl/yyxx_game_pkg/utils/xListStr.py
xListStr.py
import functools
import json
from bisect import bisect_left
import pandas as pd
import numpy as np


def empty_df(columns=None):
    """Return an empty DataFrame, optionally pre-declaring column names.

    :param columns: optional list of column names
    :return: empty ``pd.DataFrame``
    """
    if not columns:
        return pd.DataFrame()
    return pd.DataFrame(columns=columns)


def df_col2row_idx(_df, index_keys, data_key):
    """
    # df 列转行索引 — pivot one value column into per-key columns.
    #
    #    day       player_id      money
    # 0  20210527  1508277000053    6.0
    # 1  20210527  1508277000058    6.0
    # 3  20210528  1508277000058   12.0
    #
    # res_df = df_col2row_idx(res_df, ['player_id', 'day'], 'money')
    #
    #        player_id  20210527  20210528
    # 0  1508277000053       6.0       NaN
    # 1  1508277000058       6.0      12.0
    """
    df_index = _df.set_index(index_keys)[data_key]
    # unstack() pivots the innermost index level into columns
    _df = df_index.unstack()
    _df = _df.rename_axis(None, axis="columns").reset_index()
    return _df


def cut_bins(val, bins, concat="-"):
    """Map ``val`` into a labelled bin.

    :param val: numeric value to classify
    :param bins: ascending bin edges
    :param concat: separator inside the label, e.g. "1-8"
    :return: ``(label, upper_edge)`` tuple.
        NOTE(review): a falsy ``val`` (0/None) is returned unchanged, NOT as
        a tuple — confirm callers expect this asymmetric return type.
    """
    if not val:
        return val
    if val > bins[-1]:
        # clamp values above the last edge into the top bin
        val = bins[-1]
    position = bisect_left(bins, val)
    labels = f"{bins[position - 1] + 1}{concat}{ bins[position]}"
    return labels, bins[position]


def df_cut_bins(_df, key, bins, insert_zero=True):
    """Apply :func:`cut_bins` to column ``key`` of ``_df``.

    :param _df: source DataFrame
    :param key: column to bin
    :param bins: bin edges (normalised to sorted ints)
    :param insert_zero: prepend 0 as the lowest edge when missing
    :return: Series of :func:`cut_bins` results
    """

    def prefix_bins(_bins):
        """Sort edges numerically and optionally anchor the range at 0."""
        _bins = sorted(map(int, _bins))
        if insert_zero and _bins[0] != 0:
            _bins.insert(0, 0)
        return _bins

    bins = prefix_bins(bins)
    return _df[key].apply(cut_bins, bins=bins)


def cal_round_rate(data, precision=2, suffix="%", invalid_value="-"):
    """Round numeric data and append a suffix, recursing over pandas objects.

    :param data: DataFrame / Series / scalar
    :param precision: decimal places (0 renders as int)
    :param suffix: appended string, default "%"
    :param invalid_value: replacement for NaN/inf
    :return: same shape as input, values formatted as strings
    """
    if isinstance(data, pd.DataFrame):
        # column-wise recursion; each column is handled as a Series
        return data.apply(cal_round_rate, args=(precision, suffix), axis=0)
    if isinstance(data, pd.Series):
        if str(invalid_value).isdigit():
            data = data.fillna(invalid_value)
        data = data.astype(float).round(precision)
        if precision == 0:
            data = data.astype(int)
        return data.apply(
            lambda d: invalid_value if (d == np.inf or np.isnan(d)) else f"{d}{suffix}"
        )
    if isinstance(data, (int, float)):
        if np.isnan(data) or data == np.inf:
            return invalid_value
        if precision == 0:
            return str(int(data)) + suffix
        return str(round(data, precision)) + suffix
    return invalid_value


def func_cal_round_rate(func, **kw):
    """
    用于快速构造用agg或apply传递的cal_round_rate函数 — build an agg/apply
    callable that aggregates with *func* then formats via cal_round_rate.

    :param func: aggregation callable, or a method name string (e.g. "sum")
    :param kw: forwarded to :func:`cal_round_rate`
    :return: wrapped callable
    """

    @functools.wraps(func)
    def wrapper(data, *args, **kwargs):
        if isinstance(func, str):
            # string form resolves to a bound method, e.g. data.sum()
            data = getattr(data, func)()
        else:
            data = func(data)
        return cal_round_rate(data, **kw)

    return wrapper


def dict_to_json(data):
    """用于es对象转json,并且正常显示中文 — serialize to JSON keeping CJK readable.

    Falsy non-string input is stringified (e.g. {} -> "{}"); NaN floats map
    to "".
    """
    if not data:
        if not isinstance(data, (str, bytes)):
            data = str(data)
        return data
    if isinstance(data, float) and pd.isna(data):
        return ""
    return json.dumps(data, ensure_ascii=False)


def df_json_normalize(_df, columns, prefixes=None, sep=".", column_prefix=False):
    """
    df: 原df数据 — expand dict-valued columns into flat columns.

    :param _df: source DataFrame (dict columns are dropped after expansion)
    :param columns: column names whose cells hold dicts
    :param prefixes: optional per-column prefix list (parallel to columns)
    :param sep: separator between prefix and inner key
    :param column_prefix: use the original column name as prefix
    """
    for idx, record_column in enumerate(columns):
        if record_column not in _df.columns:
            continue
        # fill_dict turns NaN cells into {} so DataFrame() gets uniform rows
        tmp_df = pd.DataFrame(_df[record_column].apply(fill_dict).tolist())
        record_prefix = None
        if column_prefix:
            record_prefix = record_column
        elif prefixes is not None:
            record_prefix = prefixes[idx]
        if record_prefix:
            tmp_df.columns = [f"{record_prefix}{sep}{col}" for col in tmp_df.columns]
        _df[tmp_df.columns] = tmp_df
        _df = _df.drop(columns=record_column)
    return _df


def df_fill_columns(_df, columns, default="", tpe=None):
    """
    填充列,以确保列存在 — ensure columns exist, filling NaN in existing ones.

    :param _df: target DataFrame
    :param columns: name, list of names, or {name: creation_value} dict
    :param default: fill value for NaN in existing columns / new list columns
    :param tpe: optional dtype to cast existing columns to after filling
    """
    if isinstance(columns, (list, tuple)):
        for column in columns:
            if column not in _df.columns:
                _df[column] = default
            elif tpe:
                _df[column] = _df[column].fillna(default).astype(tpe)
            else:
                _df[column] = _df[column].fillna(default)
    elif isinstance(columns, dict):
        for column, val in columns.items():
            if column not in _df.columns:
                _df[column] = val
            # NOTE(review): when the column already exists it is filled with
            # `default`, not the per-column `val` — confirm this asymmetry
            # is intended.
            elif tpe:
                _df[column] = _df[column].fillna(default).astype(tpe)
            else:
                _df[column] = _df[column].fillna(default)
    else:
        if columns not in _df.columns:
            _df[columns] = default
        elif tpe:
            _df[columns] = _df[columns].fillna(default).astype(tpe)
        else:
            _df[columns] = _df[columns].fillna(default)
    return _df


def df_rm_columns(_df, columns):
    """
    安全删除列 — drop columns/index entries only when they exist.

    :param _df: dataframe or series
    :param columns: column names (or index labels for a Series) to drop
    :return: new dataframe or series
    """
    if isinstance(_df, pd.Series):
        rm_columns = [column for column in columns if column in _df.index]
        if rm_columns:
            _df = _df.drop(rm_columns)
    else:
        rm_columns = [column for column in columns if column in _df.columns]
        if rm_columns:
            _df = _df.drop(columns=rm_columns)
    return _df


def fill_dict(data):
    """填充{}到nan — replace NaN with an empty dict."""
    return {} if not isinstance(data, dict) and pd.isna(data) else data


def fill_list(data):
    """填充[]到nan — replace NaN with an empty list."""
    return [] if not isinstance(data, list) and pd.isna(data) else data


def div_rate(data_df: pd.DataFrame, top_key, bottom_key, precision=2) -> pd.Series:
    """
    dataframe div函数计算百分比 top_key / bottom_key — percentage strings.

    example: data_df["pay_rate"] = div_rate(data_df, "pid_cnt", "act_player_cnt")

    :param top_key: numerator column name, or list of names
    :param bottom_key: denominator column name
    :param precision: decimal places in the rendered percentage
    :return: Series (or DataFrame for list top_key) of "xx.xx%" strings
    """
    fmt_show = f"%0.{precision}f"
    if isinstance(top_key, list):
        return (
            data_df[top_key]
            .div(data_df[bottom_key], axis=0)
            .round(precision + 2)
            .fillna(0)
            .applymap(lambda x: f"{ fmt_show % round(x * 100, precision) }%")
        )
    return (
        data_df[top_key]
        .div(data_df[bottom_key], axis=0)
        .round(precision + 2)
        .fillna(0)
        .apply(lambda x: f"{fmt_show % round(x * 100, precision) }%")
    )


def div_round(data_df: pd.DataFrame, top_key, bottom_key, precision=2) -> pd.Series:
    """
    dataframe div函数 top_key / bottom_key — plain rounded ratio.

    example: data_df["pay_rate"] = div_round(data_df, "pid_cnt", "act_player_cnt")
    """
    return data_df[top_key].div(data_df[bottom_key], axis=0).round(precision)


def concat_cols(data_df: pd.DataFrame, cols: list, concat_by="|") -> pd.Series:
    """
    合将列 — concatenate several columns into one display string.

    example: concat_cols(data_df, ["pid_cnt", "pid_rate"]) -> "98|10.0%"
    """
    res = None
    for col in cols:
        if res is None:
            res = data_df[col].astype(str)
        else:
            res = res + data_df[col].astype(str)
        if col == cols[-1]:
            # no trailing separator after the last column
            continue
        res = res + concat_by
    return res


def df_astype(_df: pd.DataFrame, columns=(), excludes=(), tpe=str):
    """
    dataframe转类型,可指定列进行转换,也可反向排除某些列,进行转换
    主要用于某些数据列,仅少数列无需转,多数列需要转时,需要列举所有的列,此举可减少编写

    :param columns: columns to convert
    :param excludes: convert every column EXCEPT these (takes precedence)
    :param tpe: target dtype
    """
    if excludes:
        df_columns = _df.columns.tolist()
        columns = list(set(df_columns) - set(excludes))
    if columns:
        _df[columns] = _df[columns].astype(tpe)
    return _df


def show_range_labels(_df, key, bins, insert_zero=True, max_label_fmt=None):
    """
    Label each row's ``key`` value with its bin range and the bin's rank.

    # money_df ####
    # player_id, money
    # 19296, 0
    # 21169, 8
    # 24003, 98
    money_df[["money_label", "label_rank"]] = show_range_labels(
        money_df, "money", bins=[0, 8, 41], max_label_fmt="{}+"
    )
    =>
    # player_id, money, money_label, label_rank
    # 19296, 0, "", -1
    # 21169, 8, "1-8", 8
    # 24003, 98, "41+", 41

    :param insert_zero: 是否在bins最前面插入0 (anchor the range at 0)
    :param max_label_fmt: optional format for the top bin, e.g. "{}+"
    :return: two-column expansion (label, rank)
    """

    def prefix_bins(_bins):
        # sort edges numerically and optionally anchor at 0
        _bins = sorted(map(int, _bins))
        if insert_zero and _bins[0] != 0:
            _bins.insert(0, 0)
        return _bins

    bins = prefix_bins(bins)
    concat = "-"

    def cut_bins(row):
        # per-row variant; falsy or out-of-range values label as ("", -1)
        val = row[key]
        if not val:
            return "", -1
        if val > bins[-1]:
            val = bins[-1]
        position = bisect_left(bins, val)
        if position <= 0:
            return "", -1
        left_val = bins[position - 1] + 1
        right_val = bins[position]
        labels = f"{left_val}{concat}{right_val}"
        if position == len(bins) - 1 and max_label_fmt is not None:
            labels = max_label_fmt.format(left_val)
        return labels, bins[position]

    return _df.apply(cut_bins, axis=1, result_type="expand")
yyxx-game-pkg-compat
/yyxx_game_pkg_compat-2023.8.31.2-py3-none-any.whl/yyxx_game_pkg/utils/xdataframe.py
xdataframe.py
import time
import hashlib
import urllib.parse
import ujson as json
import requests
from celery import current_app
from yyxx_game_pkg.logger.log import root_log as local_log


def http_request(
    url, data, is_https=False, method="post", is_json_type=False, add_headers=None
):
    """Fire an HTTP request and return the raw response body.

    :param url: host + path WITHOUT scheme; scheme is chosen from is_https
    :param data: dict payload; form- or JSON-encoded depending on is_json_type
    :param is_https: use https:// when True, http:// otherwise
    :param method: "post" (default); anything else issues a GET
    :param is_json_type: send a JSON body instead of a urlencoded form
    :param add_headers: extra headers merged over the defaults
    :return: response bytes, or None on empty body / any error (logged)
    """
    try:
        if is_json_type is True:
            content_type = "application/json; charset=UTF-8"
        else:
            content_type = "application/x-www-form-urlencoded; charset=UTF-8"
        scheme = "https" if is_https is True else "http"
        url = f"{scheme}://{url}"
        headers = {"Content-Type": content_type}
        if add_headers:
            headers.update(add_headers)
        post_data = set_params(data) if is_json_type is False else json.dumps(data)
        # SECURITY NOTE: verify=False disables TLS certificate validation;
        # kept for backward compatibility with internal endpoints — review
        # before using against external hosts.
        if method == "post":
            result = requests.post(url, data=post_data, headers=headers, verify=False)
        else:
            result = requests.get(url + "?" + post_data, headers=headers, verify=False)
        content = result.content
        if not content:
            return None
        return content
    except Exception as e:
        local_log(f"http_request Error Exception: {e}")
        return None


def md5(md5_str):
    """
    md5加密[center接口定]
    :param md5_str: string to hash
    :return: hex digest of the UTF-8 encoded input
    """
    sign_str = hashlib.md5()
    sign_str.update(md5_str.encode("utf-8"))
    return sign_str.hexdigest()


def set_params(params=None):
    """
    生成参数 — urlencode a dict, JSON-encoding nested dicts / lists of dicts.

    :param params: dict (anything else raises TypeError)
    :return: urlencoded query string
    """
    if not isinstance(params, dict):
        raise TypeError("You must pass in a dictionary!")
    params_list = []
    for k, _v in params.items():
        if isinstance(_v, list) and _v:
            if isinstance(_v[0], dict):
                # list of dicts is sent as one JSON blob
                params_list.append((k, json.dumps(_v)))
            else:
                # scalar list becomes repeated key=value pairs
                params_list.extend([(k, x) for x in _v])
        elif isinstance(_v, dict):
            params_list.append((k, json.dumps(_v)))
        else:
            params_list.append((k, _v))
    return urllib.parse.urlencode(params_list)


def _build_signed_envelope(params, api_key):
    """Build the signed envelope {time, params, sign} shared by this module.

    CONSISTENCY FIX: http_push_server and make_post_data previously duplicated
    this signing block verbatim; factoring it out keeps the algorithm from
    drifting between the two call sites.
    """
    _t = int(time.time())
    values = {"time": _t, "params": json.dumps(params)}
    # deterministic key order is part of the signature contract
    pairs = "&".join(f"{key}={values[key]}" for key in sorted(values.keys()))
    timestamp = str(_t + (_t % 38975))
    _tmp = md5(f"{pairs}{api_key}")
    sign = md5(f"{timestamp}{_tmp}")
    return {"time": _t, "params": params, "sign": sign}


def http_push_server(url, data, server_api_key):
    """
    单服推送 — push signed params to a single game-server endpoint.

    :param url: target endpoint (falsy url is logged and returns None)
    :param data: params dict to push
    :param server_api_key: shared secret used in the signature
    :return: raw response bytes, or None
    """
    if not url:
        local_log(f"Error http_push_server url: {url} data: {json.dumps(data)}")
        return None
    post_data = _build_signed_envelope(data, server_api_key)
    post_data_log = json.dumps(post_data, ensure_ascii=False)
    local_log(f"http_push_server url:{url} post_data: {post_data_log}")
    result = http_request(url, post_data, False, "get")
    local_log(f"http_push_server url:{url} res: {result}")
    return result


def make_post_data(ex_params, api_key):
    """
    生成post_data — urlencoded signed envelope for center-API calls.

    :param ex_params: params dict to wrap
    :param api_key: shared secret used in the signature
    :return: urlencoded string ready to POST
    """
    return set_params(_build_signed_envelope(ex_params, api_key))
yyxx-game-pkg-compat
/yyxx_game_pkg_compat-2023.8.31.2-py3-none-any.whl/yyxx_game_pkg/utils/xhttp.py
xhttp.py
import copy
import operator
import os
from importlib import import_module

from yyxx_game_pkg.conf import global_settings

# Sentinel for "not initialised yet"; distinct from None so None is storable.
empty = object()

# Environment variable naming the settings module to import lazily.
ENVIRONMENT_VARIABLE = "SETTINGS"


def new_method_proxy(func):
    """Return a method that applies *func* to the lazily-built ``_wrapped``."""

    def inner(self, *args):
        _wrapped = self._wrapped
        if _wrapped is empty:
            self._setup()
            _wrapped = self._wrapped
        return func(_wrapped, *args)

    # marker so LazyObject.__getattribute__ does not mask these proxies
    inner._mask_wrapped = False
    return inner


def unpickle_lazyobject(wrapped):
    """Used by LazyObject.__reduce__: unpickle straight to the wrapped obj."""
    return wrapped


class ImproperlyConfigured(Exception):
    """Raised when settings are requested but not configured."""


class UserSettingsHolder:
    """Holds manually-configured settings, falling back to default_settings."""

    SETTINGS_MODULE = None

    def __init__(self, default_settings):
        # write straight into __dict__ to avoid __setattr__ recursion
        self.__dict__["_deleted"] = set()
        self.default_settings = default_settings

    def __getattr__(self, name):
        # only uppercase names count as settings; deleted names fall through
        if not name.isupper() or name in self._deleted:
            raise AttributeError
        return getattr(self.default_settings, name)

    def __setattr__(self, name, value):
        self._deleted.discard(name)
        # BUG FIX: the value was previously discarded (only the _deleted set
        # was touched), so every option passed to settings.configure(FOO=...)
        # was unreadable — lookups fell through to default_settings and raised
        # AttributeError. Store it, mirroring django.conf.UserSettingsHolder.
        super().__setattr__(name, value)

    def __delattr__(self, name):
        self._deleted.add(name)
        if hasattr(self, name):
            super().__delattr__(name)

    def __dir__(self):
        return sorted(
            s
            for s in [*self.__dict__, *dir(self.default_settings)]
            if s not in self._deleted
        )

    def is_overridden(self, setting):
        """True when *setting* was deleted, set locally, or overridden below."""
        deleted = setting in self._deleted
        set_locally = setting in self.__dict__
        set_on_default = getattr(
            self.default_settings, "is_overridden", lambda s: False
        )(setting)
        return deleted or set_locally or set_on_default

    def __repr__(self):
        return f"<{self.__class__.__name__}>"


class Settings:
    """Concrete settings: package defaults overlaid by a user module."""

    def __init__(self, settings_module=None):
        # start from the package-level defaults
        for setting in dir(global_settings):
            if setting.isupper():
                setattr(self, setting, getattr(global_settings, setting))
        if not settings_module:
            # fail clearly instead of import_module(None)'s obscure TypeError
            raise ImproperlyConfigured("Settings module name must be supplied.")
        self.SETTINGS_MODULE = settings_module
        module = import_module(self.SETTINGS_MODULE)
        self._explicit_settings = set()
        for setting in dir(module):
            if setting.isupper():
                setting_value = getattr(module, setting)
                setattr(self, setting, setting_value)
                self._explicit_settings.add(setting)

    def __repr__(self):
        return f'<{self.__class__.__name__} "{self.SETTINGS_MODULE}">'


class LazyObject:
    """Delay instantiation of a wrapped object until first access."""

    _wrapped = None

    def __init__(self):
        self._wrapped = empty

    def __getattribute__(self, name):
        if name == "_wrapped":
            # never proxy the storage slot itself
            return super().__getattribute__(name)
        value = super().__getattribute__(name)
        # mask attributes of LazyObject itself unless flagged as proxies
        if not getattr(value, "_mask_wrapped", True):
            raise AttributeError
        return value

    __getattr__ = new_method_proxy(getattr)

    def __setattr__(self, name, value):
        if name == "_wrapped":
            # assign directly to dodge the proxying __setattr__ below
            self.__dict__["_wrapped"] = value
        else:
            if self._wrapped is empty:
                self._setup()
            setattr(self._wrapped, name, value)

    def __delattr__(self, name):
        if name == "_wrapped":
            raise TypeError("can't delete _wrapped.")
        if self._wrapped is empty:
            self._setup()
        delattr(self._wrapped, name)

    def _setup(self):
        raise NotImplementedError(
            "subclasses of LazyObject must provide a _setup() method"
        )

    def __reduce__(self):
        if self._wrapped is empty:
            self._setup()
        return unpickle_lazyobject, (self._wrapped,)

    def __copy__(self):
        if self._wrapped is empty:
            # uninitialised: copy as another empty lazy wrapper
            return type(self)()
        else:
            return copy.copy(self._wrapped)

    def __deepcopy__(self, memo):
        if self._wrapped is empty:
            result = type(self)()
            memo[id(self)] = result
            return result
        return copy.deepcopy(self._wrapped, memo)

    __bytes__ = new_method_proxy(bytes)
    __str__ = new_method_proxy(str)
    __bool__ = new_method_proxy(bool)

    # Introspection support
    __dir__ = new_method_proxy(dir)

    # Need to pretend to be the wrapped class, for the sake of objects that
    # care about this (especially in equality tests)
    __class__ = property(new_method_proxy(operator.attrgetter("__class__")))
    __eq__ = new_method_proxy(operator.eq)
    __lt__ = new_method_proxy(operator.lt)
    __gt__ = new_method_proxy(operator.gt)
    __ne__ = new_method_proxy(operator.ne)
    __hash__ = new_method_proxy(hash)

    # List/Tuple/Dictionary methods support
    __getitem__ = new_method_proxy(operator.getitem)
    __setitem__ = new_method_proxy(operator.setitem)
    __delitem__ = new_method_proxy(operator.delitem)
    __iter__ = new_method_proxy(iter)
    __len__ = new_method_proxy(len)
    __contains__ = new_method_proxy(operator.contains)


class LazySettings(LazyObject):
    """Lazy facade: loads Settings from $SETTINGS on first attribute access."""

    def _setup(self, name=None):
        settings_module = os.environ.get(ENVIRONMENT_VARIABLE)
        if not settings_module:
            desc = ("setting %s" % name) if name else "settings"
            raise ImproperlyConfigured(
                "Requested %s, but settings are not configured. "
                "You must either define the environment variable %s "
                "or call settings.configure() before accessing settings."
                % (desc, ENVIRONMENT_VARIABLE)
            )
        self._wrapped = Settings(settings_module)

    def __repr__(self):
        if self._wrapped is empty:
            return "<LazySettings [Unevaluated]>"
        return '<LazySettings "%(settings_module)s">' % {
            "settings_module": self._wrapped.SETTINGS_MODULE,
        }

    def __getattr__(self, name):
        _wrapped = self._wrapped
        if _wrapped is empty:
            self._setup(name)
            _wrapped = self._wrapped
        val = getattr(_wrapped, name)
        # memoise on the instance so future lookups skip __getattr__
        self.__dict__[name] = val
        return val

    def __setattr__(self, name, value):
        if name == "_wrapped":
            # rewrapping invalidates every memoised value
            self.__dict__.clear()
        else:
            self.__dict__.pop(name, None)
        super().__setattr__(name, value)

    def __delattr__(self, name):
        super().__delattr__(name)
        self.__dict__.pop(name, None)

    def configure(self, default_settings=None, **options):
        """Configure settings manually instead of via a settings module."""
        if self._wrapped is not empty:
            raise RuntimeError("Settings already configured.")
        holder = UserSettingsHolder(default_settings)
        for name, value in options.items():
            if not name.isupper():
                raise TypeError(f"Setting {name} must be uppercase.")
            setattr(holder, name, value)
        self._wrapped = holder

    @property
    def configured(self):
        return self._wrapped is not empty


settings = LazySettings()
yyxx-game-pkg-compat
/yyxx_game_pkg_compat-2023.8.31.2-py3-none-any.whl/yyxx_game_pkg/conf/__init__.py
__init__.py
import base64

from Crypto import Random
from Crypto.Cipher import PKCS1_v1_5 as PKCS1_cipher
from Crypto.Hash import SHA256
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5 as PKCS1_signature


class RSACrypto:
    """RSA helpers: key generation, encrypt/decrypt and sign/verify."""

    @staticmethod
    def generator_rsa(filename=None, bits=1024):
        """Generate an RSA key pair, optionally writing it to PEM files.

        BUG FIX: ``filename`` was previously ignored — the f-strings had no
        placeholder, so the keys were always written to a fixed literal path
        regardless of the argument. The parameter is now honoured.

        :param filename: basename prefix; when given, writes
            ``{filename}_private.pem`` and ``{filename}_public.pem``
        :param bits: key size; default kept at 1024 for backward
            compatibility. SECURITY NOTE: 1024-bit RSA is considered weak —
            prefer >= 2048 for new keys.
        :return: ``(private_key_bytes, public_key_bytes)`` PEM-encoded
        """
        random_generator = Random.new().read
        rsa = RSA.generate(bits, random_generator)
        rsa_private_key = rsa.exportKey()
        rsa_public_key = rsa.publickey().exportKey()
        if filename is not None:
            with open(f"{filename}_private.pem", "w") as f:
                f.write(rsa_private_key.decode())
            with open(f"{filename}_public.pem", "w") as f:
                f.write(rsa_public_key.decode())
        return rsa_private_key, rsa_public_key

    @staticmethod
    def rsa_public_crypto(raw_str: str, public_key: str) -> str:
        """
        公钥加密 — encrypt with the public key.
        :param raw_str: raw string
        :param public_key: public key (PEM)
        :return: base64-encoded ciphertext
        """
        cipher = PKCS1_cipher.new(RSA.importKey(public_key))
        encrypt_text = base64.b64encode(cipher.encrypt(raw_str.encode("utf-8")))
        return encrypt_text.decode("utf-8")

    @staticmethod
    def rsa_private_crypto(crypto_str: str, private_key: str) -> str:
        """
        私钥解密 — decrypt a base64 ciphertext with the private key.
        :param crypto_str: 加密字符串 (base64 ciphertext)
        :param private_key: private key (PEM)
        :return: decoded plaintext
        """
        cipher = PKCS1_cipher.new(RSA.importKey(private_key))
        # the second argument is the sentinel returned on padding failure
        decrypt_text = cipher.decrypt(base64.b64decode(crypto_str), Random.new().read)
        return decrypt_text.decode("utf-8")

    @staticmethod
    def rsa_private_sign(raw_str: str, private_key: str) -> str:
        """
        私钥签名 — SHA256 + PKCS#1 v1.5 signature.
        :param raw_str: raw string
        :param private_key: private key (PEM)
        :return: base64-encoded signature
        """
        key = RSA.importKey(private_key)
        signer = PKCS1_signature.new(key)
        digest = SHA256.new()
        digest.update(raw_str.encode("utf8"))
        sign = signer.sign(digest)
        return base64.b64encode(sign).decode("utf-8")

    @staticmethod
    def rsa_public_sign(raw_str: str, sign: str, public_key: str) -> bool:
        """
        公钥验证签名 — verify a base64 signature against raw_str.
        :param raw_str: raw string
        :param sign: 签名 (base64 signature)
        :param public_key: public key (PEM)
        :return: True when the signature matches
        """
        key = RSA.importKey(public_key)
        verifier = PKCS1_signature.new(key)
        digest = SHA256.new()
        digest.update(raw_str.encode("utf-8"))
        return verifier.verify(digest, base64.b64decode(sign))
yyxx-game-pkg-compat
/yyxx_game_pkg_compat-2023.8.31.2-py3-none-any.whl/yyxx_game_pkg/crypto/rsa.py
rsa.py
from functools import wraps

from opentelemetry import trace
from opentelemetry.exporter.jaeger.thrift import JaegerExporter
from opentelemetry.sdk.resources import SERVICE_NAME, Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from opentelemetry.trace import get_current_span
from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
from opentelemetry.trace.status import Status, StatusCode

_tracer = trace.get_tracer(__name__)


def get_tracer():
    """Return the module-level tracer instance."""
    return _tracer


def register_to_jaeger(service_name: str, jaeger_host: str, jaeger_port: int = 6831,
                       udp_split_oversized_batches: bool = True):
    """
    注册服务到jaeger — register this service with a Jaeger agent so spans
    are exported there.

    Args:
        service_name: registered service name
        jaeger_host: jaeger agent address
        jaeger_port: The port of the Jaeger-Agent.
        udp_split_oversized_batches: Re-emit oversized batches in smaller chunks.
    """
    provider = TracerProvider(resource=Resource.create({SERVICE_NAME: service_name}))
    trace.set_tracer_provider(provider)
    # create a JaegerExporter
    jaeger_exporter = JaegerExporter(
        agent_host_name=jaeger_host,
        agent_port=jaeger_port,
        udp_split_oversized_batches=udp_split_oversized_batches,
    )
    # Create a BatchSpanProcessor and add the exporter to it
    span_processor = BatchSpanProcessor(jaeger_exporter)
    # add to the tracer
    trace.get_tracer_provider().add_span_processor(span_processor)


def _traced_call(span, func, args, kwargs, ret_trace_id, set_attributes):
    """Run *func* inside *span*, honouring both decorator flags.

    BUG FIX: previously `set_attributes` was silently ignored whenever
    `ret_trace_id` was also True, because the decorator returned before
    reaching the attribute branch. Attributes are now recorded first.
    Shared by trace_span and trace_span_extract so the two cannot drift.
    """
    try:
        result = func(*args, **kwargs)
        if set_attributes:
            span.set_attributes({"kwargs": str(kwargs), "args": str(args)})
        if ret_trace_id:
            return result, hex(span.get_span_context().trace_id)
        return result
    except Exception as e:
        span.set_status(Status(StatusCode.ERROR, str(e)))
        raise


def trace_span(ret_trace_id: bool = False, set_attributes: bool = False,
               operation_name: str = ""):
    """Decorator: run the wrapped function inside a new span.

    :param ret_trace_id: also return the hex trace id as a second value
    :param set_attributes: record str(args)/str(kwargs) on the span
    :param operation_name: span name; defaults to "<module>.<qualname>"
    """

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            _operation_name = operation_name or f"{func.__module__}.{func.__name__}"
            with _tracer.start_as_current_span(_operation_name) as span:
                return _traced_call(
                    span, func, args, kwargs, ret_trace_id, set_attributes
                )

        return wrapper

    return decorator


def get_current_trace_id():
    """Return the current span's trace id as a hex string."""
    span = get_current_span()
    trace_id = span.get_span_context().trace_id
    return hex(trace_id)


def add_span_tags(attributes: dict):
    """Attach attributes (tags) to the current span."""
    span = get_current_span()
    span.set_attributes(attributes)


def add_span_events(event_name: str, events: dict):
    """Add an event to the current span (docstring fixed: events, not tags)."""
    span = get_current_span()
    span.add_event(event_name, events)


def get_trace_parent():
    """Return {"trace_id", "trace_parent_string"} for W3C context propagation.

    Returns {} when there is no valid span context to propagate.
    """
    span = trace.get_current_span()
    span_context = span.get_span_context()
    if span_context == trace.INVALID_SPAN_CONTEXT:
        return {}
    trace_id = trace.format_trace_id(span_context.trace_id)
    trace_parent_string = (
        f"00-{trace_id}-{trace.format_span_id(span_context.span_id)}"
        f"-{span_context.trace_flags:02x}"
    )
    return {
        "trace_id": trace_id,
        "trace_parent_string": trace_parent_string,
    }


def trace_span_extract(ret_trace_id: bool = False, set_attributes: bool = False,
                       operation_name: str = ""):
    """Decorator like :func:`trace_span`, but the new span joins the remote
    context carried in ``kwargs['trace_parent_string']`` (W3C traceparent)."""

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            trace_parent_string = kwargs.get('trace_parent_string', '')
            carrier = {'traceparent': trace_parent_string}
            ctx = TraceContextTextMapPropagator().extract(carrier=carrier)
            _operation_name = operation_name or f"{func.__module__}.{func.__name__}"
            with _tracer.start_as_current_span(_operation_name, context=ctx) as span:
                return _traced_call(
                    span, func, args, kwargs, ret_trace_id, set_attributes
                )

        return wrapper

    return decorator
yyxx-game-pkg-compat
/yyxx_game_pkg_compat-2023.8.31.2-py3-none-any.whl/yyxx_game_pkg/xtrace/helper.py
helper.py
import gzip
import json
from django.conf import settings
from django.utils.deprecation import MiddlewareMixin
from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
import yyxx_game_pkg.xtrace.helper as xtrace_helper
from yyxx_game_pkg.xtrace.django.util.common import get_django_middleware_setting


class _DjangoJaegerMiddleware(MiddlewareMixin):
    """Django middleware for Jaeger tracing.

    Renames the active span after the request ("<METHOD> <path>") and, when
    enabled, attaches request/response payloads as span events. All tracing
    work is wrapped in try/except so it can never break request handling.
    """

    # settings.JAEGER keys: log_max_size (event payload cap), is_log
    # (response logging toggle), ignore_paths (paths excluded from events)
    _jaeger_config = getattr(settings, "JAEGER", {})
    _log_max_size = _jaeger_config.get("log_max_size", 2048)
    _is_log = _jaeger_config.get("is_log", False)
    _ignore_paths = _jaeger_config.get("ignore_paths", [])

    def __call__(self, request):
        # Name the span and record request params as a span event.
        try:
            span = xtrace_helper.get_current_span()
            path_info = request.environ['PATH_INFO']
            span.update_name(f"{request.environ['REQUEST_METHOD']} {path_info}")
            if path_info not in self._ignore_paths:
                if getattr(request, "REQUEST", None):
                    # legacy request.REQUEST already merges GET+POST
                    request_params = dict(request.REQUEST)
                else:
                    # NOTE(review): block structure reconstructed from a
                    # collapsed source — GET/POST merge assumed to apply only
                    # when request.REQUEST is absent; confirm against the
                    # original file.
                    request_params = {}
                    request_params.update(request.GET)
                    request_params.update(request.POST)
                span.add_event("request", {"params": json.dumps(request_params)[:self._log_max_size]})
        except Exception as e:
            print(e)
        return super().__call__(request)

    def process_response(self, request, response):
        # Optionally attach the (possibly gzipped) response body as an event,
        # then inject the W3C traceparent header into the response.
        try:
            if self._is_log and (request.environ['PATH_INFO'] not in self._ignore_paths):
                span = xtrace_helper.get_current_span()
                admin_alias = getattr(getattr(request, "admin", None), "alias", None)
                if admin_alias:
                    span.set_attributes({"request.admin.alias": admin_alias})
                settings_middleware = getattr(settings, get_django_middleware_setting(), [])
                if "django.middleware.gzip.GZipMiddleware" in settings_middleware and response.get(
                        "Content-Encoding") == 'gzip':
                    # body was compressed by GZipMiddleware; decompress for logging
                    span.add_event("response", {"params": gzip.decompress(response.content).decode()[:self._log_max_size]})
                else:
                    span.add_event("response", {"params": response.content.decode()[:self._log_max_size]})
            # inject trace parent to response header
            # NOTE(review): placed outside the is_log branch (reconstructed
            # from collapsed source) so propagation is independent of logging
            # — confirm against the original file.
            TraceContextTextMapPropagator().inject(response)
        except Exception as e:
            print(e)
        return response

    def process_exception(self, request, exception):
        # Mark the active span as errored; never raise from tracing code.
        try:
            span = xtrace_helper.get_current_span()
            span.set_status(xtrace_helper.Status(xtrace_helper.StatusCode.ERROR, exception.__str__()))
        except Exception as e:
            print(e)
        return None
yyxx-game-pkg-compat
/yyxx_game_pkg_compat-2023.8.31.2-py3-none-any.whl/yyxx_game_pkg/xtrace/django/middleware.py
middleware.py
import datetime
import json

from pymysql.cursors import Cursor, DictCursor

from yyxx_game_pkg.conf import settings
from yyxx_game_pkg.dbops.mysql_op import MysqlOperation
from yyxx_game_pkg.helpers.mysql_helper import get_dbpool
from yyxx_game_pkg.helpers.redis_helper import get_redis


class OPHelper:
    """Facade over pooled MySQL and Redis helpers, plus a small Redis-backed
    cache for SQL query results."""

    # --------------- mysql start ---------------
    @classmethod
    def connection(cls, mysql_alias="default", dict_cursor=True):
        """Open a pooled MySQL connection from settings.DATABASES[alias].

        Keys are lower-cased for the pool; PORT 必须为数字 (numeric), and the
        Django-style NAME key is mapped to the pool's "db" key.
        """
        cfg = {}
        for key, value in settings.DATABASES[mysql_alias].items():
            if key == "PORT" and isinstance(value, str) and value.isdigit():
                value = int(value)
            cfg[key.lower()] = value
            if key == "NAME":
                cfg["db"] = cfg.pop("name")
        cfg["cursor"] = DictCursor if dict_cursor else Cursor
        return get_dbpool(cfg).get_connection()

    @classmethod
    def mp(cls):
        """Return a fresh MysqlOperation instance."""
        return MysqlOperation()

    @classmethod
    def sql_func_get_one(cls):
        """Bound get_one callable, suitable as a cache() sql_func."""
        return cls.mp().get_one

    @classmethod
    def sql_func_get_all(cls):
        """Bound get_all callable, suitable as a cache() sql_func."""
        return cls.mp().get_all

    # --------------- mysql end ---------------

    # --------------- redis start ---------------
    @classmethod
    def redis(cls, redis_alias="default"):
        """RedisHelper built from settings.REDIS_SERVER[alias]."""
        return get_redis(settings.REDIS_SERVER[redis_alias])

    # --------------- redis end ---------------

    # --------------- redis cache start ---------------
    @classmethod
    def cache(
        cls,
        sql="",
        sql_func=None,
        redis_key="",
        ex=None,
        redis_alias="default",
        mysql_alias="default",
    ):
        """Run *sql_func* with a Redis read-through cache.

        :param sql: sql语句 (SQL statement)
        :param sql_func: sql方法 execute / get_one / get_all / insert
        :param redis_key: 缓存key (cache key)
        :param ex: 缓存过期时间 (TTL); None means no expiry
        :param redis_alias: Redis config alias
        :param mysql_alias: MySQL config alias
        """
        cache_cli = cls.redis(redis_alias)
        payload = cache_cli.get_data(redis_key)
        if not payload:
            payload = sql_func(sql, cls.connection(mysql_alias))
            if payload:
                # rows are persisted as JSON-wrapped repr() text
                cache_cli.set_data(redis_key, json.dumps(str(payload)), ex)
        if isinstance(payload, bytes):
            # SECURITY NOTE: cache hits are revived via eval() of repr() text
            # (needed e.g. for datetime values in rows); this is only safe
            # while the Redis cache contents are fully trusted.
            payload = eval(json.loads(payload))
        return payload

    @classmethod
    def cache_sql_one(
        cls,
        sql,
        redis_key,
        ex=None,
        redis_alias="default",
        mysql_alias="default",
    ):
        """Cached single-row query (get_one)."""
        return cls.cache(sql, cls.mp().get_one, redis_key, ex, redis_alias, mysql_alias)

    @classmethod
    def cache_sql_all(
        cls,
        sql,
        redis_key,
        ex=None,
        redis_alias="default",
        mysql_alias="default",
    ):
        """Cached multi-row query (get_all)."""
        return cls.cache(sql, cls.mp().get_all, redis_key, ex, redis_alias, mysql_alias)

    # --------------- redis cache end ---------------


# module-level convenience handles (created eagerly at import time)
redis = OPHelper.redis()
mp = OPHelper.mp()
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/helpers/op_helper.py
op_helper.py
import redis

from yyxx_game_pkg.logger.log import root_log
from yyxx_game_pkg.utils.decorator import singleton_unique_obj_args


class RedisConfig:
    """
    redis config

    Connection parameters are filled in by subclasses (see get_redis()).
    """

    HOST = None
    PORT = None
    DB = None
    PASSWORD = None
    # Default key TTL in seconds (one day).
    OVERDUE_SECOND = 86400

    def __str__(self):
        # Used both for logging and (presumably) as the singleton identity key
        # of @singleton_unique_obj_args -- TODO confirm against the decorator.
        return "host:{}, port:{}, db:{}, OVERDUE_SECOND:{}".format(
            self.HOST, self.PORT, self.DB, self.OVERDUE_SECOND
        )


@singleton_unique_obj_args
class RedisHelper:
    """Thin wrapper around a redis.Redis client backed by a connection pool."""

    def __init__(self, config: RedisConfig):
        connection_pool = redis.ConnectionPool(
            host=config.HOST, port=config.PORT, db=config.DB, password=config.PASSWORD
        )
        self.__r = redis.Redis(connection_pool=connection_pool)
        root_log(f"<RedisHelper> init, info:{config}")

    @property
    def redis_cli(self):
        # Expose the raw client for operations not wrapped below.
        return self.__r

    def get_data(self, key):
        return self.__r.get(key)

    def set_data(self, key, value, ex=None, _px=None):
        # Positional pass-through to redis.Redis.set(name, value, ex, px).
        return self.__r.set(key, value, ex, _px)

    def list_keys(self, pattern="*"):
        return self.__r.keys(pattern)

    def delete(self, key):
        return self.__r.delete(key)

    def hset(self, name, key, value):
        return self.__r.hset(name, key, value)

    def hget(self, name, key):
        return self.__r.hget(name, key)

    def hdel(self, name, *keys):
        return self.__r.hdel(name, *keys)

    def hgetall(self, name):
        return self.__r.hgetall(name)

    def hlen(self, name):
        return self.__r.hlen(name)

    def incr(self, name, amount=1):
        return self.__r.incr(name, amount)

    def expire(self, key, ex):
        """
        Set the expiry (TTL) of a key.
        :param key:
        :param ex: TTL in seconds
        :return:
        """
        return self.__r.expire(key, ex)

    def lpush(self, key, *val):
        """
        Push elements onto the list at ``key``; each new element is
        inserted at the far left of the list.
        :param key:
        :param val:
        :return:
        """
        return self.__r.lpush(key, *val)

    def rpush(self, key, *val):
        """
        Same as lpush, but each new element is appended at the far right.
        :param key:
        :param val:
        :return:
        """
        return self.__r.rpush(key, *val)

    def lrange(self, key, start=0, end=-1):
        """
        Fetch a slice of the list.
        :param key:
        :param start:
        :param end:
        :return:
        """
        return self.__r.lrange(key, start, end)


def get_redis(config: dict) -> RedisHelper:
    """
    Build (or fetch the cached singleton of) a RedisHelper from a config dict.
    :return:
    """

    class Config(RedisConfig):
        """
        redis config
        """

        HOST = config["host"]
        PORT = config["port"]
        DB = config["db"]
        PASSWORD = config["password"]
        OVERDUE_SECOND = config.get("overdue_second", 86400)

    return RedisHelper(Config())
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/helpers/redis_helper.py
redis_helper.py
import pymysql from dbutils.pooled_db import PooledDB from pymysql.cursors import Cursor from yyxx_game_pkg.logger.log import root_log from yyxx_game_pkg.utils.decorator import ( except_monitor, log_execute_time_monitor, singleton_unique_obj_args, ) # #################################################### class MysqlConfig: HOST = None PORT = None USER = None PASSWD = None DB = None USE_UNICODE = None CHARSET = None MAX_CACHED = None MAX_CONNECTIONS = None CURSOR = None def __str__(self): # 不能返回无法序列化的数据, 否则单例会失效 return "host:{},port:{},db:{},use_unicode:{},charset:{},max_cache:{},max_connections:{}".format( self.HOST, self.PORT, self.DB, self.USE_UNICODE, self.CHARSET, self.MAX_CACHED, self.MAX_CONNECTIONS ) @singleton_unique_obj_args class MysqlDbPool(object): def __init__(self, config: MysqlConfig): self.DB_POOL = PooledDB( creator=pymysql, maxcached=config.MAX_CACHED, maxconnections=config.MAX_CONNECTIONS, host=config.HOST, port=config.PORT, user=config.USER, passwd=config.PASSWD, db=config.DB, use_unicode=config.USE_UNICODE, charset=config.CHARSET, cursorclass=config.CURSOR, ) root_log(f"<MysqlDbPool> init, info:{config}") @except_monitor @log_execute_time_monitor() def get_connection(self): return self.DB_POOL.connection() def close_connection(self): """ 关闭线程池,线程池最少占用1连接,100个进程跑1000个相同IP库的服时,最多会生成10W连接,所以需要关闭线程池,释放全部连接。 优化点:以后可以相同IP的服务器共用1个线程池(现阶段sql查game库没有指定库名,改动地方多,搁置) :return: """ self.DB_POOL.close() # #################### 模块对外接口 #################### def get_dbpool(config: dict) -> MysqlDbPool: class Config(MysqlConfig): HOST = config["host"] PORT = config["port"] USER = config["user"] PASSWD = config["password"] DB = config["db"] USE_UNICODE = config.get("use_unicode", True) CHARSET = config.get("charset", "utf8") MAX_CACHED = config.get("maxcached", 0) MAX_CONNECTIONS = config.get("maxconnections", 0) CURSOR = config.get("cursor", Cursor) return MysqlDbPool(Config())
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/helpers/mysql_helper.py
mysql_helper.py
from abc import ABC, abstractmethod
from dataclasses import dataclass

from yyxx_game_pkg.center_api.sdk.map_core import MapCore
from yyxx_game_pkg.conf import settings


@dataclass
class Params:
    """
    Maps logical field names to the channel's request field names.

    @param extra: extra parameters
    @param cp_order_id: vendor order ID, generated by the vendor
    @param channel_order_id: channel-side order ID
    @param player_id: role ID
    @param is_check_username: whether to verify account against player ID
    @param channel_username: channel account
    @param is_test: whether this is a test order
    """

    extra: str = "extra"
    cp_order_id: str = "billno"
    channel_order_id: str = "order_id"
    player_id: str = "role_id"
    channel_username: str = "openid"
    money: str = "amount"
    is_check_username: int = 1
    is_test: int = 0


class BaseRecharge(MapCore, ABC):
    """
    Recharge-callback base class.

    Note:
        Method modify_params is used to adjust the values in self.params.
        Subclasses must implement get_params_handler and feedback.
        get_params_handler supplements the parameters built by get_params.
    """

    params = Params()

    def modify_params(self):
        """
        Hook: mutate self.params before get_params runs.
        """
        pass

    def get_params(self, data) -> dict:
        """Parse the channel request into the normalized data_ary dict; {} when no extra."""
        self.modify_params()
        extra = data.get(self.params.extra, "")
        if not extra:
            return {}
        ext_ary = extra.split(",")
        data_ary = {"extra": extra}
        self.get_params_core(data, data_ary, ext_ary)
        self.get_params_helper(data, data_ary)
        return data_ary

    def get_params_core(self, data, data_ary, ext_ary):
        """Fill the fields that can be read generically via the Params mapping."""
        data_ary["cp_order_id"] = data.get(self.params.cp_order_id, "")
        data_ary["channel_order_id"] = data.get(self.params.channel_order_id, "")
        data_ary["player_id"] = data.get(self.params.player_id)
        data_ary["is_check_username"] = self.params.is_check_username
        data_ary["channel_username"] = data.get(self.params.channel_username, "")
        # NOTE(review): guard is len > 6 but the index used is 5 (the 6th element);
        # looks like an off-by-one (> 5 would suffice) -- confirm the extra format.
        if len(ext_ary) > 6:
            data_ary["recharge_id"] = int(ext_ary[5])

    def get_params_helper(self, data, data_ary) -> None:
        """
        Supplement data_ary with parameters that get_params_core cannot
        obtain generically.
        --------------------------------
        money          amount
        real_money     actually-paid amount
        extra_gold     bonus gold (channel rebate)
        extra_gold_bind bonus bound gold (channel rebate)
        pay_dt         recharge time (seconds)
        --------------------------------
        """
        # Channel amount is in cents; convert to currency units.
        amount = int(data.get(self.params.money, 0))
        data_ary["real_money"] = int(amount / 100)
        data_ary["money"] = amount / 100

    def make_sign_helper(self, values) -> (dict, str):
        """Resolve the channel pay_key from the plat_code/game_channel_id in extra."""
        ext_ary = values[self.params.extra].split(",")
        plat_code = ext_ary[0]
        game_channel_id = ext_ary[1]
        sdk_data = self.operator.get_key(plat_code, game_channel_id)
        pay_key = sdk_data.get("pay_key", "")
        return values, pay_key

    def make_sign(self, values) -> str:
        """Sign the request with the channel's pay_key."""
        values, pay_key = self.make_sign_helper(values)
        return self.channel_make_sign(values, pay_key)

    @abstractmethod
    def feedback(self, error_code, data: dict = None, msg="", *args, **kwargs):
        """
        Return whatever response shape the channel requires.
        """
        return error_code
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/center_api/sdk/recharge.py
recharge.py
import time
from abc import ABC, abstractmethod
from typing import Callable, NewType
from urllib.parse import unquote

from yyxx_game_pkg.center_api.sdk.map_core import MapCore
from yyxx_game_pkg.utils.error_code import ErrorCode
from yyxx_game_pkg.utils.xhttp import http_request
from yyxx_game_pkg.utils.xstring import parse_json

# FIX: ``Callable[[...], None]`` is not a valid typing parameter list; an
# arbitrary-signature callable is spelled ``Callable[..., None]``.
SDK_HELPER = NewType("SDK_HELPER", Callable[..., None])
RESPONSE_HELPER = NewType("RESPONSE_HELPER", Callable[..., None])


class BaseCheckToken(MapCore, ABC):
    """
    Token-verification base class.

    Note: subclasses must implement response_helper.

    @func response_helper: shape the data returned to the caller
    @func sdk_check_token: perform the token verification
    @func sdk_helper: build the SDK request parameters
    @func channel_make_sign: default is md5 over sorted(params)

    Per-channel knobs:
    @param is_https: whether the request uses https; default True
    @param method: request method POST/GET; default POST
    @param params: mapping of sent (key) -> received (value) parameter names
    """

    is_https = True  # True False
    method = "POST"  #
    # params = {}
    sdk_exclude = ()

    def run_check_token(self, *args, **kwargs) -> dict:
        """
        Entry point: pick the version-specific handlers, call the SDK,
        then shape the response.
        """
        sdk_helper, response_helper = self.sdk_version_choice(**kwargs)
        if sdk_helper is None:
            return self.sdk_rechfeed(ErrorCode.ERROR_INVALID_PARAM)
        channel_data, post_data = sdk_helper(self.sdk_exclude, **kwargs)
        response = self.sdk_check_token(channel_data, post_data)
        return response_helper(response, **kwargs)

    @abstractmethod
    def response_helper(self, response: dict | None, **kwargs) -> dict:
        """
        Return data as required by the project.
        :return: {"ret": 1, "user_id": "any_user_id"}
        """
        return self.sdk_rechfeed(ErrorCode.ERROR_INVALID_PARAM, "验证失败")

    @property
    def _params(self):
        """
        params = {
            "appId": "sdk_appId",
            "accountId": "sdk_accountId",
            "token": "sdk_token",
        }
        """
        if self.params is None:
            raise ValueError("params must be specified as a dict")
        return self.params

    # FIX: a bare tuple ``(dict, dict)`` is not a valid return annotation;
    # use ``tuple[dict, dict]`` (the file already relies on 3.10+ syntax).
    def sdk_helper(self, sdk_exclude=(), **kwargs) -> tuple[dict, dict]:
        """
        Build the SDK request data.
        :param sdk_exclude: keys sdk_helper should skip.
            Allowed values: time (self.Time), sign (self.Flag)
        """
        channel_data = kwargs.get("channel_data", {})
        post_data = {}
        for k, v in self._params.items():
            post_data[k] = kwargs.get(v)
        if self.Time not in sdk_exclude:
            post_data[self.Time] = int(time.time())
        if self.Flag not in sdk_exclude:
            post_data[self.Flag] = self.channel_make_sign(
                post_data, channel_data.get("app_key", "")
            )
        return channel_data, post_data

    def sdk_check_token(self, channel_data, post_data) -> dict | None:
        """
        Override when this default does not fit the channel.
        Default: request the channel's api_url and parse the (urlencoded) JSON reply.
        """
        url = channel_data.get("api_url", "")
        if not url:
            return None
        result = http_request(
            url=url,
            data=post_data,
            is_https=self.is_https,
            method=self.method,
        )
        return parse_json(unquote(result))

    @property
    def sdk_version_map(self) -> dict:
        """
        sdk version map.
        When several sdk versions exist, add the corresponding entries here.
        """
        return {
            "1.0.0": {
                "sdk_helper": self.sdk_helper,
                "response_helper": self.response_helper,
            },
        }

    def sdk_version_choice(self, **kwargs) -> tuple[SDK_HELPER, RESPONSE_HELPER]:
        """
        Pick the sdk_helper / response_helper pair matching the requested
        sdk version; (None, None) when the version is unknown.
        """
        sdk_version = kwargs.get("sdk_version", "1.0.0")
        version_map = self.sdk_version_map.get(sdk_version, None)
        if version_map is None:
            return None, None
        sdk_helper = version_map["sdk_helper"]
        response_helper = version_map["response_helper"]
        return sdk_helper, response_helper
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/center_api/sdk/check_token.py
check_token.py
import json
import time
from abc import abstractmethod

from flask import request

from yyxx_game_pkg.center_api.model.Operator import Operator
from yyxx_game_pkg.center_api.model.OperatorServer import OperatorServer
from yyxx_game_pkg.conf import settings
from yyxx_game_pkg.crypto.basic import RANDOM_STRING_CHARS_LOWER, get_random_string, md5
from yyxx_game_pkg.crypto.make_sign import make_sign
from yyxx_game_pkg.helpers.op_helper import OPHelper


class MapCore(OPHelper):
    """Base class for center-api handlers: signing, timeout checks, response shaping."""

    Flag = "sign"  # request field carrying the signature
    Time = "time"  # request field carrying the timestamp
    Gmip = None
    Imei = None
    Callback = None  # JSONP callback name, applied in feedback()
    OutTime = 0  # max allowed request age in seconds; 0 disables the check

    make_sign_exclude = {"gmip", "cp_platform", "ch_conter", "opts"}

    API_KEY = settings.API_KEY

    params = None

    _plat_code = None
    _operator = None
    _game_channel_id = None

    # large-recharge limit
    max_money_limit = 5000

    def __init__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs

    def init_ip_imei(self, values):
        """Capture caller IP / device id from the request values."""
        self.Gmip = values.get("gmip", "")
        self.Imei = values.get("imei", "")

    def get_params(self, data):
        return data

    def get_params_helper(self, data, data_ary) -> None:
        pass

    def check_sign(self, values):
        """True when the request's signature matches the one we compute."""
        sign = values.get(self.Flag, None)
        if sign is None:
            return False
        _sign = self.make_sign(values)
        if sign != _sign:
            return False
        return True

    def make_sign(self, values) -> str:
        return make_sign(
            values, self.api_key, exclude=self.make_sign_exclude, time_key=self.Time
        )

    def channel_make_sign(self, values, sign_key) -> str:
        return make_sign(
            values, sign_key, exclude=self.make_sign_exclude, time_key=None
        )

    def check_time_out(self, values):
        """False when the request timestamp is older than OutTime seconds."""
        _time = int(values.get(self.Time, 0))
        t = time.time()
        if self.OutTime != 0 and int(t) - _time > self.OutTime:
            return False
        return True

    def check_public(self, values) -> bool:
        return True

    def sdk_rechfeed(self, error_code, msg="") -> dict:
        """Shape an sdk response dict from an error-code mapping."""
        if not msg:
            msg = str(error_code.get("msg", ""))
        code = int(error_code.get("code", 0))
        return {"ret": code, "msg": msg}

    def feedback(
        self, error_code, msg_data: dict | list = None, msg="", *args, **kwargs
    ):
        """
        Build the JSON (or JSONP, when self.Callback is set) response string.
        When msg_data is present it is signed with md5(data + action + API_KEY)
        and decorated with obfuscation keys and server_time.
        """
        # FIX: isinstance() instead of type(...) == dict
        if isinstance(error_code, dict):
            if not msg:
                msg = str(error_code.get("msg", ""))
            code = int(error_code.get("code", 0))
        else:
            code = error_code
        # Randomly-named filler keys obfuscate the payload shape per response.
        result = {
            f"{get_random_string(5, RANDOM_STRING_CHARS_LOWER)}_myzd_a": str(
                int(time.time())
            ),
            f"{get_random_string(5, RANDOM_STRING_CHARS_LOWER)}_myzd_b": str(
                int(time.time())
            ),
            "server_time": int(time.time()),
        }
        if msg_data or msg_data == 0:
            receive_data = request.values
            receive_path = request.path
            receive_oid = receive_data.get("oid", "")
            receive_gcid = receive_data.get("gcid", "")
            receive_action = ""
            if not receive_gcid:
                receive_gcid = receive_data.get("game_channel_id", "")
            receive_path_list = receive_path.split("/")
            if receive_oid and receive_gcid:
                # The action name is the 2nd/3rd path segment, used in the data signature.
                if len(receive_path_list) > 2:
                    receive_action = receive_path_list[2]
                else:
                    receive_action = receive_path_list[1]
                oid_data = OperatorServer.get_oid_data(receive_oid, receive_gcid)
                if oid_data.get("is_close_check", None):
                    result["close_check"] = "yesyes"
            data_str = json.dumps(msg_data)
            # Escape forward slashes to match the consumer's expectations.
            data_str = "\\/".join(data_str.split("/"))
            data_sign = md5(f"{data_str}{receive_action}{self.API_KEY}")
            result["code"] = code
            result["msg"] = msg
            result["data"] = msg_data
            result["data_sign"] = data_sign
            result = "\\\n".join(json.dumps(result, ensure_ascii=False).split("\n"))
        else:
            result = json.dumps({"code": code, "msg": msg}, ensure_ascii=False)
        if self.Callback:
            result = "{}({})".format(self.Callback, result)
        return result

    def is_open_ip(self, gmip=""):
        pass

    @property
    def operator(self):
        return Operator

    @property
    def api_key(self):
        # SECURITY FIX: removed a debug print() that leaked the API key to stdout/logs.
        if self.API_KEY is None:
            raise ValueError("API_KEY must be specified")
        return self.API_KEY


class MapCoreMinix:
    """Mixin overriding parameter parsing / signing for paginated listing endpoints."""

    def get_params(self, data):
        data_ary = {
            "cp_platform": data.get("cp_platform", ""),
            "page_size": 10000,
            "page": 1,
        }
        self.get_params_helper(data, data_ary)
        return data_ary

    def make_sign(self, values):
        sdk_data = self.operator.get_key(self._plat_code, self._game_channel_id)
        pay_key = sdk_data.get("pay_key", "")
        return self.channel_make_sign(values, pay_key)

    @abstractmethod
    def get_params_helper(self, data, data_ary) -> None:
        """
        Supplement data_ary, e.g.:
        for k, v in self.params.items():
            if v:
                data_ary[k] = data.get(v, "")
        """

    @abstractmethod
    def feedback_helper(self, data_list, error_code, ex=None):
        """
        if data_list:
            code = 1
            message = "success"
        else:
            code = 2
            message = error_code.get("msg", "")
        return {"code": code, "message": message, "data": data_list}
        """
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/center_api/sdk/map_core.py
map_core.py
import json import time from yyxx_game_pkg.helpers.op_helper import OPHelper from yyxx_game_pkg.utils.xstring import parse_json class TableFieldConf(OPHelper): @classmethod def get_field_config_by_table(cls, table_name): result = {} cache_key = f"sys_table_field_config_{table_name}" sql = """ SELECT * FROM sys_table_field_config WHERE table_name='{}' """.format( table_name ) data = cls.cache(sql, cls.sql_func_get_one(), cache_key) if data: for value in data: result[value["field_name"]] = value return result @classmethod def filter_table_config(cls, table_name, field_name, filter_data): """ 过滤 filter_data 的值,如果有表字段配置,必须 在表字段配置中 :param table_name: :param field_name: :param filter_data: :return: """ if not table_name: return filter_data cache_data = cls.get_field_config_by_table(table_name) if not cache_data: return filter_data if isinstance(cache_data, dict): field_data = cache_data.get(field_name, None) if field_data is None: return filter_data field_config = field_data.get("field_config", "{}") res = parse_json(field_config) if not res: return {} result = {} df_time = int(time.time()) df_json = json.dumps({}) for key, val in res.items(): fdv = filter_data.get(key, None) if fdv is None: val_d = val.get("default", "") val_t = val.get("type", "") if val_t == "int": val_d = int(val_d) elif val_t == "json" or val_t == "jsons": val_d = df_json elif val_t == "time": val_d = df_time elif val_t == "times": val_d = [df_time, df_time] elif val_t == "switch": val_d = 0 else: val_d = 0 fdv = val_d result[key] = fdv return result else: return filter_data
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/center_api/model/TableFieldConf.py
TableFieldConf.py
import json

from redis import AuthenticationError

from yyxx_game_pkg.helpers.op_helper import OPHelper, mp, redis
from yyxx_game_pkg.utils.xstring import parse_json


class Operator(OPHelper):
    """
    Note: connection and redis_handle must be configured first.
    """

    @classmethod
    def get_key(cls, operator, game_channel_id):
        """
        Return the merged SDK configuration for (operator, game_channel_id).

        Looks in Redis first; on a miss, loads the parent package (channel id 0)
        and sub-package rows from MySQL, merges the sub-package's sdk_config over
        the parent's, caches the result, and returns it. Returns {} on any error.
        """
        try:
            cache_key = "api_operator_channel_%s_%s_key" % (
                operator,
                game_channel_id,
            )
            package = {}
            subpackage = {}
            sdk_data = redis.get_data(cache_key)
            if not sdk_data:
                sdk_data = {}
                # NOTE(review): SQL built by %-interpolation of caller-supplied
                # values -- injection-prone if operator/game_channel_id can come
                # from untrusted input; consider parameterized queries.
                sql = """
                    SELECT
                        t1.alias as operator, t2.game_channel_id, t2.group_id, t2.iw_id, t2.sdk_config, t3.alias as iw_alias
                    FROM
                        svr_operator t1, svr_channel t2
                        left join svr_inter_working_group t3 on t2.iw_id = t3.id
                    WHERE
                        ((t1.alias = '%s' AND t2.game_channel_id = '%s') OR (t1.alias = '%s' AND t2.game_channel_id='0'))
                        AND t1.oid = t2.oid
                    ORDER BY t2.id DESC
                """ % (
                    operator,
                    game_channel_id,
                    operator,
                )
                data = mp.get_all(sql, cls.connection())
                if data:
                    for item in data:
                        if (
                            item["game_channel_id"] == "0"
                            or item["game_channel_id"] == 0
                        ):
                            # parent-package configuration
                            package = item
                        else:
                            # sub-package configuration
                            subpackage = item
                    if subpackage.get("sdk_config", "") or package.get(
                        "sdk_config", ""
                    ):
                        # For each scalar field, prefer the sub-package value and
                        # fall back to the parent package.
                        sdk_data["operator"] = (
                            subpackage["operator"]
                            if subpackage.get("operator", "")
                            else package.get("operator", "")
                        )
                        sdk_data["game_channel_id"] = (
                            subpackage["game_channel_id"]
                            if subpackage.get("game_channel_id", "")
                            else package.get("game_channel_id", "")
                        )
                        sdk_data["group_id"] = (
                            subpackage["group_id"]
                            if subpackage.get("group_id", "")
                            else package.get("group_id", "")
                        )
                        sdk_data["iw_id"] = (
                            subpackage["iw_id"]
                            if subpackage.get("iw_id", "")
                            else package.get("iw_id", "")
                        )
                        sdk_data["iw_alias"] = (
                            subpackage["iw_alias"]
                            if subpackage.get("iw_alias", "")
                            else package.get("iw_alias", "")
                        )
                        try:
                            if subpackage.get("sdk_config", ""):
                                # Overlay non-empty sub-package sdk_config keys
                                # onto the parent package's sdk_config.
                                sdk_subpackage = json.loads(
                                    subpackage.get("sdk_config", "{}")
                                )
                                sdk_package = json.loads(
                                    package.get("sdk_config", "{}")
                                )
                                for index, ist in sdk_subpackage.items():
                                    if sdk_subpackage.get(index, ""):
                                        sdk_package[index] = sdk_subpackage.get(
                                            index, ""
                                        )
                                subpackage["sdk_config"] = json.dumps(sdk_package)
                        except (TypeError, json.decoder.JSONDecodeError):
                            subpackage["sdk_config"] = {}
                        sdk_config = (
                            subpackage["sdk_config"]
                            if subpackage.get("sdk_config", "")
                            else package.get("sdk_config", "")
                        )
                        sdk_config = parse_json(sdk_config) if sdk_config else {}
                        sdk_data.update(sdk_config)
                        # Cache the merged result (no TTL set here).
                        redis.set_data(cache_key, json.dumps(sdk_data))
                    else:
                        sdk_data = {}
                else:
                    sdk_data = {}
            else:
                # Cache hit: payload is the JSON we stored above.
                sdk_data = parse_json(sdk_data)
            return sdk_data
        except AuthenticationError:
            print("redis error")
            return {}
        except Exception as e:
            # NOTE(review): broad catch + print; consider logging and narrowing.
            print(e, type(e))
            return {}
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/center_api/model/Operator.py
Operator.py
import json from yyxx_game_pkg.helpers.op_helper import OPHelper from yyxx_game_pkg.utils.xstring import parse_json class RechargeConfig(OPHelper): @classmethod def get_mapping_config(cls, oid="", gcid=""): try: sql = """ SELECT t1.id, IFNULL(t4.json, '{}') json FROM svr_channel t1 LEFT JOIN svr_channel_group t2 ON t1.group_id = t2.id LEFT JOIN svr_operator t3 ON t1.oid = t3.oid LEFT JOIN api_recharge_mapping t4 ON t1.id = t4.channel_auto_id WHERE t3.alias ='%s' AND t1.game_channel_id = '%s' ORDER BY t1.id DESC """ % ( oid, gcid, ) result = cls.mp().get_one(sql, cls.connection()) if result and result.get("json", ""): return parse_json(result["json"]) return {} except: return False @classmethod def get_recharge_config(cls): try: sql = "SELECT * FROM api_recharge_config" res = cls.mp().get_all(sql, cls.connection()) result = {} if res: for v in res: vid = v["id"] result[str(vid)] = v return result except: return {} @classmethod def get_check_recharge_config(cls, param_server_id): try: sql = ( f"SELECT * FROM api_check_recharge_config where sid = {param_server_id}" ) res = cls.mp().get_all(sql, cls.connection()) result = {} if res: for v in res: vid = v["recharge_id"] result[str(vid)] = v return result except: return False @classmethod def recharge_config(cls): redis_key = "api_recharge_platform" recharge_config = cls.redis().get_data(redis_key) if not recharge_config: recharge_config = cls.get_recharge_config() if recharge_config: cls.redis().set_data(redis_key, json.dumps(recharge_config)) if not isinstance(recharge_config, dict): recharge_config = json.loads(recharge_config) return recharge_config
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/center_api/model/RechargeConfig.py
RechargeConfig.py
class LogConfig: """ log config class 不同项目配置调整继承该类 """ DEBUG_LOGGER_NAME = "py_debug" LOCAL_LOGGER_NAME = "py_local" LOCAL_LOG_FILE = "/tmp/local.log" DEBUG_LOG_FILE = "/tmp/debug.log" @classmethod def dict_config(cls): """ LOG_CONFIG DICT """ log_config = { "version": 1, "disable_existing_loggers": False, "formatters": { "def_fmt": { "datefmt": "%Y-%m-%d %H:%M:%S", "class": "yyxx_game_pkg.logger.formatters.TraceFormatter", "format": ( "[%(asctime)s,%(msecs)d: %(levelname)s/%(process)d][%(filename)s:%(funcName)s:%(lineno)d]" "[%(trace_id)s] %(message)s" ), }, }, "handlers": { "rotate_file_handler": { "level": "INFO", "formatter": "def_fmt", "class": "yyxx_game_pkg.logger.handlers.MultiProcessTimedRotatingFileHandler", "filename": cls.LOCAL_LOG_FILE, "when": "MIDNIGHT", "backupCount": 7, }, "debug_file_handler": { "level": "DEBUG", "formatter": "def_fmt", "class": "logging.FileHandler", "filename": cls.DEBUG_LOG_FILE, }, "console_handler": { "level": "INFO", "formatter": "def_fmt", "class": "logging.StreamHandler", }, }, "loggers": { "": { # root logger "handlers": ["rotate_file_handler", "console_handler"], "level": "WARNING", "propagate": False, }, cls.LOCAL_LOGGER_NAME: { "handlers": ["rotate_file_handler", "console_handler"], "level": "INFO", "propagate": False, }, cls.DEBUG_LOGGER_NAME: { "handlers": ["debug_file_handler", "console_handler"], "level": "DEBUG", "propagate": False, }, }, } return log_config
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/logger/config.py
config.py
import logging.config
import traceback
from typing import Literal, Type, TypeVar
from pathlib import Path

from .config import LogConfig

# log level names accepted by the helpers below
LogLevelTyping = Literal["critical", "error", "warning", "info", "debug"]
# LogConfig class or any of its subclasses
LogConfigTyping = TypeVar("LogConfigTyping", bound=LogConfig)


def root_log(msg, level: LogLevelTyping = "warning", stacklevel: int = 2, addstacklevel=0):
    """
    root logger
    :param msg: message text
    :param level: message level
    :param stacklevel: how many frames up to attribute the log record to
        (default 2, i.e. the caller of this function)
    :param addstacklevel: extra frames on top of stacklevel, i.e. the record is
        attributed stacklevel+addstacklevel frames up. With this parameter the
        caller only needs to care about its own extra nesting.
    """
    getattr(logging.getLogger(), level.lower())(msg, stacklevel=stacklevel + addstacklevel)


class Log:
    """
    singleton Log
    """

    _instance = None  # singleton instance
    _init = False  # guards against repeated __init__ on the singleton
    config = None  # currently applied LogConfig class

    def __new__(cls, *args, **kwargs):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self, log_config: Type[LogConfigTyping] = LogConfig):
        if self._init:
            return
        self._init = True
        # apply the logging configuration
        self.init_config(log_config)

    @classmethod
    def init_config(cls, log_config: Type[LogConfigTyping] = LogConfig):
        """Apply a new configuration (no-op when it is already the active one)."""
        self = cls()
        if log_config == self.config:
            return
        try:
            self.config = log_config
            self.make_path()
            logging.config.dictConfig(log_config.dict_config())
            root_log("logger init")
        except ValueError as _e:
            traceback.print_exc()

    def make_path(self):
        """
        Check every handler's output file path; create missing directories/files.
        """
        handlers_config = self.config.dict_config().get("handlers", {})
        if not handlers_config:
            return
        file_paths = []
        for _, configs in handlers_config.items():
            for cfg_key, val in configs.items():
                if cfg_key != "filename":
                    continue
                file_paths.append(val)
        try:
            for path in file_paths:
                path_obj = Path(path)
                path_obj.parent.mkdir(parents=True, exist_ok=True)
                path_obj.touch(exist_ok=True)
        except OSError as _e:
            traceback.print_exc()

    def root_logger(self) -> logging.Logger:
        """
        root logger
        :return:
        """
        return logging.getLogger()

    def local_logger(self) -> logging.Logger:
        """
        local_logger
        :return:
        """
        return logging.getLogger(self.config.LOCAL_LOGGER_NAME)

    def debug_logger(self) -> logging.Logger:
        """
        debug_logger
        :return:
        """
        return logging.getLogger(self.config.DEBUG_LOGGER_NAME)

    def local_log(self, msg: str, level: LogLevelTyping = "info", stacklevel: int = 2, addstacklevel=0, **kwargs):
        """
        Normal rotating log; output path is config.LOG_FILE.
        :param msg: message text
        :param level: message level
        :param stacklevel: frames up to attribute the record to (default 2 = caller)
        :param addstacklevel: extra frames on top of stacklevel
        :param kwargs: extra parameters (unexpected ones are warned about)
        :return:
        """
        if kwargs:
            self.root_logger().warning(f"[yyxx-Log] Unexpected parameters => {kwargs}")
        getattr(self.local_logger(), level.lower())(msg, stacklevel=stacklevel + addstacklevel)

    def debug_log(self, msg: str, level: LogLevelTyping = "info", stacklevel: int = 2, addstacklevel=0, **kwargs):
        """
        Debug log, not rotated; output path is config.LOG_FILE.
        :param msg: message text
        :param level: message level
        :param stacklevel: frames up to attribute the record to (default 2 = caller)
        :param addstacklevel: extra frames on top of stacklevel
        :param kwargs: extra parameters (unexpected ones are warned about)
        :return:
        """
        if kwargs:
            self.root_logger().warning(f"[yyxx-Log] Unexpected parameters => {kwargs}")
        getattr(self.debug_logger(), level.lower())(msg, stacklevel=stacklevel + addstacklevel)


# Module-level convenience handles built from the singleton.
logger = Log()
local_logger = logger.local_logger()
local_log = logger.local_log
debug_logger = logger.debug_logger()
debug_log = logger.debug_log
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/logger/log.py
log.py
import os
import time
import fcntl
import traceback
import logging.handlers


class MultiProcessTimedRotatingFileHandler(logging.handlers.TimedRotatingFileHandler):
    """
    TimedRotatingFileHandler made safe for multiple processes: the rename at
    rollover is guarded by an exclusive flock, and a process that loses the
    race simply reschedules instead of rotating again.
    """

    def rollover_at(self):
        """
        Compute and store the next rollover timestamp (self.rolloverAt).
        """
        current_time = int(time.time())
        dst_now = time.localtime(current_time)[-1]
        new_rollover_at = self.computeRollover(current_time)
        while new_rollover_at <= current_time:
            new_rollover_at = new_rollover_at + self.interval
        # If DST changes and midnight or weekly rollover, adjust for this.
        if (self.when == "MIDNIGHT" or self.when.startswith("W")) and not self.utc:
            dst_at_rollover = time.localtime(new_rollover_at)[-1]
            if dst_now != dst_at_rollover:
                if (
                    not dst_now
                ):  # DST kicks in before next rollover, so we need to deduct an hour
                    addend = -3600
                else:  # DST bows out before next rollover, so we need to add an hour
                    addend = 3600
                # FIX: the adjustment belongs on the rollover timestamp itself;
                # the original added it to the DST flag (dst_at_rollover), so
                # MIDNIGHT/weekly rollovers were an hour off across DST changes.
                # This matches stdlib TimedRotatingFileHandler.computeRollover.
                new_rollover_at += addend
        self.rolloverAt = new_rollover_at

    def doRollover(self):
        """
        do a rollover; in this case, a date/time stamp is appended to the filename
        when the rollover happens.  However, you want the file to be named for the
        start of the interval, not the current time.  If there is a backup count,
        then we have to get a list of matching filenames, sort them and remove
        the one with the oldest suffix.
        """
        if self.stream:
            self.stream.close()
            self.stream = None
        # get the time that this sequence started at and make it a TimeTuple
        current_time = int(time.time())
        dst_now = time.localtime(current_time)[-1]
        diff_t = self.rolloverAt - self.interval
        if self.utc:
            time_tuple = time.gmtime(diff_t)
        else:
            time_tuple = time.localtime(diff_t)
            dst_then = time_tuple[-1]
            if dst_now != dst_then:
                if dst_now:
                    addend = 3600
                else:
                    addend = -3600
                time_tuple = time.localtime(diff_t + addend)
        dfn = self.baseFilename + "." + time.strftime(self.suffix, time_tuple)
        if os.path.exists(dfn):
            # Another process already rotated; just schedule the next rollover.
            self.rollover_at()
            return
        # Issue 18940: A file may not have been created if delay is True.
        if not os.path.exists(dfn) and os.path.exists(self.baseFilename):
            # lock, then rename the file
            try:
                with open(self.baseFilename, "a", encoding="utf-8") as file:
                    # LOCK_EX exclusive, LOCK_NB non-blocking
                    fcntl.flock(file.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)  # acquire file lock
                    os.rename(self.baseFilename, dfn)  # rename the file
                    fcntl.flock(file.fileno(), fcntl.LOCK_UN)  # release file lock
            except IOError:
                # Lost the lock race: another process is rotating right now.
                traceback.print_exc()
                return
        if self.backupCount > 0:
            for _d in self.getFilesToDelete():
                os.remove(_d)
        if not self.delay:
            self.stream = self._open()
        self.rollover_at()
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/logger/handlers.py
handlers.py
import pandas as pd

from yyxx_game_pkg.dbops.base import DatabaseOperation
from yyxx_game_pkg.utils import xListStr


class MysqlOperation(DatabaseOperation):
    """
    MySQL database operations
    """

    def execute(self, sql, conn, params=None):
        """
        Execute the SQL and commit (no result returned).
        :param sql:
        :param conn: connection; closed on exit of the ``with`` block
        :param params: optional parameters for a parameterized query
        :return:
        """
        sql = self.check_sql(sql)
        with conn:
            with conn.cursor() as cursor:
                if params is None:
                    cursor.execute(sql)
                else:
                    cursor.execute(sql, params)
                conn.commit()

    def get_one(self, sql, conn, params=None):
        """
        Fetch a single row (tuple-like structure).
        :param sql:
        :param conn:
        :param params:
        :return:
        """
        sql = self.check_sql(sql)
        with conn:
            with conn.cursor() as cursor:
                if params is None:
                    cursor.execute(sql)
                else:
                    cursor.execute(sql, params)
                return cursor.fetchone()

    def get_all(self, sql, conn, params=None):
        """
        Fetch all rows (list of tuple-like structures).
        :param sql:
        :param conn:
        :param params:
        :return:
        """
        sql = self.check_sql(sql)
        with conn:
            with conn.cursor() as cursor:
                if params is None:
                    cursor.execute(sql)
                else:
                    cursor.execute(sql, params)
                return cursor.fetchall()

    def get_one_df(self, *args, **kwargs):
        """
        Fetch a single result as a dataframe (not implemented here).
        :param args:
        :param kwargs:
        :return:
        """

    def get_all_df(self, sql, connection):
        """
        Fetch all rows as a pandas dataframe.
        :param sql:
        :param connection:
        :return:
        """
        return pd.read_sql(sql, connection)

    def insert(self, conn, save_table, results):
        """
        Bulk-insert rows, inferring the column list from the table schema.
        :param conn:
        :param save_table: target table name
        :param results: iterable of row tuples
        :return:
        """

        def get_field_str(_data):
            """
            Build the "%s,%s,..." placeholder string from the row width.
            :param _data:
            :return:
            """
            _size = len(_data[0])
            _list = []
            for _ in range(_size):
                _list.append("%s")
            _str = ",".join(_list)
            return _str

        def get_table_desc(_table_name, _data_list, _cs):
            """
            Build the column list from ``DESCRIBE`` output, truncated to the row width.
            :param _table_name:
            :param _data_list:
            :return:
            """
            sql = f"describe {_table_name}"
            _cs.execute(sql)
            _desc = _cs.fetchall()
            _column = []
            for _data in _desc:
                if _data[0] in ("id", "create_time"):
                    # skip auto-increment id and default insert-time columns
                    continue
                _column.append(_data[0])
            _size = len(_data_list[0])
            table_column = _column[:_size]
            return ",".join(table_column)

        insert_sql_template = (
            "INSERT INTO {save_table} ({column_value}) VALUES({data_value})"
        )
        # Split into chunks to bound each executemany batch.
        results = xListStr.split_list(results)
        with conn:
            with conn.cursor() as cursor:
                for result in results:
                    if not result:
                        continue
                    field_str = get_field_str(result)
                    column_value = get_table_desc(save_table, result, cursor)
                    insert_sql = insert_sql_template.format(
                        save_table=save_table,
                        column_value=column_value,
                        data_value=field_str,
                    )
                    cursor.executemany(insert_sql, result)
                conn.commit()
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/dbops/mysql_op.py
mysql_op.py
import re

import requests
import numpy as np
import pandas as pd
import ujson as json


def trans_unsupported_types(val):
    """
    Recursively convert values json.dumps cannot serialize: np.int64, bytes, ...

    Dicts are rebuilt (keys converted too), lists are converted in place.

    :param val: arbitrary value or container
    :return: converted value
    """
    if isinstance(val, dict):
        new_dict = {}
        for k, _v in val.items():
            k = trans_unsupported_types(k)
            _v = trans_unsupported_types(_v)
            new_dict[k] = _v
        return new_dict
    if isinstance(val, list):
        for idx, _v in enumerate(val):
            _v = trans_unsupported_types(_v)
            val[idx] = _v
    elif isinstance(val, np.int64):
        val = int(val)
    elif isinstance(val, bytes):
        val = val.decode(encoding="utf8")
    return val


class DasApiException(Exception):
    pass


class DasApiChQueryException(DasApiException):
    pass


class DasApiChExecuteException(DasApiException):
    pass


class DasApiMongoQueryException(DasApiException):
    pass


class DasApiEsQueryException(DasApiException):
    pass


class DasApiEsInsertException(DasApiException):
    pass


class DasApi:
    """
    HTTP client for the DAS data-access service: mongo / elasticsearch /
    clickhouse queries and inserts, results returned as pandas DataFrames.
    """

    @staticmethod
    def _post(das_url, post_type, post_data):
        # POST json payload to {das_url}/{post_type}; returns (ok, raw bytes)
        url = f"{das_url}/{post_type}"
        post_data = trans_unsupported_types(post_data)
        res = requests.post(json=post_data, url=url, timeout=600)
        return res.ok, res.content

    @staticmethod
    def mongo_query(das_url, post_data):
        """
        Query a mongo database with a sql statement.

        :param das_url: das http url
        :param post_data: {
            'sql': sql,          # sql statement (both sql and js_sql supported)
            'server': mongo_url  # mongo connection
        }
        :return: pandas.DataFrame
        :raises DasApiMongoQueryException: on non-2xx response
        """
        b_ok, res = DasApi._post(das_url, "das/mgo/query", post_data=post_data)
        if not b_ok:
            raise DasApiMongoQueryException(res)
        # unwrap mongo extended-json longs: {"$numberLong": "123"} -> 123
        res = re.sub(
            r'{\\"\$numberLong\\": \\"\d+\\"}',
            lambda m: re.search(r"\d+", m.group()).group(),
            res.decode("utf-8"),
        )
        data = json.loads(res)
        data_list = data["data"]
        res_list = []
        if data_list:
            for data in data_list:
                # each entry is itself a json-encoded document string
                res_list.append(json.loads(data))
        res_df = pd.DataFrame(res_list)
        return res_df

    @staticmethod
    def es_query(das_url, post_data):
        """
        Query elasticsearch with a sql statement.

        :param das_url: das http url
        :param post_data: {
            "sql": sql,                  # sql statement
            "engine": 1,                 # es engine version 1: official 2: open distro
            "search_from": search_from,  # paging offset, max 50k
            "fetch_size": fetch_size     # rows per query
        }
        :return: pandas.DataFrame
        :raises DasApiEsQueryException: on non-2xx response
        """
        b_ok, res = DasApi._post(das_url, "das/es/query", post_data=post_data)
        if not b_ok:
            raise DasApiEsQueryException(res)
        engine = post_data.get("engine", 0)
        use_search = post_data.get("search_from", -1) >= 0
        data = json.loads(res)
        if engine == 0:
            # opendistro response shape
            col_dict_lst = data["schema"]
            data_rows = data["datarows"]
            # total = data["total"]
            # size = data["size"]
            # status = data["status"]
        else:
            # official engine response shape
            if use_search:
                # paged search returns ready-made row dicts
                data_rows = data["map_rows"]
                return pd.DataFrame(data_rows)
            col_dict_lst = data["columns"]
            data_rows = data["rows"]
        df_cols = [col_dict["name"] for col_dict in col_dict_lst]
        if not data_rows:
            return pd.DataFrame(columns=df_cols)
        res_df = pd.DataFrame(np.array(data_rows), columns=df_cols)
        return res_df

    @staticmethod
    def es_insert(das_url, post_data):
        """
        Insert rows into elasticsearch (via kafka).

        :param das_url: das http url
        :param post_data: {
            "kafka_addr": kafka_addr,  # kafka address
            "topic": topic,            # kafka topic
            "data_rows": data_rows     # data rows
        }
        :return: raw response content
        :raises DasApiEsInsertException: on non-2xx response
        """
        b_ok, res = DasApi._post(das_url, "das/es/insert", post_data=post_data)
        if not b_ok:
            raise DasApiEsInsertException(res)
        return res

    @staticmethod
    def ch_query(das_url, post_data):
        """
        Query clickhouse with a sql statement.

        :param das_url: das http url
        :param post_data: {
            "sql": sql,  # sql statement
        }
        :return: pandas.DataFrame
        :raises DasApiChQueryException: on non-2xx response
        """
        b_ok, res = DasApi._post(das_url, "/das/ch/query", post_data=post_data)
        if not b_ok:
            raise DasApiChQueryException(res)
        data = json.loads(res)
        res_df = pd.DataFrame(data["datarows"], columns=data["columns"])
        return res_df

    @staticmethod
    def ch_execute(das_url, post_data):
        """
        Execute a clickhouse statement (data insert).

        :param das_url: das http url
        :param post_data: {
            "sql": sql,  # sql statement
        }
        :return: True on success
        :raises DasApiChExecuteException: on non-2xx response
        """
        b_ok, res = DasApi._post(das_url, "/das/ch/exec", post_data=post_data)
        if not b_ok:
            raise DasApiChExecuteException(res)
        return b_ok
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/dbops/das_api.py
das_api.py
from abc import abstractmethod

import pandas as pd
from pymongo import MongoClient

from yyxx_game_pkg.dbops.base import DatabaseOperation
from yyxx_game_pkg.dbops.mongo_op.sql2mongo import sql_to_mongo_spec
from yyxx_game_pkg.utils.decorator import (
    except_monitor,
    log_execute_time_monitor,
    singleton_unique,
)


@singleton_unique
class SingletonMongoClient(MongoClient):
    """
    Singleton mongo client — one instance per distinct connection uri
    (uniqueness enforced by the singleton_unique decorator).
    """

    def __init__(self, mongo_uri):
        super().__init__(mongo_uri)

    def query_sql(self, sql, collection=None):
        """
        SQL query interface — SELECT only, JOIN not supported.
        Aliases only via the "as" keyword (recognizes "name as player_name",
        not "name player_name").
        Supported predicates: = > < != in like
        Supported aggregates: [group by [cols]] sum, count, avg, min, max
        Sorting: order by desc[asc]
        Paging: limit 0 [,30]

        :param sql: SELECT statement to translate into an aggregate pipeline
        :param collection: database name (indexes self[collection]) — required
        :return: pandas.DataFrame of the aggregation result
        """
        assert collection is not None
        mongo_spec = sql_to_mongo_spec(sql)
        pipeline = []
        for k, val in mongo_spec.items():
            if k == "documents":
                # "documents" holds the target collection name, not a stage
                continue
            if not val:
                continue
            pipeline.append({k: val})
        docs = mongo_spec.get("documents")
        cursor = self[collection][docs].aggregate(pipeline)
        return pd.DataFrame(list(cursor))


class PyMongoClient:
    """
    Thin wrapper pairing a SingletonMongoClient with a default database name;
    attribute/item access is delegated to the underlying client.
    """

    def __init__(self, mongo_uri, db_name):
        self.db_name = db_name
        self.mgo_client = SingletonMongoClient(mongo_uri)

    def __getattr__(self, item):
        # delegate unknown attributes to the wrapped client
        return self.mgo_client.__getattr__(item)

    def __getitem__(self, item):
        return self.mgo_client.__getitem__(item)

    @property
    def game_db(self):
        """
        The default database handle for this client.
        :return: pymongo Database
        """
        return self.mgo_client[self.db_name]

    def query(self, sql):
        """
        Run a SQL query against the default database.
        :param sql: SELECT statement
        :return: pandas.DataFrame
        """
        return self.mgo_client.query_sql(sql, self.db_name)


class MongoOperation(DatabaseOperation):
    """
    Mongo-backed DatabaseOperation; subclasses supply connection info via
    get_mongo_info.
    """

    @abstractmethod
    def get_mongo_info(self, *args, **kwargs) -> tuple:
        """
        Resolve connection info for a query.
        :param args:
        :param kwargs:
        :return: (mongo_url, db_name) pair
        """

    @staticmethod
    def new_client(mongo_url, game_db) -> PyMongoClient:
        """
        Build a PyMongoClient (underlying connection is singleton-per-uri).
        :param mongo_url: mongo connection uri
        :param game_db: default database name
        :return: PyMongoClient
        """
        mgo_client = PyMongoClient(mongo_url, game_db)
        return mgo_client

    @except_monitor
    @log_execute_time_monitor()
    def get_one_df(self, sql, *args, **kwargs):
        """
        Run *sql* and return the first row (a Series), or the empty
        DataFrame itself when there is no result.
        :param sql: SELECT statement
        :param args: forwarded to get_mongo_info
        :param kwargs: forwarded to get_mongo_info
        :return: pandas.Series or empty pandas.DataFrame
        """
        mongo_url, game_db = self.get_mongo_info(*args, **kwargs)
        res_df = self.new_client(mongo_url, game_db).query(sql)
        return res_df.iloc[0] if not res_df.empty else res_df

    @except_monitor
    @log_execute_time_monitor()
    def get_all_df(self, sql, *args, **kwargs):
        """
        Run *sql* and return the full result set.
        :param sql: SELECT statement
        :param args: forwarded to get_mongo_info
        :param kwargs: forwarded to get_mongo_info
        :return: pandas.DataFrame
        """
        mongo_url, game_db = self.get_mongo_info(*args, **kwargs)
        res_df = self.new_client(mongo_url, game_db).query(sql)
        return res_df
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/dbops/mongo_op/mongo_op.py
mongo_op.py
from pyparsing import (
    Word,
    alphas,
    CaselessKeyword,
    Group,
    Optional,
    ZeroOrMore,
    Forward,
    Suppress,
    alphanums,
    OneOrMore,
    quotedString,
    Combine,
    Keyword,
    Literal,
    replaceWith,
    oneOf,
    nums,
    removeQuotes,
    QuotedString,
    Dict,
)

# keyword declare
LPAREN, RPAREN = map(Suppress, "()")
EXPLAIN = CaselessKeyword("EXPLAIN").setParseAction(lambda t: {"explain": True})
SELECT = Suppress(CaselessKeyword("SELECT"))
DISTINCT = CaselessKeyword("distinct")
COUNT = CaselessKeyword("count")
WHERE = Suppress(CaselessKeyword("WHERE"))
FROM = Suppress(CaselessKeyword("FROM"))
CONDITIONS = oneOf("= != < > <= >= like", caseless=True)
AND = CaselessKeyword("and")
OR = CaselessKeyword("or")
ORDER_BY = Suppress(CaselessKeyword("ORDER BY"))
GROUP_BY = Suppress(CaselessKeyword("GROUP BY"))
DESC = CaselessKeyword("desc")
ASC = CaselessKeyword("asc")
LIMIT = Suppress(CaselessKeyword("LIMIT"))
SKIP = Suppress(CaselessKeyword("SKIP"))

# aggregate func
AGG_SUM = CaselessKeyword("sum")
AGG_AVG = CaselessKeyword("avg")
AGG_MAX = CaselessKeyword("max")
AGG_MIN = CaselessKeyword("min")
AGG_WORDS = AGG_SUM | AGG_AVG | AGG_MIN | AGG_MAX


def sql_to_spec(query_sql):
    """
    Convert a SQL query to a spec dict for parsing.
    Support Sql Statement [select, from ,where, limit, count(*), order by, group by]

    param query_sql: string. standard sql
    return: None or a dictionary
    """
    # morphology
    word_match = Word(alphanums + "._") | quotedString
    optional_as = Optional(Suppress(CaselessKeyword("as")) + word_match)
    word_as_match = Group(word_match + optional_as)
    number = Word(nums)

    # select clause: "*", plain/aliased columns, count(*), agg(col)
    select_word = word_as_match | Group(Keyword("*"))
    count_ = Group(COUNT + LPAREN + Keyword("*") + RPAREN)
    count_word = Group(count_ + optional_as)
    select_agg = Group(AGG_WORDS + Suppress(LPAREN) + word_match + Suppress(RPAREN))
    select_agg_word = Group(select_agg + optional_as)
    select_complex = count_word | select_agg_word | select_word
    select_clause = (
        SELECT + select_complex + ZeroOrMore(Suppress(",") + select_complex)
    ).setParseAction(lambda matches: {"select": matches.asList()})

    # from clause
    from_clause = (FROM + word_match).setParseAction(
        lambda matches: {"from": matches[0]}
    )

    # where clause
    in_condition = (
        word_match
        + CaselessKeyword("in")
        + LPAREN
        + (word_match + ZeroOrMore(Suppress(",") + word_match))
        + RPAREN
    )

    def condition_prefix(matches=None):
        # normalize rhs values: int-cast unquoted digit runs, strip quotes
        vals = matches[2:]
        fix_vals = []
        for val in vals:
            if val.find("'") == -1 and val.isdigit():
                val = int(val)
            else:
                val = val.strip("'")
            fix_vals.append(val)
        return [matches[0:2] + fix_vals]

    condition = (in_condition | (word_match + CONDITIONS + word_match)).setParseAction(
        condition_prefix
    )

    def condition_combine(matches=None):
        # fold "cond and cond or cond ..." left-to-right into nested
        # {bool_op: [lhs, rhs]} dicts
        if not matches:
            return {}
        if len(matches) == 1:
            return matches
        res = {f"{matches[1]}": [matches[0], matches[2]]}
        left_ = matches[3:]
        for i in range(0, len(left_), 2):
            key_word, cond = left_[i], left_[i + 1]
            res = {f"{key_word}": [res, cond]}
        return res

    term = (
        OneOrMore(condition) + ZeroOrMore((AND + condition) | (OR + condition))
    ).setParseAction(condition_combine)
    where_clause = (WHERE + term).setParseAction(
        lambda matches: {"where": matches.asList()}
    )

    # group by clause
    group_by_clause = (
        GROUP_BY + word_match + ZeroOrMore(Suppress(",") + word_match)
    ).setParseAction(lambda matches: {"group": matches.asList()})

    # order by clause
    order_by_word = Group(word_match + Optional(DESC | ASC))
    order_by_clause = (
        ORDER_BY + order_by_word + ZeroOrMore(Suppress(",") + order_by_word)
    ).setParseAction(lambda matches: {"order": matches.asList()})

    # limit clause: LIMIT offset[, count]
    def limit_prefix(matches=None):
        matches = list(map(int, matches))
        return {"limit": matches}

    limit_clause = (LIMIT + number + Optional(Suppress(",") + number)).setParseAction(
        limit_prefix
    )

    list_term = (
        Optional(EXPLAIN)
        + select_clause
        + from_clause
        + Optional(where_clause)
        + Optional(group_by_clause)
        + Optional(order_by_clause)
        + Optional(limit_clause)
    )
    expr = Forward()
    expr << list_term
    ret = expr.parseString(query_sql.strip())
    # each parse action yielded a one-key dict; merge them into one spec
    spec_dict = {}
    for d in ret:
        spec_dict.update(d)
    return spec_dict


# SQL operator -> mongo query operator
COND_KEYWORDS = {
    "=": "$eq",
    "!=": "$ne",
    ">": "$gt",
    ">=": "$gte",
    "<": "$lt",
    "<=": "$lte",
    "like": "$regex",
    "or": "$or",
    "and": "$and",
    "in": "$in",
}


def create_mongo_spec(spec_dict):
    """
    param sql: string. standard sql
    return: dict mongo aggregate pipeline params
    """
    # parsing from
    from_spec = spec_dict.get("from")
    if not from_spec:
        raise ValueError(f"Error 'from' spec {spec_dict}")
    spec_parse_results = {}
    # parsing select
    op_func_map = {
        "count": "$sum",
        "sum": "$sum",
        "avg": "$avg",
        "max": "$max",
        "min": "$min",
    }
    select_spec = spec_dict.get("select")
    select_results = {
        "$project": {},
        "$addFields": {},
        "$group": {},
        "documents": from_spec,
    }
    drop_id = True
    for lst_field in select_spec:
        if len(lst_field) == 2:
            real_field, as_field = lst_field
        else:
            real_field, as_field = lst_field[0], None
        if isinstance(real_field, str):
            if not isinstance(real_field, str):
                # NOTE(review): dead branch — condition can never be true here
                continue
            if real_field == "*":
                # select * : keep everything including _id
                drop_id = False
                break
            if real_field == "_id":
                drop_id = False
            if as_field:
                select_results["$project"].update({f"{as_field}": f"${real_field}"})
            else:
                select_results["$project"].update({real_field: 1})
        elif isinstance(real_field, list):
            # aggregate select: [count, sum, avg, ...]
            select_results["$group"].update({"_id": None})
            agg_func, agg_key = real_field
            real_field = f"{agg_func}({agg_key})"
            op_func = op_func_map[agg_func]
            op_val = 1 if agg_key == "*" else f"${agg_key}"
            if as_field:
                select_results["$group"].update({as_field: {op_func: op_val}})
            else:
                select_results["$group"].update({real_field: {op_func: op_val}})
    if drop_id:
        select_results["$project"].update({"_id": 0})

    # where parsing
    where_spec = spec_dict.get("where")
    where_results = {}
    if where_spec:
        where_spec = where_spec[0]
        where_results.update({"$match": combine_where(where_spec)})
        if select_results["$project"]:
            # if project is empty means "select *" don't need to update other keys
            where_projects = update_projects(where_spec, {})
            select_results["$project"].update(where_projects)

    # limit parsing: [count] or [offset, count]
    limit_spec = spec_dict.get("limit")
    limit_results = {}
    if limit_spec:
        if len(limit_spec) == 1:
            limit_results["$limit"] = limit_spec[0]
        else:
            limit_results["$skip"] = limit_spec[0]
            limit_results["$limit"] = limit_spec[1]

    # group by parsing
    group_spec = spec_dict.get("group")
    group_id = {}
    if group_spec:
        for group_key in group_spec:
            group_id[group_key] = f"${group_key}"
        select_results["$group"].update({"_id": group_id})

    # order by parsing (1 asc, -1 desc; asc by default)
    order_spec = spec_dict.get("order")
    order_results = {}
    if order_spec:
        order_results["$sort"] = {}
        for order_lst in order_spec:
            if len(order_lst) == 1:
                order_results["$sort"].update({order_lst[0]: 1})
            else:
                asc = 1 if order_lst[1] == "asc" else -1
                order_results["$sort"].update({order_lst[0]: asc})

    spec_parse_results.update(select_results)
    spec_parse_results.update(where_results)
    spec_parse_results.update(limit_results)
    spec_parse_results.update(order_results)
    return spec_parse_results


def combine_where(where_spec):
    """
    Recursively convert the parsed where spec into a mongo $match expression.
    Leaf: [key, op, val...] -> {key: {$op: val}}; nodes: {and/or: [...]}.
    """
    if isinstance(where_spec, list):
        if isinstance(where_spec[0], str):
            key, op_word = where_spec[:2]
            vals = where_spec[2:]
            op_word = COND_KEYWORDS[op_word]
            if op_word == "$in":
                val = vals
            else:
                val = vals[0]
                if op_word == "$regex":
                    # translate SQL LIKE wildcards into regex anchors
                    val = val.strip("'")
                    if val[0] == "%":
                        val = val[1:]
                    else:
                        val = f"^{val}"
                    if val[-1] == "%":
                        val = val[:-1]
                    else:
                        val = f"{val}$"
            return {key: {op_word: val}}
        res = []
        for spec in where_spec:
            res.append(combine_where(spec))
        return res
    for op_word, vals in where_spec.items():
        val_res = combine_where(vals)
        return {COND_KEYWORDS[op_word]: val_res}


def update_projects(where_spec, projects):
    """
    auto update where key to projects
    """
    if isinstance(where_spec, list):
        if isinstance(where_spec[0], str):
            key, _ = where_spec[:2]
            projects.update({key: 1})
            return projects
        res = []
        for spec in where_spec:
            res.append(update_projects(spec, projects))
        return res
    for _, vals in where_spec.items():
        update_projects(vals, projects)
    return projects


if __name__ == "__main__":
    # todo unit test
    sql = """
        select * from player where pid = 4868020 and sid = 2
    """
    sql_spec = sql_to_spec(sql)
    print(sql_spec)
    mongo_spec = create_mongo_spec(sql_spec)
    print(mongo_spec)
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/dbops/mongo_op/sql2mongo/sql2mongo.py
sql2mongo.py
import argparse

from celery import Celery

from yyxx_game_pkg.stat.log import root_log


class CeleryInstance:
    """
    Celery bootstrap helpers: build a configured Celery app from the
    environment and expose the current task id.
    """

    # region external
    @staticmethod
    def get_celery_instance():
        """
        Load celery config and return a Celery instance.

        Config comes from the module named by the CELERY_CONFIG_MODULE env
        var; when a JAEGER section is present, tracing is wired up too.

        :return: configured Celery application
        """
        celery_name = CeleryInstance._args().name
        _app = Celery(celery_name)  # instantiate celery
        _app.config_from_envvar("CELERY_CONFIG_MODULE")  # load config
        conf_jaeger = _app.conf.get("JAEGER")
        if conf_jaeger:
            # lazy imports: tracing deps only needed when JAEGER is configured
            from opentelemetry.instrumentation.celery import CeleryInstrumentor
            from opentelemetry.instrumentation.requests import RequestsInstrumentor
            from yyxx_game_pkg.xtrace.helper import register_to_jaeger

            if celery_name:
                conf_jaeger["service_name"] += f"-{celery_name}"
            register_to_jaeger(**conf_jaeger)
            CeleryInstrumentor().instrument()
            RequestsInstrumentor().instrument()
            root_log(f"<CeleryInstance> tracer on, jaeger:{conf_jaeger}")
        log_str = (
            f"<CeleryInstance> get_celery_instance, app_name:{celery_name}, config:{_app.conf}, publish_flag:"
            f"{_app.conf.get('PUBLISH_FLAG')}"
        )
        root_log(log_str)
        return _app

    @staticmethod
    def get_current_task_id():
        """
        Current task id (if running inside a task), else -1.

        :return: task id string, or -1 when no task context exists
        """
        from celery import current_task

        try:
            return current_task.request.id
        except Exception:
            # fix: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt
            return -1

    # endregion

    # region inner
    @staticmethod
    def _args():
        """
        Command-line args: -n service name. Unknown args are ignored so this
        coexists with other argv consumers.

        :return: parsed argparse.Namespace
        """
        parser = argparse.ArgumentParser(allow_abbrev=False)
        parser.add_argument("-n", "--name")
        args = parser.parse_known_args()
        return args[0]

    # endregion


# region celery instantiation
"""
app.conf.get('worker_max_tasks_per_child', 0)
"""
# app = CeleryInstance.get_celery_instance()
# endregion
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/stat/xcelery/instance.py
instance.py
import traceback

from yyxx_game_pkg.stat.dispatch.common.common import fastapi_except_monitor
from yyxx_game_pkg.stat.log import local_log
from yyxx_game_pkg.stat.dispatch.core.manager import RuleManager
from yyxx_game_pkg.stat.dispatch.core.structs import ProtoSchedule
from yyxx_game_pkg.stat.dispatch.core.workflows import WorkFlowMethods

# region logic entry
from yyxx_game_pkg.xtrace.helper import get_current_trace_id


@fastapi_except_monitor
def task_logic(msg):
    """
    Main dispatch entry: parse the schedule message into celery signatures,
    then dispatch them.

    :param msg: serialized schedule protocol
    :return: list of dispatched master task ids ([] on parse failure)
    """
    # parse the command into a list of task signatures
    task_sig_list = parse_task(msg)
    if not task_sig_list:
        err_msg = f"<task_logic> main_dispatch_logic, parse task failed: {traceback.format_exc()}"
        local_log(err_msg)
        return []
    # dispatch
    return dispatch_tasks(task_sig_list)


# endregion


# region task parsing
def parse_task(schedule):
    """
    Parse a serialized schedule into a list of signatures.

    :param schedule: serialized schedule protocol
    :return: list of celery signatures ([] when validation or build fails)
    """
    task_sig_list = []
    # deserialize
    schedule = ProtoSchedule().to_schedule(schedule)
    instance_name = schedule.SCHEDULE_DISPATCH_RULE_INSTANCE_NAME
    # validate the queue name
    if schedule.SCHEDULE_QUEUE_NAME is None:
        local_log(
            f"<parse_command_data> SCHEDULE_QUEUE_NAME is None, schedule:{schedule}"
        )
        return task_sig_list
    # look up the parsing rule for this schedule
    rule = RuleManager().rules.get(instance_name)
    if not rule:
        local_log(f"<parse_command_data> rule is None, instance_name:{instance_name}")
        return task_sig_list
    # build the signature list
    schedule_sig = rule.build(schedule)
    if not schedule_sig:
        return task_sig_list
    # link
    if isinstance(schedule_sig, list):
        task_sig_list.extend(schedule_sig)
    else:
        task_sig_list.append(schedule_sig)
    return task_sig_list


# endregion


# region task dispatch
def _dispatch_one_task(task_sig, queue_priority, queue_name=None):
    """
    Apply one signature asynchronously and collect its task ids.

    :param task_sig: celery signature (or canvas) to submit
    :param queue_priority: broker priority for the submission
    :param queue_name: optional forced queue name; None uses the sig's queue
    :return: (master task id, list of sub task ids)
    """
    common_options = {
        "priority": queue_priority,
        # 'serializer': 'pickle'
        "headers": {"X-Trace-ID": get_current_trace_id()},
    }
    if queue_name is not None:
        # force the queue name
        res = task_sig.apply_async(queue=queue_name, **common_options)
    else:
        # dynamic queue name (taken from the signature's own options)
        res = task_sig.apply_async(**common_options)
    # walk the result graph collecting task ids
    task_id_list = []
    WorkFlowMethods.fill_res_task_id_list(res, task_id_list)
    return res.id, task_id_list


def dispatch_tasks(task_sig_list):
    """
    Dispatch every signature in *task_sig_list*.

    :param task_sig_list: list of celery signatures
    :return: list of master task ids
    """
    task_id_list = []  # task id list
    task_type_list = []  # task type list (for logging)
    task_queue_flag_list = []  # task queue flag list (for logging)
    task_cnt = 0  # task count (for logging)
    max_sig_cnt = 0  # peak signatures per submission (for logging)
    for task_sig in task_sig_list:
        task_type_list.append(type(task_sig))
        queue_flag = WorkFlowMethods.get_task_sig_queue_name(task_sig)
        task_queue_flag_list.append(queue_flag)
        # parse queue_flag into queue name and priority
        queue_name, queue_priority = _parse_queue_flag(queue_flag)
        # count contained signatures
        WorkFlowMethods.reset_max_sig_cnt()
        task_cnt += WorkFlowMethods.calculate_sig_cnt(task_sig)
        max_sig_cnt = max(WorkFlowMethods.get_max_sig_cnt(), max_sig_cnt)
        # submit
        m_task_id, s_task_id_list = _dispatch_one_task(task_sig, queue_priority)
        task_id_list.append(m_task_id)
        local_log(
            f"<dispatch_tasks> record_task_id, queue:{queue_name}, "
            f"priority:{queue_priority}, m_task_id:{m_task_id}, "
            f"s_task_len:{len(s_task_id_list)}, s_task_id_list:{s_task_id_list}"
        )
    local_log(
        f"<dispatch_tasks> dispatch_tasks, queue_name:{task_queue_flag_list} "
        f"task_cnt:{task_cnt}, max_sig_cnt:{max_sig_cnt}"
    )
    return task_id_list


def _parse_queue_flag(queue_flag):
    """
    Parse a "queue[@priority]" flag.

    :param queue_flag: e.g. "my_queue@5"; None yields the default priority
    :return: (queue_name or None, int priority capped at 10)
    """
    default_priority = 3  # default queue priority
    if queue_flag is None:
        # fix: previously returned `[None], default_priority` — a one-element
        # list where every other path returns a plain string; normalized to None
        return None, default_priority
    res_list = queue_flag.split("@")
    queue_name = res_list[0]
    priority = min(int(res_list[1]), 10) if len(res_list) > 1 else default_priority
    return queue_name, priority


# endregion
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/stat/dispatch/logic/task_logic.py
task_logic.py
from yyxx_game_pkg.stat.log import local_log
from yyxx_game_pkg.stat.dispatch.core.manager import rule_register
from yyxx_game_pkg.stat.dispatch.core.workflows import WorkFlowMethods
from yyxx_game_pkg.stat.dispatch.logic.task_logic import parse_task
from yyxx_game_pkg.stat.dispatch.rules.rule_base import RuleBase


@rule_register(inst_name_list=["work_flow_instance"])
class DispatchRuleWorkFlow(RuleBase):
    """
    Dispatch rule for workflow schedules: turns a multi-group, multi-step
    flow description into chained celery signatures.
    """

    def __init__(self):
        # fix: was `super(self.__class__, self).__init__()` — binding to
        # self.__class__ recurses infinitely if this class is ever subclassed
        super().__init__()

    # region inherited methods
    def build(self, schedule):
        """
        Build dispatch task signatures.
        :return: [group, chord, chain, signature]
        """
        return self.__logic_make_sig(schedule)

    # endregion

    # region internal methods
    def __logic_make_sig(self, schedule):
        """
        Build one linked signature per flow group in the schedule content.
        :return: list of signatures (groups that fail to build are skipped)
        """
        flow_content_dict = schedule.SCHEDULE_CONTENT
        assert isinstance(flow_content_dict, dict)
        sig_list = []
        for _, flow_content in flow_content_dict.items():
            sig = self.__make_sig_by_content(schedule, flow_content)
            if not sig:
                continue
            sig_list.append(sig)
        return sig_list

    def __parse_flow_content(self, flow_content):
        """
        Parse {step: [schedule_str, ...]} into {step: [signature, ...]}.
        :return: (dict_step_sig_list, min_step, max_step); (None, -1, -1) on error
        """
        assert isinstance(flow_content, dict)
        dict_step_sig_list = dict()
        min_step = 65535
        max_step = -1
        for step, content_list in flow_content.items():
            step = int(step)
            min_step = min(step, min_step)
            max_step = max(step, max_step)
            for schedule_str in content_list:
                if schedule_str == self.inst_name:
                    # a workflow's sub-schedule must not be a workflow itself
                    local_log(
                        "[ERROR] <DispatchRuleWorkFlow> __parse_flow_content, "
                        "workflow can not contain workflow, schedule:{}".format(
                            schedule_str
                        )
                    )
                    return None, -1, -1
                sub_sig_list = parse_task(schedule_str)
                if not sub_sig_list:
                    # a sub-signature must not be silently skipped
                    local_log(
                        "[ERROR] <DispatchRuleWorkFlow> __parse_flow_content, "
                        "parse_schedule_str_to_signature, schedule:{}".format(
                            schedule_str
                        )
                    )
                    return None, -1, -1
                if not dict_step_sig_list.get(step):
                    dict_step_sig_list[step] = []
                if isinstance(sub_sig_list, list):
                    dict_step_sig_list[step].extend(sub_sig_list)
                else:
                    dict_step_sig_list[step].append(sub_sig_list)
        return dict_step_sig_list, min_step, max_step

    def __make_sig_by_content(self, schedule, flow_content):
        """
        Build the chained signature for one flow group: merge same-step
        signatures into groups, then chain the steps in order.
        :return: chained signature, or None on parse error
        """
        dict_step_sig_list, min_step, max_step = self.__parse_flow_content(flow_content)
        if dict_step_sig_list is None:
            local_log(
                "[ERROR] <DispatchRuleWorkFlow>dict_step_sig_list is None, content:{}".format(
                    flow_content
                )
            )
            return None
        queue_name = dict_step_sig_list[min_step][0].options.get("queue")
        # merge steps
        step_sig_list = []
        for step in range(min_step, max_step + 1):
            # build the sig list in step order
            sig_list = dict_step_sig_list.get(step)
            if not sig_list:
                continue
            res_sig = WorkFlowMethods.merge_sig_list(sig_list)  # merge same-step sigs
            step_sig_list.append(res_sig)
        # build the chord/chain
        ch = WorkFlowMethods.link_signatures(step_sig_list)
        if ch is None:
            local_log(
                "[ERROR] <DispatchRuleWorkFlow>__make_sig_by_content, make chord error, content:{}".format(
                    flow_content
                )
            )
        else:
            local_log(
                "<DispatchRuleWorkFlow>__make_sig_by_content, queue:{} steps:{}".format(
                    queue_name, max_step
                )
            )
        return ch

    # endregion
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/stat/dispatch/rules/rule_workflow.py
rule_workflow.py
class WorkFlowMethods(object):
    """
    Static helpers for building and inspecting celery canvas primitives
    (signature / group / chain / chord) used by the dispatch workflow.
    Celery imports are kept function-local so the module loads without celery.
    """

    @staticmethod
    def get_task_sig_queue_name(task_sig):
        """
        Resolve the 'queue_flag' option of a signature, recursing into the
        first sub-task of group/chain containers.
        :return: queue flag string, or None for unknown types
        """
        queue_flag = "queue_flag"
        from celery import chord
        from celery import group
        from celery.canvas import Signature
        from celery.canvas import _chain

        if isinstance(task_sig, chord):
            # chord: take the flag from its first header task
            queue_name = task_sig.tasks[0].options.get(queue_flag)
        elif isinstance(task_sig, group) or isinstance(task_sig, _chain):
            # group, chain: recurse into the first sub-signature
            sig = task_sig.tasks[0]
            return WorkFlowMethods.get_task_sig_queue_name(sig)
        elif isinstance(task_sig, Signature):
            # plain signature
            queue_name = task_sig.options.get(queue_flag)
        else:
            return None
        return queue_name

    @staticmethod
    def merge_sig_list(sig_list):
        """
        Merge signatures: several -> celery group, one -> itself, empty -> None.
        """
        if not sig_list:
            return None
        if len(sig_list) > 1:
            from celery import group

            sig_gather = group(*tuple(sig_list))
            return sig_gather
        return sig_list[0]

    @staticmethod
    def fill_res_task_id_list(res, task_id_list):
        """
        Append every task id reachable from *res* (walking .parent links)
        into *task_id_list*.
        :return: True when ids were collected, False for falsy/unknown input
        """
        if not res:
            return False
        from celery.result import GroupResult
        from celery.result import AsyncResult

        if isinstance(res, GroupResult):
            for res_ in res.results:
                task_id_list.append(res_.task_id)
        elif isinstance(res, AsyncResult):
            task_id_list.append(res.task_id)
        else:
            return False
        if res.parent is not None:
            WorkFlowMethods.fill_res_task_id_list(res.parent, task_id_list)
        return True

    # region Signature Statistic
    # peak number of sibling signatures seen by calculate_sig_cnt
    max_sig_cnt = 0

    @staticmethod
    def reset_max_sig_cnt():
        WorkFlowMethods.max_sig_cnt = 0

    @staticmethod
    def get_max_sig_cnt():
        return WorkFlowMethods.max_sig_cnt

    @staticmethod
    def update_max_sig_cnt(cnt):
        WorkFlowMethods.max_sig_cnt = max(WorkFlowMethods.max_sig_cnt, cnt)

    @staticmethod
    def calculate_sig_cnt(sig):
        """
        Recursively count the signatures contained in *sig*, updating the
        class-level max_sig_cnt peak for chord bodies, groups and tuples.
        :return: total signature count (a leaf counts as 1)
        """
        # fix: removed a stray no-op `pass` that led the body
        from celery import chord
        from celery.canvas import _chain
        from celery import group

        if isinstance(sig, chord):
            body_cnt = WorkFlowMethods.calculate_sig_cnt(sig.body)
            WorkFlowMethods.update_max_sig_cnt(body_cnt)
            return WorkFlowMethods.calculate_sig_cnt(sig.tasks) + body_cnt
        if isinstance(sig, _chain):
            return WorkFlowMethods.calculate_sig_cnt(sig.tasks)
        if isinstance(sig, group):
            cnt = WorkFlowMethods.calculate_sig_cnt(sig.tasks)
            WorkFlowMethods.update_max_sig_cnt(cnt)
            return cnt
        if isinstance(sig, tuple):
            cnt = 0
            for s in sig:
                cnt += WorkFlowMethods.calculate_sig_cnt(s)
            WorkFlowMethods.update_max_sig_cnt(cnt)
            return cnt
        if isinstance(sig, list):
            cnt = 0
            for s in sig:
                cnt += WorkFlowMethods.calculate_sig_cnt(s)
            return cnt
        return 1

    # endregion

    # region make sig
    @staticmethod
    def link_signatures(sig_list):
        """
        Chain the given signatures in order (step1 | step2 | ...).
        :return: celery chain
        """
        from celery import chain

        sig = chain(tuple(sig_list))
        return sig

    @staticmethod
    def make_signature_batch(
        task_path, business_inst_name, kwargs_list=None, queue_name=None
    ):
        """
        Build one signature per kwargs in *kwargs_list*, stamp them with
        *queue_name*, and merge into a group when there is more than one.
        :return: signature / group, or None when kwargs_list is empty
        """
        sig = None
        s_container = []
        if kwargs_list:
            for kwargs in kwargs_list:
                s = WorkFlowMethods._make_signature(
                    task_path, business_inst_name, **kwargs
                )
                s_container.append(s)
            # stamp queue name onto every signature
            WorkFlowMethods._fill_sig_queue_name(s_container, queue_name)
        if not s_container:
            return sig
        if len(s_container) > 1:
            from celery import group

            sig = group(tuple(s_container))
            WorkFlowMethods._fill_sig_queue_name(sig, queue_name)
        else:
            sig = s_container[0]
        return sig

    @staticmethod
    def _make_signature(task_path, business_inst_name, *args, **kwargs):
        # SECURITY: exec/eval on task_path/business_inst_name — these must
        # come from trusted internal configuration only, never user input
        exec(f"from {task_path} import {business_inst_name}")
        return eval(business_inst_name).s(*args, **kwargs)

    @staticmethod
    def _fill_sig_queue_name(sig_list, queue_name):
        """
        Write the resolved 'queue' and raw 'queue_flag' options onto a list
        of signatures or a group.
        """
        from celery import group

        if queue_name is None:
            # NOTE(review): assert is stripped under -O; kept as-is to
            # preserve the AssertionError failure type for existing callers
            assert False
        from yyxx_game_pkg.stat.dispatch.common.common import get_queue_name

        real_queue_name = get_queue_name(queue_name)
        if isinstance(sig_list, list):
            for s in sig_list:
                s.options["queue"] = real_queue_name
                s.options["queue_flag"] = queue_name
        elif isinstance(sig_list, group):
            sig_list.options["queue"] = real_queue_name
            sig_list.options["queue_flag"] = queue_name

    # endregion
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/stat/dispatch/core/workflows.py
workflows.py
from yyxx_game_pkg.utils.xdate import split_date_str_by_day
from yyxx_game_pkg.logger.log import root_log

# Module-level configuration; overridden via set_config().
__schedule_file_path = "None"
__api_addr = "http://localhost:8080"


# region internal helpers
def _to_protocol_by_schedule(
    schedule, is_work_flow=False, custom_content=None, custom_queue=None
):
    """Build a dispatch protocol dict from an imported schedule module.

    :param schedule: schedule module exposing SCHEDULE_* attributes
    :param is_work_flow: when True, SCHEDULE_CONTENT is expanded into a
        {group: {step: [sub-protocol, ...]}} tree via to_protocol()
    :param custom_content: optional dict merged into each content entry
    :param custom_queue: optional queue name overriding SCHEDULE_QUEUE_NAME
    :return: protocol dict
    """
    proto_dict = dict()
    schedule_name = schedule.SCHEDULE_NAME
    instance_name = schedule.SCHEDULE_DISPATCH_RULE_INSTANCE_NAME
    queue_name = (
        schedule.SCHEDULE_QUEUE_NAME
        if hasattr(schedule, "SCHEDULE_QUEUE_NAME")
        else None
    )
    if custom_queue is not None:
        queue_name = custom_queue

    import copy

    # Deep copy so custom_content merging never mutates the schedule module.
    content = copy.deepcopy(schedule.SCHEDULE_CONTENT)
    if custom_content:
        for c in content:
            if not isinstance(c, dict) or not isinstance(custom_content, dict):
                continue
            content_dict = c.get("custom_content")
            if not content_dict:
                c.update(custom_content)
            else:
                data_dict = custom_content.get("custom_content")
                content_dict.update(data_dict)

    proto_dict["SCHEDULE_NAME"] = schedule_name
    proto_dict["SCHEDULE_DISPATCH_RULE_INSTANCE_NAME"] = instance_name
    if queue_name:
        proto_dict["SCHEDULE_QUEUE_NAME"] = queue_name

    if is_work_flow:
        # Expand a work flow into {group: {step: [sub protocol, ...]}}.
        dict_rule = dict()
        for schedule_param in content:
            group = schedule_param.get("group")
            step = schedule_param.get("step")
            custom_content = schedule_param.get("custom_content")
            sub_schedule_name = schedule_param.get("schedule")
            if step is None or sub_schedule_name is None:
                continue
            if group is None:
                group = 1
            if not dict_rule.get(group):
                dict_rule[group] = dict()
            if not dict_rule[group].get(step):
                dict_rule[group][step] = []
            schedule_str = to_protocol(
                sub_schedule_name,
                custom_content=custom_content,
                custom_queue=queue_name,
            )
            dict_rule[group][step].append(schedule_str)
        content = dict_rule
    proto_dict["SCHEDULE_CONTENT"] = content
    return proto_dict


def _get_schedule(schedule_name):
    """Import a schedule module by name.

    "name@dir" selects an alternative schedule directory (default "schedule").
    Tries <path>.<dir>.statistic_task.<name> first and falls back to
    <path>.<dir>.work_flow.<name>; the fallback implies a work flow.

    :return: (module or None, is_work_flow)
    """
    import importlib

    schedule_dir = "schedule"
    if schedule_name.find("@") > -1:
        schedule_name, schedule_dir = schedule_name.split("@")
    schedule = None
    is_work_flow = False
    try:
        module = f"{__schedule_file_path}.{schedule_dir}.statistic_task.{schedule_name}"
        schedule = importlib.import_module(module)
        is_work_flow = (
            schedule.SCHEDULE_DISPATCH_RULE_INSTANCE_NAME.find("work_flow") >= 0
        )
    except Exception:
        # Not a statistic task — try the work_flow directory next.
        try:
            module = f"{__schedule_file_path}.{schedule_dir}.work_flow.{schedule_name}"
            schedule = importlib.import_module(module)
            is_work_flow = True
        except Exception as e:
            root_log(e)
    return schedule, is_work_flow


def _parse_proto_dict(proto_dict):
    """Split a work-flow protocol into one protocol per day slice when its
    first step declares day_interval == "SPLIT_DATE_BY_DAY".

    :return: list of protocol dicts (possibly just [proto_dict])
    """
    process_proto_list = []
    schedule_name = proto_dict.get("SCHEDULE_DISPATCH_RULE_INSTANCE_NAME")
    if schedule_name.find("work_flow") >= 0:
        schedule_content = proto_dict.get("SCHEDULE_CONTENT")
        # NOTE(review): assumes group 1 / step 1 always exists — confirm upstream.
        step_schedule_content = schedule_content[1][1][0].get("SCHEDULE_CONTENT")[0]
        date_interval = step_schedule_content.get("day_interval")
        if date_interval and date_interval == "SPLIT_DATE_BY_DAY":
            import copy

            date_appoint = step_schedule_content["date_appoint"]
            date_list = split_date_str_by_day(date_appoint[0], date_appoint[1])
            for date_offset in date_list:
                content_k_v = dict()
                content_k_v["date_appoint"] = ""
                content_k_v["day_interval"] = date_offset
                # Fix: work on an independent copy per slice. The original
                # mutated and appended the SAME dict each iteration, so every
                # list entry aliased one dict holding only the last slice.
                proto_copy = copy.deepcopy(proto_dict)
                _modify_proto_content(proto_copy.get("SCHEDULE_CONTENT"), content_k_v)
                process_proto_list.append(proto_copy)
        else:
            process_proto_list.append(proto_dict)
    else:
        process_proto_list.append(proto_dict)
    return process_proto_list


def _modify_proto_content(schedule_content, content_key_value):
    """In-place replace keys/values inside a work-flow content tree.

    SCHEDULE_QUEUE_NAME is written on the sub-protocol itself; every other
    key is written into each entry of the sub-protocol's SCHEDULE_CONTENT.
    """
    if not isinstance(schedule_content, dict) or not isinstance(
        content_key_value, dict
    ):
        return
    for content_dict in schedule_content.values():
        for key, content_list in content_dict.items():
            temp_list = []
            for content in content_list:
                for con_key, con_value in content_key_value.items():
                    if con_key == "SCHEDULE_QUEUE_NAME":
                        content[con_key] = con_value
                    else:
                        s_content = content["SCHEDULE_CONTENT"]
                        for c in s_content:
                            c[con_key] = con_value
                temp_list.append(content)
            content_dict[key] = temp_list


# endregion


# region public API
def set_config(path: str, api_addr: str):
    """Configure the schedule package root and the dispatch API address."""
    global __schedule_file_path, __api_addr
    __schedule_file_path = path
    __api_addr = api_addr


def to_protocol(schedule_name, custom_content=None, custom_queue=None):
    """Load a schedule by name and convert it to a protocol dict (or None)."""
    schedule, is_work_flow = _get_schedule(schedule_name)
    if not schedule:
        return None
    return _to_protocol_by_schedule(
        schedule, is_work_flow, custom_content, custom_queue
    )


def process_proto(proto_dict):
    """Split a protocol into the list of protocols to submit (work-flow day split)."""
    res_list = []
    process_proto_list = _parse_proto_dict(proto_dict)
    for p in process_proto_list:
        res_list.append(p)
    return res_list


def send(proto):
    """POST the protocol to the dispatch service's /submit endpoint."""
    import requests

    url = f"{__api_addr}/submit"
    post_data = {"content": proto}
    res = requests.post(json=post_data, url=url, timeout=600)
    return res
# endregion
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/stat/submit/logic/submit_logic.py
submit_logic.py
from yyxx_game_pkg.center_api.sdk.recharge import BaseRecharge
from .map_factor import MapRecharge


class Recharge(MapRecharge, BaseRecharge):
    """Channel recharge (payment callback) handler template.

    The full callback flow lives in the parent classes; each hook below only
    marks where channel-specific tweaks belong.
    """

    # Parent-class core flow, kept for reference (safe to delete):
    #
    # def get_params(self, data) -> dict:
    #     self.modify_params()
    #     extra = data.get(self.params.extra, "")
    #     if not extra:
    #         return {}
    #
    #     ext_ary = extra.split(",")
    #     data_ary = {"extra": extra}
    #     self.get_params_core(data, data_ary, ext_ary)
    #     self.get_params_helper(data, data_ary)
    #
    #     return data_ary

    def modify_params(self):
        """Adjust ``self.params`` attributes before parsing.

        Defaults::

            extra: str = "extra"
            cp_order_id: str = "billno"
            channel_order_id: str = "order_id"
            player_id: str = "role_id"
            channel_username: str = "openid"
            is_check_username: int = 1
            is_test: int = 0

        Example: ``self.params.cp_order_id = "xxx"``
        """

    # Generic project method — the parent implementation is usually enough,
    # so this override may simply be deleted.
    def get_params_core(self, data, data_ary, ext_ary) -> None:
        """Fill ``data_ary`` with the common order fields.

        The parent implementation does::

            data_ary["cp_order_id"] = data.get(self.params.cp_order_id, "")
            data_ary["channel_order_id"] = data.get(self.params.channel_order_id, "")
            data_ary["player_id"] = data.get(self.params.player_id)
            data_ary["is_check_username"] = self.params.is_check_username
            data_ary["channel_username"] = data.get(self.params.channel_username, "")
            if len(ext_ary) > 6:
                data_ary["recharge_id"] = int(ext_ary[5])
        """
        super().get_params_core(data, data_ary, ext_ary)

    def get_params_helper(self, data, data_ary) -> None:
        """Supplement ``data_ary`` with values that have no generic source.

        Anything that cannot be derived generically in ``get_params_core``
        is filled in here. Typical extra fields::

            money            order amount
            real_money       amount actually paid
            extra_gold       bonus ingots (channel rebate)
            extra_gold_bind  bonus bound ingots (channel rebate)
            pay_dt           recharge time (seconds)
        """
        super().get_params_helper(data, data_ary)

    def make_sign_helper(self, values) -> (dict, str):
        """Return ``(values, pay_key)`` used for signing.

        Parent implementation::

            ext_ary = values[self.extra_key].split(",")
            plat_code = ext_ary[0]
            game_channel_id = ext_ary[1]
            sdk_data = self.operator.get_key(plat_code, game_channel_id)
            pay_key = sdk_data.get("pay_key", "")
            return values, pay_key

        When ``values`` or ``pay_key`` need adjusting, call
        ``values, pay_key = super().make_sign_helper(values)``, modify, and
        return the pair.
        """
        return super().make_sign_helper(values)

    # Signing core implemented by the parent (safe to delete):
    #
    # def make_sign(self, values, sign_key=None) -> str:
    #     values, pay_key = self.make_sign_helper(values)
    #     return self.channel_make_sign(values, pay_key)

    def channel_make_sign(self, values, sign_key) -> str:
        """Build the signature string.

        Default: md5 (``yyxx_game_pkg.crypto.basic.md5``) over the post_data
        keys sorted in ascending order. May also be overridden in
        MapRecharge / MapFactor.

        :return: signature string
        """
        return super().channel_make_sign(values, sign_key)

    def feedback(self, error_code, data: dict = None, msg="", *args, **kwargs):
        """Shape the response returned to the channel, as required."""
        return error_code
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/template/sdk/sdk_{{cookiecutter.sdk_name}}/recharge.py
recharge.py
import time

from yyxx_game_pkg.center_api.sdk.check_token import BaseCheckToken
from yyxx_game_pkg.utils.error_code import ErrorCode

from .map_factor import MapFactor


class Check_Token(MapFactor, BaseCheckToken):
    """Channel login-token verification template."""

    # is_https = True
    # method = "POST"
    # time_param = ("time", int(time.time())

    # Fill `params` from the channel's API documentation:
    #   key   -> post_data key
    #   value -> kwargs key
    #   i.e. post_data[key] = kwargs[value]
    params = {}
    sdk_exclude = ()

    # Parent-class core flow, kept for reference (safe to delete):
    #
    # def run_check_token(self, *args, **kwargs) -> dict:
    #     """
    #     run check token
    #     """
    #     sdk_helper, response_helper = self.sdk_version_choice(**kwargs)
    #     if sdk_helper is None:
    #         return self.sdk_rechfeed(ErrorCode.ERROR_INVALID_PARAM)
    #
    #     channel_data, post_data = sdk_helper(**kwargs)
    #     response = self.sdk_check_token(channel_data, post_data)
    #
    #     return response_helper(response, **kwargs)

    def sdk_helper(self, sdk_exclude=(), **kwargs) -> (dict, dict):
        """Build ``(channel_data, post_data)`` for the verification request.

        Parent implementation::

            channel_data = kwargs.get("channel_data", {})
            post_data = {}
            for k, v in self._params.items():
                post_data[k] = kwargs.get(v)
            if self.Time not in sdk_exclude:
                post_data[self.Time] = int(time.time())
            if self.Flag not in sdk_exclude:
                post_data[self.Flag] = self.channel_make_sign(
                    post_data, channel_data.get("app_key", "")
                )
            return channel_data, post_data

        To modify or extend ``post_data``::

            channel_data, post_data = super().sdk_helper(**kwargs)
            post_data["key to change or add"] = "value"
            return channel_data, post_data

        NOTE(review): this override does not forward its ``sdk_exclude``
        argument to ``super().sdk_helper`` — confirm that is intentional.

        :param sdk_exclude: parameters excluded from the post data
        :param kwargs: request parameters
        :return: channel_data, post_data
        """
        return super().sdk_helper(**kwargs)

    def channel_make_sign(self, values, sign_key) -> str:
        """Override here when only the signing step of ``sdk_helper`` differs.

        :param values: ``post_data`` from ``sdk_helper``
        :param sign_key: ``channel_data.get("app_key", "")`` from ``sdk_helper``
        :return: signature string
        """
        return super().channel_make_sign(values, sign_key)

    def sdk_check_token(self, channel_data, post_data):
        """Perform the second-stage verification.

        The default uses the channel's login-verification URL. When the
        channel verifies differently, override this method (and then
        ``sdk_helper`` / ``channel_make_sign`` may be removed). The return
        value is consumed by ``response_helper``; its shape depends on the
        channel.
        """
        return super().sdk_check_token(channel_data, post_data)

    def response_helper(self, response: dict, **kwargs) -> dict:
        """Shape the data returned to the caller, per the channel docs."""
        if response and response["code"] == 0:
            data = {
                # ----- ret and user_id are required ------
                "ret": 1,
                "user_id": kwargs["?"],  # replace "?" with the real kwargs key
                # --------------------------------
                # add further fields here as needed
            }
            return data
        return super().response_helper(response, **kwargs)

    # SDK version map — rarely needs changing (safe to delete):
    #
    # @property
    # def sdk_version_map(self) -> dict:
    #     """
    #     sdk version map
    #     Add the corresponding mapping when several versions exist.
    #     """
    #     return super().sdk_version_map
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/template/sdk/sdk_{{cookiecutter.sdk_name}}/check_token.py
check_token.py
import socket
import struct
import io
import sys

# xdb file layout constants (defaults defined by the ip2region xdb format).
HeaderInfoLength = 256
VectorIndexRows = 256
VectorIndexCols = 256
VectorIndexSize = 8
SegmentIndexSize = 14


class XdbSearcher(object):
    """Offline IP -> region searcher over an ip2region ``.xdb`` database.

    Three lookup modes, by construction arguments:
      * contentBuff set: whole file cached in memory (no file handle)
      * vectorIndex set: vector index cached, segments read from the file
      * neither: every read goes through the file handle
    """

    __f = None  # open file handle (file-backed modes only)

    # the minimal memory allocation: vector index kept in memory
    vectorIndex = None
    # the whole xdb file kept in memory
    contentBuff = None

    @staticmethod
    def loadVectorIndexFromFile(dbfile):
        """Read only the vector index block (right after the 256-byte header).

        :return: raw index bytes, or None on I/O error (error is printed).
        """
        try:
            f = io.open(dbfile, "rb")
            f.seek(HeaderInfoLength)
            vi_len = VectorIndexRows * VectorIndexCols * SegmentIndexSize
            vector_data = f.read(vi_len)
            f.close()
            return vector_data
        except IOError as e:
            print("[Error]: %s" % e)

    @staticmethod
    def loadContentFromFile(dbfile):
        """Read the whole xdb file into memory; None on I/O error."""
        try:
            f = io.open(dbfile, "rb")
            all_data = f.read()
            f.close()
            return all_data
        except IOError as e:
            print("[Error]: %s" % e)

    def __init__(self, dbfile=None, vectorIndex=None, contentBuff=None):
        self.initDatabase(dbfile, vectorIndex, contentBuff)

    def search(self, ip):
        """Search by IP given as dotted string, all-digit string, or int."""
        if isinstance(ip, str):
            # Fix: all-digit strings used to be passed through unconverted,
            # raising TypeError on the bit shifts in searchByIPLong.
            ip = int(ip) if ip.isdigit() else self.ip2long(ip)
        return self.searchByIPLong(ip)

    def searchByIPStr(self, ip):
        """Search by IP string (dotted quad or all-digit numeric string)."""
        # Fix: same digit-string conversion bug as search().
        ip = int(ip) if ip.isdigit() else self.ip2long(ip)
        return self.searchByIPLong(ip)

    def searchByIPLong(self, ip):
        """Search by numeric IPv4 address; returns the region string or ""."""
        # locate the segment index block based on the vector index
        sPtr = ePtr = 0
        il0 = (int)((ip >> 24) & 0xFF)
        il1 = (int)((ip >> 16) & 0xFF)
        idx = il0 * VectorIndexCols * VectorIndexSize + il1 * VectorIndexSize

        if self.vectorIndex is not None:
            sPtr = self.getLong(self.vectorIndex, idx)
            ePtr = self.getLong(self.vectorIndex, idx + 4)
        elif self.contentBuff is not None:
            sPtr = self.getLong(self.contentBuff, HeaderInfoLength + idx)
            ePtr = self.getLong(self.contentBuff, HeaderInfoLength + idx + 4)
        else:
            self.__f.seek(HeaderInfoLength + idx)
            buffer_ptr = self.__f.read(8)
            sPtr = self.getLong(buffer_ptr, 0)
            ePtr = self.getLong(buffer_ptr, 4)

        # binary search the segment index block to get the region info
        dataLen = dataPtr = int(-1)
        low = int(0)
        high = int((ePtr - sPtr) / SegmentIndexSize)
        while low <= high:
            mid = int((low + high) >> 1)
            p = int(sPtr + mid * SegmentIndexSize)
            # read the segment index: [sip:4][eip:4][dataLen:2][dataPtr:4]
            buffer_sip = self.readBuffer(p, SegmentIndexSize)
            sip = self.getLong(buffer_sip, 0)
            if ip < sip:
                high = mid - 1
            else:
                eip = self.getLong(buffer_sip, 4)
                if ip > eip:
                    low = mid + 1
                else:
                    dataLen = self.getInt2(buffer_sip, 8)
                    dataPtr = self.getLong(buffer_sip, 10)
                    break

        # empty match interception
        if dataPtr < 0:
            return ""

        buffer_string = self.readBuffer(dataPtr, dataLen)
        return_string = buffer_string.decode("utf-8")
        return return_string

    def readBuffer(self, offset, length):
        """Read `length` bytes at `offset` from memory or the file handle."""
        buffer = None
        # check the in-memory buffer first
        if self.contentBuff is not None:
            buffer = self.contentBuff[offset:offset + length]
            return buffer
        # read from the file handle
        if self.__f is not None:
            self.__f.seek(offset)
            buffer = self.__f.read(length)
        return buffer

    def initDatabase(self, dbfile, vi, cb):
        """
        " initialize the database for search
        " param: dbFile, vectorIndex, contentBuff
        """
        try:
            if cb is not None:
                self.__f = None
                self.vectorIndex = None
                self.contentBuff = cb
            else:
                self.__f = io.open(dbfile, "rb")
                self.vectorIndex = vi
        except IOError as e:
            print("[Error]: %s" % e)
            sys.exit()

    def ip2long(self, ip):
        """Dotted-quad IPv4 string -> unsigned 32-bit int."""
        _ip = socket.inet_aton(ip)
        return struct.unpack("!L", _ip)[0]

    def isip(self, ip):
        """Loose syntactic check that `ip` looks like a dotted-quad IPv4."""
        p = ip.split(".")
        if len(p) != 4:
            return False
        for pp in p:
            if not pp.isdigit():
                return False
            if len(pp) > 3:
                return False
            if int(pp) > 255:
                return False
        return True

    def getLong(self, b, offset):
        """Read a 4-byte little-endian unsigned int; 0 when out of range."""
        if len(b[offset:offset + 4]) == 4:
            # Fix: the xdb format is little-endian; use '<I' explicitly
            # instead of native order 'I' (identical on LE hosts).
            return struct.unpack('<I', b[offset:offset + 4])[0]
        return 0

    def getInt2(self, b, offset):
        """Read a 2-byte little-endian unsigned int."""
        # Fix: the original masked the high byte with 0x0000FF00, which is
        # always 0 for a bytes element (0-255) — lengths > 255 were
        # truncated to their low byte. Shift the high byte into place.
        return (b[offset] & 0xFF) | ((b[offset + 1] & 0xFF) << 8)

    def close(self):
        """Close the file handle (if any) and drop cached buffers."""
        if self.__f is not None:
            self.__f.close()
        self.vectorIndex = None
        self.contentBuff = None


if __name__ == '__main__':
    ip_array = ["1.2.3.4", "117.136.122.164"]
    # 1. cache the whole xdb file
    dbPath = "./data/ip2region.xdb"
    cb = XdbSearcher.loadContentFromFile(dbfile=dbPath)
    # 2. create the searcher
    searcher = XdbSearcher(contentBuff=cb)
    # 3. run the queries
    for ip in ip_array:
        region_str = searcher.searchByIPStr(ip)
        print(region_str)
    searcher.close()
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/ip2region/xdbSearcher.py
xdbSearcher.py
import os
from typing import Iterable

import numpy as np
import pandas as pd
from django.core.files.uploadedfile import InMemoryUploadedFile

from yyxx_game_pkg.helpers.op_helper import OPHelper
from yyxx_game_pkg.utils.files import get_file_size


class CheckUploadFile(OPHelper):
    """Validate an uploaded Excel file row by row and bulk-insert its data.

    Subclasses customise behaviour through the `prepare` / `process` hooks.
    """

    def __init__(
        self,
        *,
        input_file_name="upload_file",
        file_max_size="10MB",
        file_allow_ext=(".xls", ".xlsx"),
        min_column=0,
        **kwargs,
    ):
        """
        :param input_file_name: request.FILES key holding the upload
        :param file_max_size: size limit string understood by get_file_size
        :param file_allow_ext: accepted file extensions (lower case)
        :param min_column: minimum number of columns required per row
        """
        self.total_num = 0
        self.insert_data = []
        self.df = None
        self.input_file_name = input_file_name
        self.file_max_size = file_max_size
        self.file_allow_ext = file_allow_ext
        self.min_column = min_column
        self.kwargs = kwargs
        self.conn = self.connection()

    def check(self, request):
        """Check upload file; returns "" on success or an error message."""
        input_file_name = self.input_file_name
        if input_file_name not in request.FILES:
            return "请选择文件"
        f: InMemoryUploadedFile = request.FILES[input_file_name]
        file_size = f.size
        file_name = f.name
        if file_size > get_file_size(self.file_max_size):
            return "文件大小不大于{}".format(self.file_max_size)
        file_ext = os.path.splitext(file_name)[1].lower()
        if file_ext not in self.file_allow_ext:
            return "文件格式只支持{}".format(",".join(self.file_allow_ext))

        with f.file as file_io:
            self.df = pd.read_excel(file_io)
        if self.df.empty:
            return "没有符合的记录"
        if len(self.df.iloc[0].tolist()) < self.min_column:
            return "列数至少包含{}行".format(self.min_column)

        self.prepare()
        for i, row in self.df.iterrows():
            row_data = row.tolist()
            serial = row_data[0]
            # NOTE(review): np.isnan raises TypeError for non-numeric serials
            # (e.g. text in the first column) — confirm the first column is
            # always numeric.
            if np.isnan(serial):
                return "上传的数据,不可以有空行(数据不完整),上传失败"
            msg = self.process(row_data)
            if msg:
                return msg

        if self.total_num == 0:
            return "提交记录至少包含一条"
        if self.total_num > 1000:
            # Fix: message said 单词 ("word") instead of 单次 ("per submission").
            return "单次最多只能提交1000条记录"
        return ""

    def prepare(self):
        """Hook: pre-process ``self.df`` before row iteration."""

    def process(self, row_data):
        """Hook: handle one row; return an error message to abort, "" to continue.

        :param row_data: the row as a plain list
        """
        self.insert_data.append(row_data)
        self.total_num += 1
        return ""

    @staticmethod
    def insert_sql(
        table: str, fields: Iterable, update=False, update_fields: Iterable = ()
    ) -> str:
        """Compose a parameterized INSERT statement.

        :param table: target table name
        :param fields: columns to insert
        :param update: append an ON DUPLICATE KEY UPDATE clause
        :param update_fields: columns to update; falls back to `fields`
        """
        sql = (
            f"INSERT INTO {table} ({','.join(fields)}) "
            f"VALUES({','.join(['%s'] * len(fields))})"
        )
        if update:
            update_list = []
            _fields = update_fields if update_fields else fields
            for field in _fields:
                update_list.append(f"{field}=VALUES({field})")
            sql += f" ON DUPLICATE KEY UPDATE {','.join(update_list)}"
        return sql

    def insert(
        self,
        sql: str = None,
        table: str = None,
        fields: tuple | list = None,
        update=False,
        update_fields=(),
    ):
        """Insert the collected rows.

        :param sql: complete SQL statement (ignored when table+fields given)
        :param table: target table name
        :param fields: columns to insert
        :param update: append an ON DUPLICATE KEY UPDATE clause
        :param update_fields: columns to update; falls back to `fields`
        :return: (1, "") on success, (-1001, message) on failure
        """
        code = -1001
        if table and fields:
            sql = self.insert_sql(table, fields, update, update_fields)
        if sql is None:
            msg = "请填写sql"
            return code, msg
        with self.conn.cursor() as cursor:
            for item in self.insert_data:
                try:
                    cursor.execute(sql, item)
                except Exception as e:
                    msg = str(e)
                    self.conn.rollback()
                    return code, msg
            self.conn.commit()
        return 1, ""

    def __del__(self):
        # Fix: guard against __init__ having failed before self.conn was set,
        # which would raise AttributeError during interpreter teardown.
        conn = getattr(self, "conn", None)
        if conn is not None:
            conn.close()
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/server_center/check_upload_file.py
check_upload_file.py
import io
import time
import cProfile
from functools import wraps

from line_profiler import LineProfiler


class Profiler:
    """Line-level profiling helper built on :class:`LineProfiler`."""

    def __init__(self):
        self.__lp = LineProfiler()

    def execute(self, main_func_inst, *args, assist_func_list=None, **kwargs):
        """Profile ``main_func_inst(*args, **kwargs)`` line by line and print stats.

        :param main_func_inst: entry function to profile
        :param assist_func_list: extra callables whose lines are also timed
        :param args: positional arguments forwarded to the entry function
        :param kwargs: keyword arguments forwarded to the entry function
        :return: None (statistics are printed)
        """
        for helper in (assist_func_list or []):
            self.__lp.add_function(helper)
        profiled = self.__lp(main_func_inst)
        profiled(*args, **kwargs)
        self.__lp.print_stats()


def func_time(func):
    """Decorator: print the wall-clock duration of every call.

    :param func: wrapped callable
    :return: wrapper that times and forwards the call
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        started = time.time()
        result = func(*args, **kwargs)
        finished = time.time()
        print(func.__name__, args, kwargs, 'took', finished - started, 'seconds')
        return result

    return wrapper


def func_cprofile(func):
    """Decorator: profile each call with the built-in cProfile profiler."""

    @wraps(func)
    def wrapper(*args, **kwargs):
        prof = cProfile.Profile()
        try:
            prof.enable()
            result = func(*args, **kwargs)
            prof.disable()
            return result
        finally:
            # stats are printed even when the call raises
            prof.print_stats(sort='time')

    return wrapper


def func_line_time(follow=()):
    """Decorator factory: per-line timing report for each call.

    :param follow: additional inner callables to include in the report
    :return: decorator
    """

    def decorate(func):
        @wraps(func)
        def profiled_func(*args, **kwargs):
            line_prof = LineProfiler()
            try:
                line_prof.add_function(func)
                for inner in follow:
                    line_prof.add_function(inner)
                line_prof.enable_by_count()
                return func(*args, **kwargs)
            finally:
                stream = io.StringIO()
                line_prof.print_stats(stream=stream)
                print(f"<line_profiler> {stream.getvalue()}")

        return profiled_func

    return decorate


"""
# example
def do_stuff(numbers):
    do_other_stuff(numbers)
    s = sum(numbers)
    l = [numbers[i]/43 for i in range(len(numbers))]
    m = ['hello'+str(numbers[i]) for i in range(len(numbers))]

@func_line_time()
def do_other_stuff(numbers):
    s = sum(numbers)

def main_instance():
    import random
    numbers = [random.randint(1, 100) for i in range(1000)]
    do_stuff(numbers)
    do_other_stuff(numbers)

if __name__ == '__main__':
    main_instance()
    # profile = Profiler()
    # profile.execute(main_func_inst=main_instance, assist_func_list=[do_stuff, do_other_stuff])
"""
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/utils/profiler.py
profiler.py
import functools
import pickle
import random
import time
import traceback
from concurrent import futures

from yyxx_game_pkg.logger.log import root_log
from yyxx_game_pkg.xtrace.helper import get_current_trace_id


def fix_str(obj, max_len=5000):
    """Stringify `obj`, truncated to `max_len` chars to keep logs readable."""
    msg = str(obj)
    return msg[:max_len]


def log_execute_time_monitor(exec_lmt_time=20):
    """Decorator factory: log a warning when the call exceeds `exec_lmt_time`.

    :param exec_lmt_time: threshold in seconds
    """

    def decorator(func):
        @functools.wraps(func)
        def inner(*args, **kwargs):
            begin_dt = time.time()
            res = func(*args, **kwargs)
            end_dt = time.time()
            offset = end_dt - begin_dt
            if offset >= exec_lmt_time:
                ex_info = None
                if kwargs.get("connection") is not None:
                    # best-effort: pull the DB host out of a pooled connection
                    ex_info = kwargs.get("connection")._con._kwargs.get("host")
                # Fix: log the truncated copies — the original built them but
                # then logged the full str(args), and mutated the caller's
                # kwargs dict in the process.
                _args = [fix_str(_arg, 100) for _arg in args]
                _kwargs = {k: fix_str(_v, 100) for k, _v in kwargs.items()}
                trace_id = get_current_trace_id()
                root_log(
                    f"<log_execute_time_monitor> trace_id: {trace_id} "
                    f"func <<{func.__name__}>> deal over time "
                    f"begin_at: {begin_dt} end_at: {end_dt}, sec: {offset}, "
                    # Fix: original f-string read "…{offset}ex_info{ex_info}…"
                    # with no separator.
                    f"ex_info: {ex_info}, params: {str(_args)}, {str(_kwargs)}"
                )
            return res

        return inner

    return decorator


def except_monitor(func):
    """Decorator: catch any exception, log it with (truncated) arguments,
    and return None instead of raising.
    """

    @functools.wraps(func)
    def inner(*args, **kwargs):
        res = None
        try:
            res = func(*args, **kwargs)
        except Exception as e:
            # truncate for logging only — do not mutate the caller's kwargs
            _args = [fix_str(_arg, 100) for _arg in args]
            _kwargs = {k: fix_str(_v, 100) for k, _v in kwargs.items()}
            root_log(
                "<except_monitor>"
                f"func:{func.__module__}.{func.__name__}, args:{str(_args)}, kwargs:{str(_kwargs)}, "
                f"exc: {traceback.format_exc()} {e}",
                level="error",
            )
        return res

    return inner


def except_return(default=None, echo_raise=True):
    """Decorator factory: on exception return `default` (or call it).

    :param default: fallback value, or a callable invoked as
        default(e=exc, f_args=args, f_kwargs=kwargs)
    :param echo_raise: log the exception before returning the fallback
    """

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Exception as e:
                if echo_raise:
                    _args = [fix_str(_arg, 100) for _arg in args]
                    _kwargs = {k: fix_str(_v, 100) for k, _v in kwargs.items()}
                    root_log(
                        "<except_return>"
                        f"func:{func.__module__}.{func.__name__}, args:{str(_args)}, kwargs:{str(_kwargs)}, "
                        f"exc: {traceback.format_exc()} {e}",
                        level="error",
                    )
                # Fix: hand the ORIGINAL kwargs to the fallback callable —
                # the old code mutated kwargs for logging first, so the
                # callable received the truncated strings.
                return (
                    default(e=e, f_args=args, f_kwargs=kwargs)
                    if callable(default)
                    else default
                )

        return wrapper

    return decorator


def singleton(cls):
    """Class decorator: one shared instance per class."""
    instances = {}

    @functools.wraps(cls)
    def get_instance(*args, **kw):
        if cls not in instances:
            instances[cls] = cls(*args, **kw)
        return instances[cls]

    return get_instance


def singleton_unique(cls):
    """Class decorator: one instance per unique (args, kwargs) combination."""
    instances = {}

    @functools.wraps(cls)
    def get_instance(*args, **kw):
        unique_key = f"{cls}_{args}_{kw}"
        if unique_key not in instances:
            instances[unique_key] = cls(*args, **kw)
        return instances[unique_key]

    return get_instance


def singleton_unique_obj_args(cls):
    """Like singleton_unique, but keys object args by str() — the objects
    must override __str__ meaningfully.
    """
    instances = {}

    @functools.wraps(cls)
    def get_instance(*args, **kw):
        unique_key = f"{cls}_{list(map(str, args))}_{kw}"
        if unique_key not in instances:
            instances[unique_key] = cls(*args, **kw)
        return instances[unique_key]

    return get_instance


def timeout_run(timeout=2, default=None):
    """Decorator factory: run the call in a worker thread with a deadline.

    Returns `default` (and logs) when the call raises or exceeds `timeout`.
    NOTE(review): the worker thread is not cancelled on timeout and the
    executor is never shut down, so a hung call leaks its thread.
    """

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kw):
            try:
                executor = futures.ThreadPoolExecutor(1)
                future = executor.submit(func, *args, **kw)
                return future.result(timeout=timeout)
            except Exception as e:
                root_log(f"timeout_run {func} error {e} args:{args} kw:{kw}")
                return default

        return wrapper

    return decorator


# Cache decorator [picklable return values only]
# todo clear the cache on service restart
def redis_cache_result(handle, redis_key=None, prefix="_fix", sec=3600):
    """Decorator factory: cache the pickled return value in redis.

    :param handle: redis handle exposing get_data / set_data
    :param redis_key: cache key, must be unique; defaults to the function name
    :param prefix: key prefix to avoid collisions with other modules
    :param sec: TTL in seconds, plus random 0-30s jitter
    """

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                _arg = pickle.dumps(args)
            except Exception:
                # silently drop args[0] (e.g. an unpicklable self)
                _arg = pickle.dumps(args[1:])
            _kwargs = pickle.dumps(kwargs)
            # default the key to the function name when none was given
            cache_key = redis_key if redis_key else func.__name__
            # prefix guards against cache-key clashes across modules
            cache_key = f"{prefix}_{cache_key}_{_arg}_{_kwargs}"
            cache_data = handle.get_data(cache_key)
            if cache_data:
                res = pickle.loads(cache_data)
                return res
            res = func(*args, **kwargs)
            handle.set_data(
                cache_key, pickle.dumps(res), ex=sec + random.randint(0, 30)
            )
            return res

        return wrapper

    return decorator
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/utils/decorator.py
decorator.py
import re
import time
import datetime
from enum import Enum

# period types for date_type_trans
DAY = 1
WEEK = 2
MONTH = 3
VERSION = 4


# date conversion helpers
def str2date(date_str):
    """Convert assorted date representations into a ``datetime``.

    Accepts:
      * bytes (decoded as utf8 first)
      * int/float or all-digit str: 8 digits -> YYYYMMDD, otherwise a unix
        timestamp in seconds (13-digit millisecond stamps are scaled down)
      * ISO-8601 strings, with optional fraction / UTC offset / trailing "Z"
        (e.g. "2022-03-08T16:30:00.000Z", "2023-03-08T20:45:17+08:00",
        "2023-01-01 00:00:00")
      * "YYYY-MM-DD HH:MM:SS.ffffff" and "YYYY-MM-DD"
    """
    if isinstance(date_str, bytes):
        date_str = date_str.decode(encoding="utf8")
    if isinstance(date_str, (int, float)) or date_str.isnumeric():
        if len(str(date_str)) == 8:
            # 20230101
            return datetime.datetime.strptime(str(date_str), "%Y%m%d")
        # Fix: numeric strings used to be handed straight to fromtimestamp
        # (TypeError), and millisecond stamps (1672502400000) overflowed.
        timestamp = float(date_str)
        if timestamp >= 1e12:  # millisecond-precision timestamp
            timestamp /= 1000.0
        return datetime.datetime.fromtimestamp(timestamp)

    iso_regex = (
        r"^\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2}:\d{2}(\.\d{1,6})?([+-]\d{2}:\d{2})?$"
    )
    iso_candidate = date_str.replace("Z", "+00:00")
    if re.match(iso_regex, iso_candidate):
        # Fix: parse the Z-normalized string — fromisoformat rejects a
        # trailing "Z" before Python 3.11, and the old code matched the
        # normalized string but parsed the raw one. Checking ISO first also
        # fixes 19-char "…T…" strings that crashed the strptime branch.
        return datetime.datetime.fromisoformat(iso_candidate)

    if len(date_str) == 19:
        # common format: 2023-01-01 00:00:00
        return datetime.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S")

    millisecond_regex = r".*(\.\d{1,6})$"
    if re.match(millisecond_regex, date_str):
        # fractional seconds without timezone
        return datetime.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S.%f")
    # common format: 2023-01-01
    return datetime.datetime.strptime(date_str, "%Y-%m-%d")


def str2date_str(date_str, fmt="%Y-%m-%d %H:%M:%S"):
    """Re-format a date string into another string format."""
    return str2date(date_str).strftime(fmt)


def str2day(date_str):
    """Parse a "YYYYMMDD" day string into a datetime at midnight.

    (The previous docstring claimed the reverse conversion; the code has
    always parsed "%Y%m%d" input.)
    """
    return datetime.datetime.strptime(date_str, "%Y%m%d")


def date2dt_day(date, _h=0, _m=0, _s=0):
    """
    "2021-05-31 12:23:40" to "2021-05-31 00:00:00"
    :param date: datetime obj
    :param _h: hour
    :param _m: minute
    :param _s: second
    :return: datetime obj
    """
    return datetime.datetime(date.year, date.month, date.day, int(_h), int(_m), int(_s))


def date2dt_day_end(date) -> datetime.datetime:
    """
    "2021-05-31 12:23:40" to "2021-05-31 23:59:59"
    :param date: datetime obj
    :return: datetime obj
    """
    return date2dt_day(date, 23, 59, 59)


def day2date(day, fmt="%Y%m%d", end=0) -> datetime.datetime:
    """
    "20210531" to "2021-05-31 00:00:00"
    :param day: day string/int
    :param fmt: input format, default "%Y%m%d"
    :param end: 0: 00:00:00; 1: 23:59:59
    :return: datetime obj
    """
    date = datetime.datetime.strptime(str(day), fmt)
    if end:
        return datetime.datetime(date.year, date.month, date.day, 23, 59, 59)
    return date


def day2str_date(day) -> str:
    """
    '20220301' -> '2022-03-01' (faster than a strptime round-trip)
    """
    day_s = str(day)
    return day_s[:4] + '-' + day_s[4:6] + '-' + day_s[6:]


def date2day(date):
    """
    "2021-05-31 12:23:40" to "20210531"
    """
    return date.strftime("%Y%m%d")


def date2date(date, _h=0, _m=0, _s=0, end=0):
    """
    "2021-05-31 12:23:40" to "2021-05-31 00:00:00"
    :param date: datetime obj
    :param _h: hour
    :param _m: minute
    :param _s: second
    :param end: 0: 00:00:00; 1: 23:59:59
    :return: datetime obj
    """
    if end:
        return datetime.datetime(date.year, date.month, date.day, 23, 59, 59)
    return datetime.datetime(date.year, date.month, date.day, int(_h), int(_m), int(_s))


def day_diff(day1, day2):
    """Signed day count day2 - day1.

    day_diff(20210529, 20210531) -> 2
    (The previous docstring example had the sign reversed.)
    """
    return (day2date(day2) - day2date(day1)).days


def delta_dt_day(date, delta=0, end=0):
    """Return `date` shifted by `delta` days, snapped to day start/end.

    :param date: base datetime
    :param delta: day offset (may be negative)
    :param end: 0: 00:00:00 / 1: 23:59:59
    """
    if end:
        return date2dt_day_end(date) + datetime.timedelta(days=delta)
    return date2dt_day(date) + datetime.timedelta(days=delta)


def add_days(date, delta, end=0):
    """Readable alias for delta_dt_day."""
    return delta_dt_day(date, delta, end)


def date2stamp(dt_date):
    """datetime -> unix timestamp (seconds, local time)."""
    return time.mktime(dt_date.timetuple())


def stamp2str(t_stamp, fmt="%Y-%m-%d %H:%M:%S"):
    """Unix timestamp -> date string.

    :param t_stamp: timestamp in seconds; falsy values yield ""
    :param fmt: output format, default %Y-%m-%d %H:%M:%S
    :return: date string
    """
    if not t_stamp:
        return ""
    time_array = time.localtime(t_stamp)
    return time.strftime(fmt, time_array)


def get_week_str(date, fmt="%Y%m%d"):
    """Return the Monday-to-Sunday span containing `date`.

    get_week_str(20230309) -> "2023-03-06~2023-03-12"
    (ISO weeks start on Monday; the previous docstring example showed a
    Sunday-based week and an input the default fmt cannot parse.)

    :param date: day value parseable with `fmt`
    :param fmt: input format, default "%Y%m%d"
    :return: "YYYY-MM-DD~YYYY-MM-DD"
    """
    sdate = datetime.datetime.strptime(str(date), fmt)
    _, _, s_week_day = sdate.isocalendar()
    sday = (sdate - datetime.timedelta(days=s_week_day - 1)).strftime("%Y-%m-%d")
    eday = (sdate - datetime.timedelta(days=s_week_day - 7)).strftime("%Y-%m-%d")
    return f"{sday}~{eday}"


def date_type_trans(day, date_type=DAY, fmt="%Y%m%d", version_configs=None):
    """Format a day value as its period label.

    :param day: day value, e.g. 20230201
    :param date_type: period type (1: day, 2: week, 3: month, 4: version)
    :param fmt: input format, default "%Y%m%d"
    :param version_configs: list of [version, start_day, end_day]
    :return: period string (or `day` unchanged for unknown types)
    """
    if date_type == DAY:
        return datetime.datetime.strptime(str(day), fmt).strftime("%Y-%m-%d")
    if date_type == WEEK:
        return get_week_str(day, fmt)
    if date_type == MONTH:
        return datetime.datetime.strptime(str(day), fmt).strftime("%Y年%m月")
    if date_type == VERSION:
        if version_configs is None:
            return day
        for version, start_day, end_day in version_configs:
            if start_day <= day <= end_day:
                return f"{version}版本"
        return "未配置版本"
    return day


def to_start_of_interval(_t: datetime.datetime, unit="minute", interval=5):
    """Floor `_t` to the start of its minute/hour interval.

    to_start_of_interval(dt(2023-03-09 11:16:20), 'minute', 5)
        -> dt(2023-03-09 11:15:00)
    to_start_of_interval(dt(2023-03-09 11:16:20), 'hour', 1)
        -> dt(2023-03-09 11:00:00)
    """
    if unit == "minute":
        fix = _t.minute - _t.minute % interval
        _t = _t.replace(minute=fix, second=0, microsecond=0)
    elif unit == "hour":
        fix = _t.hour - _t.hour % interval
        _t = _t.replace(hour=fix, minute=0, second=0, microsecond=0)
    return _t


def split_date_str_by_day(sdate_str, edate_str, day_slice=1):
    """Split [sdate_str, edate_str] into consecutive `day_slice`-day windows.

    An edate of "… 00:00:00" is treated as the end of that day. Each window
    is {"sdate": ..., "edate": ...} with inclusive second-resolution bounds.
    """
    res_list = []
    if not sdate_str or not edate_str:
        return res_list
    # window width in days
    interval = datetime.timedelta(days=day_slice)
    start_dt = datetime.datetime.strptime(sdate_str, "%Y-%m-%d %H:%M:%S")
    edate_str = edate_str.replace("00:00:00", "23:59:59")
    end_dt = datetime.datetime.strptime(edate_str, "%Y-%m-%d %H:%M:%S")
    offset = datetime.timedelta(seconds=1)
    while start_dt < end_dt:
        next_dt = min((start_dt + interval - offset), end_dt)
        res_list.append(
            {
                "sdate": start_dt.strftime("%Y-%m-%d %H:%M:%S"),
                "edate": next_dt.strftime("%Y-%m-%d %H:%M:%S"),
            }
        )
        start_dt = next_dt + offset
    return res_list
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/utils/xdate.py
xdate.py
class ErrorCode:
    """
    Central registry of API error codes.

    Every entry is a ``{"code": int, "msg": str}`` dict that is returned
    verbatim in API responses.  The channel-facing entries at the bottom
    use ``{"errno", "errmsg"}`` keys instead, as required by the channel
    protocol.
    """

    # ---- generic / framework-level codes ----
    ZERO = {"code": 0, "msg": "无"}
    SUCCESS = {"code": 1, "msg": "ok"}
    ERROR_INVALID_PARAM = {"code": -1, "msg": "参数无效"}
    ERROR_RESTFUL_ERROR = {"code": -2, "msg": "提交类型错误"}
    ERROR_SIGNATURE_ERROR = {"code": -3, "msg": "签名错误"}
    ERROR_TIME_OVERTIME = {"code": -4, "msg": "时间超时"}
    ERROR_PLATFORM_FUNCTION_ERROR = {"code": -5, "msg": "运营商映射方法不存在"}
    ERROR_CHECK_PUBLIC = {"code": -6, "msg": "公共校验错误"}
    ERROR_ROUTING_ERROR = {"code": -7, "msg": "请求错误"}
    ERROR_IP_ACCESS_RESTRICTION = {"code": -9, "msg": "限制访问"}

    # ---- codes raised inside handler methods ----
    ERROR_PARAMS_ERROR = {"code": -1001, "msg": "参数错误"}
    ERROR_IP_WHITE_LIST_ERROR = {"code": -1002, "msg": "IP白名单错误"}
    ERROR_SERVER_API_URL_EMPTY = {"code": -1003, "msg": "单服接口不存在"}
    ERROR_REDIS_SET_ERROR = {"code": -1004, "msg": "REDIS设置失败"}
    ERROR_REDIS_PUSH_ERROR = {"code": -1005, "msg": "REDIS入队列失败"}
    ERROR_REQUEST_OFTEN = {"code": -1006, "msg": "请求过于频繁"}
    ERROR_GIFT_LOG_SET_ERROR = {"code": -1007, "msg": "媒体卡记录出错"}
    ERROR_GIFT_CODE_SET_ERROR = {"code": -1008, "msg": "媒体卡设置状态出错"}
    ERROR_NOTICE_VERSION_ERROR = {"code": -1009, "msg": "更新公告版本号错误"}
    ERROR_NOTICE_REWARDS_ERROR = {"code": -1010, "msg": "公告奖励错误"}
    ERROR_PARAMS_ERROR_NULL = {"code": -1011, "msg": "错误没数据返回"}
    ERROR_GIFT_LOG_SET_OFTEN = {"code": -1012, "msg": "记录错误"}
    ERROR_GIFT_CODE_SET_OFTEN = {"code": -1013, "msg": "改状态错误"}
    ERROR_UPDATE_DATA_ERROR = {"code": -1014, "msg": "更新数据错误"}
    ERROR_CERTIFICATION_OFTEN = {"code": -1015, "msg": "该身份证号认证次数过多"}
    ERROR_REPEAT_SUBMISSION = {"code": -1016, "msg": "重复提交"}
    ERROR_REWARD_ERROR = {"code": -1018, "msg": "奖励配置错误"}
    ERROR_ORDER_INFO_ERROR = {"code": -1019, "msg": "查询订单错误"}
    ERROR_CREATE_ORDER_ERROR = {"code": -1020, "msg": "创建订单错误"}

    # ---- codes propagated from downstream interface calls ----
    ERROR_SERVER_API_URL_ERROR = {"code": -2001, "msg": "单服接口错误"}
    ERROR_RECHARGE_ERROR = {"code": -2002, "msg": "充值失败"}
    ERROR_API_PLAYER_ERROR = {"code": -2003, "msg": "单服接口玩家错误"}
    ERROR_API_DATA_EMPTY = {"code": -2004, "msg": "数据为空"}

    # ---- server-side validation codes ----
    ERROR_SERVER_ERROR = {"code": -3001, "msg": "服务器错误"}
    ERROR_SERVER_CONN_ERROR = {"code": -3002, "msg": "服务器链接错误"}
    ERROR_MYSQL_CONN_ERROR = {"code": -3003, "msg": "MySQL链接错误"}
    ERROR_REDIS_CONN_ERROR = {"code": -3004, "msg": "Redis链接错误"}
    ERROR_MYSQL_REDIS_CONN_ERROR = {"code": -3005, "msg": "MySQL和Redis链接错误"}

    # ---- channel-mandated response shapes ----
    ERROR_SIGN_ERROR = {"errno": -3, "errmsg": "签名错误"}
    API_SUCCESS = {"errno": 0, "errmsg": "成功"}
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/utils/error_code.py
error_code.py
"""
List / string helpers shared by the report pipelines.
"""
try:
    # Prefer the faster drop-in ujson; fall back to the stdlib json so the
    # module keeps working where ujson is not installed.
    import ujson as json
except ImportError:
    import json


def lst2str(lst, isdigit=True, symbol=",", warp="'") -> str:
    """
    Join a list into a single string.

    lst2str(['a', 'b', 'c']) -> "'a','b','c'"
    lst2str([1, 2, 3])       -> "1,2,3"

    :param lst: list (a bare int is wrapped; other iterables are converted)
    :param isdigit: join raw values; forced to False automatically when the
                    first element is not numeric
    :param symbol: separator between items
    :param warp: quote character wrapped around non-numeric items
    :return: the joined string ("" for an empty/falsy input)
    """
    if not lst:
        return ""
    if isinstance(lst, int):
        lst = [lst]
    if not isinstance(lst, list):
        lst = list(lst)
    # Auto-detect the simple case: a non-numeric first element forces quoting.
    if not str(lst[0]).isdigit():
        isdigit = False

    def _quote(item):
        return f"{warp}{item}{warp}"

    parts = list(map(str, lst)) if isdigit else list(map(_quote, lst))
    return symbol.join(parts)


def load_js_str_keys(js_str, keys, default=None) -> dict:
    """
    Extract the given keys from a JSON object string.

    :param js_str: JSON object string
    :param keys: iterable of keys to extract
    :param default: value used for missing keys ({} when None).
                    NOTE: the very same default object is shared by every
                    missing key — do not mutate the returned placeholders.
    :return: dict mapping each requested key to its value (or default);
             {} when js_str is empty/falsy
    """
    if default is None:
        default = {}
    if not js_str:
        return {}
    js_dict = json.loads(js_str)
    return {key: js_dict.get(key, default) for key in keys}


def str2list(list_str, split_symbol) -> list:
    """
    Split a string into a list, dropping empty items.

    str2list("#1#2##", "#") -> ['1', '2']

    :param list_str: string to split
    :param split_symbol: separator
    :return: list of non-empty parts
    """
    return [item for item in list_str.split(split_symbol) if item]


def split_list(pending_lst, split_size=50000) -> list:
    """
    Slice a (possibly wrapped) list into consecutive chunks.

    split_list([[1, 2, 3, 4, 5]], 3) -> [[1, 2, 3], [4, 5]]
    split_list([1, 2, 3, 4, 5], 3)   -> [[1, 2, 3], [4, 5]]

    :param pending_lst: a list, or a single-element wrapper [list]
    :param split_size: chunk size; -1 returns the (wrapped) input unchanged
    :return: list of chunks
    """
    if not isinstance(pending_lst, (list, tuple)):
        return pending_lst
    if not isinstance(pending_lst[0], (list, tuple)):
        pending_lst = [pending_lst]
    if split_size == -1:
        return pending_lst
    result = pending_lst[0]
    # ceil(len / split_size) chunks, computed in pure integer arithmetic.
    chunk_cnt = (len(result) + split_size - 1) // split_size
    return [result[i * split_size : (i + 1) * split_size] for i in range(chunk_cnt)]


def split_list_ex(target_list, res_len):
    """
    Interleaved split: deal target_list round-robin into sub-lists of at
    most ``res_len`` items each.

    split_list_ex([1, 2, 3, 4, 5, 6], 3) -> [[1, 3, 5], [2, 4, 6]]

    :param target_list: list to split
    :param res_len: maximum length of each resulting sub-list
    :return: list of sub-lists ([] for non-list input, [[]] for res_len <= 0)
    """
    if not isinstance(target_list, list):
        return []
    if res_len <= 0:
        return [[]]
    total = len(target_list)
    if res_len >= total:
        return [target_list]
    # BUGFIX: this used true division (a float), which made range() raise
    # TypeError for every res_len < len(target_list).
    parts = total // res_len + (1 if total % res_len > 0 else 0)
    res_list = [[] for _ in range(parts)]
    for idx, val in enumerate(target_list):
        res_list[idx % parts].append(val)
    return res_list
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/utils/xListStr.py
xListStr.py
"""
pandas / numpy DataFrame helper utilities for the report pipelines.
"""
import functools
import json
from bisect import bisect_left

import pandas as pd
import numpy as np


def empty_df(columns=None):
    """
    Return an empty DataFrame, optionally pre-declaring its columns.

    :param columns: optional list of column names
    :return: empty pd.DataFrame
    """
    if not columns:
        return pd.DataFrame()
    return pd.DataFrame(columns=columns)


def df_col2row_idx(_df, index_keys, data_key):
    """
    Pivot: lift ``data_key`` values into columns keyed by the last index level.

    Example::

        #   day       player_id      money
        # 0 20210527  1508277000053  6.0
        # 1 20210527  1508277000058  6.0
        # 3 20210528  1508277000058  12.0
        res_df = df_col2row_idx(res_df, ['player_id', 'day'], 'money')
        #   player_id      20210527  20210528
        # 0 1508277000053  6.0       NaN
        # 1 1508277000058  6.0       12.0
    """
    df_index = _df.set_index(index_keys)[data_key]
    _df = df_index.unstack()
    _df = _df.rename_axis(None, axis="columns").reset_index()
    return _df


def cut_bins(val, bins, concat="-"):
    """
    Map a scalar into its bucket label.

    Returns a ``(label, upper_bound)`` tuple for truthy values; falsy values
    (0, None, "") are returned unchanged — NOT as a tuple.

    :param val: value to classify
    :param bins: ascending bucket edges
    :param concat: separator used inside the label string
    """
    if not val:
        return val
    # Clamp values above the last edge into the last bucket.
    if val > bins[-1]:
        val = bins[-1]
    position = bisect_left(bins, val)
    labels = f"{bins[position - 1] + 1}{concat}{ bins[position]}"
    return labels, bins[position]


def df_cut_bins(_df, key, bins, insert_zero=True):
    """
    Apply :func:`cut_bins` to column ``key``.

    :param _df: source DataFrame
    :param key: column to bucket
    :param bins: bucket edges (sorted and int-cast here)
    :param insert_zero: prepend a 0 edge when the smallest edge is not 0
    :return: Series of cut_bins results
    """

    def prefix_bins(_bins):
        # Normalise: int-cast, sort ascending, optionally prepend 0.
        _bins = sorted(map(int, _bins))
        if insert_zero and _bins[0] != 0:
            _bins.insert(0, 0)
        return _bins

    bins = prefix_bins(bins)
    return _df[key].apply(cut_bins, bins=bins)


def cal_round_rate(data, precision=2, suffix="%", invalid_value="-"):
    """
    Round numeric data and render it as strings with a suffix.

    Works on a DataFrame (column-wise recursion), a Series or a scalar.
    inf/NaN values are replaced with ``invalid_value``.

    :param data: DataFrame / Series / int / float
    :param precision: decimal places (0 drops the fraction entirely)
    :param suffix: string appended to each value, default "%"
    :param invalid_value: replacement for inf/NaN (also the fallback for
                          unsupported input types)
    """
    if isinstance(data, pd.DataFrame):
        # BUGFIX: invalid_value was dropped here, so DataFrame input always
        # fell back to the default "-" regardless of the caller's choice.
        return data.apply(cal_round_rate, args=(precision, suffix, invalid_value), axis=0)
    if isinstance(data, pd.Series):
        if str(invalid_value).isdigit():
            data = data.fillna(invalid_value)
        data = data.astype(float).round(precision)
        if precision == 0:
            data = data.astype(int)
        return data.apply(
            lambda d: invalid_value if (d == np.inf or np.isnan(d)) else f"{d}{suffix}"
        )
    if isinstance(data, (int, float)):
        if np.isnan(data) or data == np.inf:
            return invalid_value
        if precision == 0:
            return str(int(data)) + suffix
        return str(round(data, precision)) + suffix
    return invalid_value


def func_cal_round_rate(func, **kw):
    """
    Build an agg/apply-compatible callable that aggregates with ``func``
    and then formats the result via :func:`cal_round_rate`.

    :param func: aggregation callable, or the name of a method on the data
                 (e.g. "sum")
    :param kw: keyword arguments forwarded to cal_round_rate
    """

    @functools.wraps(func)
    def wrapper(data, *args, **kwargs):
        if isinstance(func, str):
            data = getattr(data, func)()
        else:
            data = func(data)
        return cal_round_rate(data, **kw)

    return wrapper


def dict_to_json(data):
    """
    Serialize an (ES-style) object to JSON while keeping non-ASCII text
    (e.g. Chinese) readable.

    Falsy non-string input is stringified as-is; float NaN becomes "".
    """
    if not data:
        if not isinstance(data, (str, bytes)):
            data = str(data)
        return data
    if isinstance(data, float) and pd.isna(data):
        return ""
    return json.dumps(data, ensure_ascii=False)


def df_json_normalize(_df, columns, prefixes=None, sep=".", column_prefix=False):
    """
    Expand dict-valued columns into flat columns.

    :param _df: source DataFrame
    :param columns: names of dict-valued columns to expand
    :param prefixes: optional list of prefixes, parallel to ``columns``
    :param sep: separator between prefix and child key
    :param column_prefix: when True, use each column's own name as prefix
                          (takes precedence over ``prefixes``)
    :return: DataFrame with expanded columns; originals are dropped
    """
    for idx, record_column in enumerate(columns):
        if record_column not in _df.columns:
            continue
        # NaN cells are normalised to {} so every row expands consistently.
        tmp_df = pd.DataFrame(_df[record_column].apply(fill_dict).tolist())
        record_prefix = None
        if column_prefix:
            record_prefix = record_column
        elif prefixes is not None:
            record_prefix = prefixes[idx]
        if record_prefix:
            tmp_df.columns = [f"{record_prefix}{sep}{col}" for col in tmp_df.columns]
        _df[tmp_df.columns] = tmp_df
        _df = _df.drop(columns=record_column)
    return _df


def df_fill_columns(_df, columns, default="", tpe=None):
    """
    Ensure columns exist, creating or NaN-filling them as needed.

    :param _df: target DataFrame (mutated and returned)
    :param columns: a name, a list/tuple of names, or a {name: default} dict
    :param default: value for missing cells (ignored per-column in the dict
                    form only for column *creation*)
    :param tpe: optional dtype applied after fillna for existing columns
    """
    if isinstance(columns, (list, tuple)):
        for column in columns:
            if column not in _df.columns:
                _df[column] = default
            elif tpe:
                _df[column] = _df[column].fillna(default).astype(tpe)
            else:
                _df[column] = _df[column].fillna(default)
    elif isinstance(columns, dict):
        for column, val in columns.items():
            if column not in _df.columns:
                _df[column] = val
            elif tpe:
                _df[column] = _df[column].fillna(default).astype(tpe)
            else:
                _df[column] = _df[column].fillna(default)
    else:
        if columns not in _df.columns:
            _df[columns] = default
        elif tpe:
            _df[columns] = _df[columns].fillna(default).astype(tpe)
        else:
            _df[columns] = _df[columns].fillna(default)
    return _df


def df_rm_columns(_df, columns):
    """
    Safely drop columns (or index labels on a Series), ignoring absentees.

    :param _df: DataFrame or Series
    :param columns: labels to remove
    :return: new DataFrame/Series
    """
    if isinstance(_df, pd.Series):
        rm_columns = [column for column in columns if column in _df.index]
        if rm_columns:
            _df = _df.drop(rm_columns)
    else:
        rm_columns = [column for column in columns if column in _df.columns]
        if rm_columns:
            _df = _df.drop(columns=rm_columns)
    return _df


def fill_dict(data):
    """Replace NaN with {}; pass every real dict (and non-NaN) through."""
    return {} if not isinstance(data, dict) and pd.isna(data) else data


def fill_list(data):
    """Replace NaN with []; pass every real list (and non-NaN) through."""
    return [] if not isinstance(data, list) and pd.isna(data) else data


def div_rate(data_df: pd.DataFrame, top_key, bottom_key, precision=2) -> pd.Series:
    """
    Element-wise division rendered as a percentage string.

    example: data_df["pay_rate"] = div_rate(data_df, "pid_cnt", "act_player_cnt")

    :param top_key: numerator column name, or a list of them
    :param bottom_key: denominator column name
    :param precision: decimal places in the rendered percentage
    """
    fmt_show = f"%0.{precision}f"
    if isinstance(top_key, list):
        return (
            data_df[top_key]
            .div(data_df[bottom_key], axis=0)
            .round(precision + 2)
            .fillna(0)
            .applymap(lambda x: f"{ fmt_show % round(x * 100, precision) }%")
        )
    return (
        data_df[top_key]
        .div(data_df[bottom_key], axis=0)
        .round(precision + 2)
        .fillna(0)
        .apply(lambda x: f"{fmt_show % round(x * 100, precision) }%")
    )


def div_round(data_df: pd.DataFrame, top_key, bottom_key, precision=2) -> pd.Series:
    """
    Element-wise division rounded to ``precision`` (numeric result).

    example: data_df["pay_rate"] = div_round(data_df, "pid_cnt", "act_player_cnt")
    """
    return data_df[top_key].div(data_df[bottom_key], axis=0).round(precision)


def concat_cols(data_df: pd.DataFrame, cols: list, concat_by="|") -> pd.Series:
    """
    Concatenate several columns into one display string per row.

    example: concat_cols(data_df, ["pid_cnt", "pid_rate"]) -> "98|10.0%"
    """
    res = None
    for col in cols:
        if res is None:
            res = data_df[col].astype(str)
        else:
            res = res + data_df[col].astype(str)
        # No trailing separator after the last column.
        if col == cols[-1]:
            continue
        res = res + concat_by
    return res


def df_astype(_df: pd.DataFrame, columns=(), excludes=(), tpe=str):
    """
    Cast columns to a dtype, either by listing them or by excluding a few.

    ``excludes`` wins over ``columns``: when given, every column EXCEPT the
    excluded ones is cast — handy when most columns need converting.

    :param columns: columns to cast
    :param excludes: columns to keep as-is (cast all others)
    :param tpe: target dtype
    """
    if excludes:
        df_columns = _df.columns.tolist()
        columns = list(set(df_columns) - set(excludes))
    if columns:
        _df[columns] = _df[columns].astype(tpe)
    return _df


def show_range_labels(_df, key, bins, insert_zero=True, max_label_fmt=None):
    """
    Per-row bucket labels plus a sortable rank column.

    example::

        money_df[["money_label", "label_rank"]] = show_range_labels(
            money_df, "money", bins=[0, 8, 41], max_label_fmt="{}+"
        )
        # falsy values -> ("", -1); others -> ("<lo>-<hi>", hi)

    NOTE(review): for the last bucket the label is built from the bucket's
    LOWER edge + 1 (e.g. "9+" for bins [0, 8, 41]); the historical docstring
    showed "41+" instead — confirm which rendering callers expect.

    :param key: column to bucket
    :param bins: bucket edges
    :param insert_zero: prepend a 0 edge when the smallest edge is not 0
    :param max_label_fmt: optional format string for the last (clamped) bucket
    """

    def prefix_bins(_bins):
        _bins = sorted(map(int, _bins))
        if insert_zero and _bins[0] != 0:
            _bins.insert(0, 0)
        return _bins

    bins = prefix_bins(bins)
    concat = "-"

    def cut_bins(row):
        val = row[key]
        if not val:
            return "", -1
        if val > bins[-1]:
            val = bins[-1]
        position = bisect_left(bins, val)
        if position <= 0:
            return "", -1
        left_val = bins[position - 1] + 1
        right_val = bins[position]
        labels = f"{left_val}{concat}{right_val}"
        if position == len(bins) - 1 and max_label_fmt is not None:
            labels = max_label_fmt.format(left_val)
        return labels, bins[position]

    return _df.apply(cut_bins, axis=1, result_type="expand")
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/utils/xdataframe.py
xdataframe.py
"""
Thin HTTP helpers plus the project's signed-request protocol
(md5-over-sorted-params with a time-derived salt).
"""
import time
import hashlib
import urllib.parse
import ujson as json
import requests
from celery import current_app
from yyxx_game_pkg.logger.log import root_log as local_log


def http_request(
    url, data, is_https=False, method="post", is_json_type=False, add_headers=None
):
    """
    Fire a single HTTP request and return the raw response body.

    :param url: host + path WITHOUT a scheme; the scheme is chosen here
    :param data: dict payload, form-encoded or JSON per ``is_json_type``
    :param is_https: prepend "https://" instead of "http://"
    :param method: "post" posts the body; anything else issues a GET with
                   the encoded payload appended as the query string
    :param is_json_type: send application/json instead of form encoding
    :param add_headers: extra headers merged over the defaults
    :return: response body bytes, or None on empty body or ANY error
             (errors are logged, never raised — best-effort by design)
    """
    try:
        headers = {}
        if is_json_type is True:
            content_type = "application/json; charset=UTF-8"
        else:
            content_type = "application/x-www-form-urlencoded; charset=UTF-8"
        if is_https is True:
            url = f"https://{url}"
        else:
            url = f"http://{url}"
        headers["Content-Type"] = content_type
        if add_headers:
            headers.update(add_headers)
        post_data = set_params(data) if is_json_type is False else json.dumps(data)
        # NOTE(review): verify=False disables TLS certificate validation on
        # both branches — confirm this is intentional for the target hosts.
        if method == "post":
            result = requests.post(url, data=post_data, headers=headers, verify=False)
        else:
            result = requests.get(url + "?" + post_data, headers=headers, verify=False)
        content = result.content
        if not content:
            return None
        return content
    except Exception as e:
        local_log(f"http_request Error Exception: {e}")
        return None


def md5(md5_str):
    """
    md5 hex digest of a string (signing scheme fixed by the center API).

    :param md5_str: string to hash
    :return: 32-char lowercase hex digest
    """
    sign_str = hashlib.md5()
    sign_str.update(md5_str.encode("utf-8"))
    return sign_str.hexdigest()


def set_params(params=None):
    """
    URL-encode a dict of parameters.

    Lists of dicts and plain dicts are JSON-encoded as a single value;
    lists of scalars are expanded into repeated keys.

    :param params: dict of parameters
    :raises TypeError: when params is not a dict
    :return: urlencoded query string
    """
    if not isinstance(params, dict):
        raise TypeError("You must pass in a dictionary!")
    params_list = []
    for k, _v in params.items():
        if isinstance(_v, list) and _v:
            if isinstance(_v[0], dict):
                params_list.append((k, json.dumps(_v)))
            else:
                params_list.extend([(k, x) for x in _v])
        elif isinstance(_v, dict):
            params_list.append((k, json.dumps(_v)))
        else:
            params_list.append((k, _v))
    return urllib.parse.urlencode(params_list)


def http_push_server(url, data, server_api_key):
    """
    Push a signed payload to a single game-server endpoint.

    The signature is md5(timestamp + md5(sorted_params + api_key)) where
    timestamp = str(t + t % 38975).

    :param url: target endpoint (falsy url is logged and skipped)
    :param data: payload dict, JSON-encoded into the "params" field
    :param server_api_key: shared secret appended before hashing
    :return: raw response bytes, or None on error / missing url
    """
    if not url:
        local_log(f"Error http_push_server url: {url} data: {json.dumps(data)}")
        return None
    _t = int(time.time())
    values = {"time": _t, "params": json.dumps(data)}
    # Sign over "key=value" pairs joined in sorted-key order.
    keys = values.keys()
    keys = sorted(keys)
    params = []
    for key in keys:
        params.append(f"{key}={values[key]}")
    params = "&".join(params)
    # NOTE(review): 38975 is a magic salt shared with the peer's verifier —
    # changing it breaks signature validation on the other side.
    timestamp = str(_t + (_t % 38975))
    _tmp = md5(f"{params}{server_api_key}")
    sign = md5(f"{timestamp}{_tmp}")
    post_data = {"time": _t, "params": data, "sign": sign}
    post_data_log = json.dumps(post_data, ensure_ascii=False)
    local_log(f"http_push_server url:{url} post_data: {post_data_log}")
    result = http_request(url, post_data, False, "post")
    local_log(f"http_push_server url:{url} res: {result}")
    return result


def make_post_data(ex_params, api_key):
    """
    Build a signed, urlencoded POST body (same scheme as http_push_server).

    :param ex_params: payload dict, JSON-encoded into the "params" field
    :param api_key: shared secret appended before hashing
    :return: urlencoded body string containing time, params and sign
    """
    _t = int(time.time())
    values = {"time": _t, "params": json.dumps(ex_params)}
    keys = values.keys()
    keys = sorted(keys)
    params = []
    for key in keys:
        params.append(f"{key}={values[key]}")
    params = "&".join(params)
    timestamp = str(_t + (_t % 38975))
    _tmp = md5(f"{params}{api_key}")
    sign = md5(f"{timestamp}{_tmp}")
    post_data = {"time": _t, "params": ex_params, "sign": sign}
    post_data = set_params(post_data)
    return post_data
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/utils/xhttp.py
xhttp.py
"""
Lazy settings machinery (modelled on django.conf).

Settings are materialised from the module named by the SETTINGS environment
variable on first attribute access, or injected explicitly through
``settings.configure()``.
"""
import copy
import operator
import os
from importlib import import_module

from yyxx_game_pkg.conf import global_settings

# Sentinel meaning "the lazy object has not been materialised yet".
empty = object()

# Environment variable naming the settings module to import.
ENVIRONMENT_VARIABLE = "SETTINGS"


def new_method_proxy(func):
    """Build a method that forwards to the lazily created wrapped object."""

    def inner(self, *args):
        if (_wrapped := self._wrapped) is empty:
            self._setup()
            _wrapped = self._wrapped
        return func(_wrapped, *args)

    # Marker so LazyObject.__getattribute__ lets proxy methods through.
    inner._mask_wrapped = False
    return inner


def unpickle_lazyobject(wrapped):
    """Pickle helper: unpickling a lazy object yields the wrapped object."""
    return wrapped


class ImproperlyConfigured(Exception):
    """Raised when settings are accessed before being configured."""


class UserSettingsHolder:
    """Holder for settings defined at runtime via ``settings.configure()``."""

    # Kept for API parity with Settings.
    SETTINGS_MODULE = None

    def __init__(self, default_settings):
        """
        :param default_settings: fallback object consulted for any setting
                                 not set directly on this holder.
        """
        # Written via __dict__ so our custom __setattr__ is bypassed.
        self.__dict__["_deleted"] = set()
        self.default_settings = default_settings

    def __getattr__(self, name):
        # Only ALL_CAPS names fall through to the defaults; deleted stay deleted.
        if not name.isupper() or name in self._deleted:
            raise AttributeError
        return getattr(self.default_settings, name)

    def __setattr__(self, name, value):
        self._deleted.discard(name)
        # BUGFIX: the value was never stored, so every configure()d setting
        # was silently lost (and __init__ could not even record
        # default_settings, breaking all fallback lookups).
        super().__setattr__(name, value)

    def __delattr__(self, name):
        self._deleted.add(name)
        if hasattr(self, name):
            super().__delattr__(name)

    def __dir__(self):
        return sorted(
            s
            for s in [*self.__dict__, *dir(self.default_settings)]
            if s not in self._deleted
        )

    def is_overridden(self, setting):
        """True when the setting was set here, deleted, or overridden upstream."""
        deleted = setting in self._deleted
        set_locally = setting in self.__dict__
        set_on_default = getattr(
            self.default_settings, "is_overridden", lambda s: False
        )(setting)
        return deleted or set_locally or set_on_default

    def __repr__(self):
        return f"<{self.__class__.__name__}>"


class Settings:
    """Concrete settings: package defaults overlaid with a user module."""

    def __init__(self, settings_module=None):
        """
        :param settings_module: dotted path of the user settings module.
        """
        # Start from the package-level defaults ...
        for setting in dir(global_settings):
            if setting.isupper():
                setattr(self, setting, getattr(global_settings, setting))
        # ... then overlay every ALL_CAPS name from the user module.
        self.SETTINGS_MODULE = settings_module
        module = import_module(self.SETTINGS_MODULE)
        self._explicit_settings = set()
        for setting in dir(module):
            if setting.isupper():
                setting_value = getattr(module, setting)
                setattr(self, setting, setting_value)
                self._explicit_settings.add(setting)

    def __repr__(self):
        return f'<{self.__class__.__name__} "{self.SETTINGS_MODULE}">'


class LazyObject:
    """Delay instantiation of a wrapped object until first use."""

    _wrapped = None

    def __init__(self):
        # NOTE: subclasses overriding __init__ must also set _wrapped to empty.
        self._wrapped = empty

    def __getattribute__(self, name):
        if name == "_wrapped":
            # Direct access; going through the proxies below would recurse.
            return super().__getattribute__(name)
        value = super().__getattribute__(name)
        # Attributes not explicitly marked _mask_wrapped=False are hidden so
        # that __getattr__ resolves them on the wrapped object instead.
        if not getattr(value, "_mask_wrapped", True):
            raise AttributeError
        return value

    __getattr__ = new_method_proxy(getattr)

    def __setattr__(self, name, value):
        if name == "_wrapped":
            # Assign directly to __dict__ to dodge infinite recursion.
            self.__dict__["_wrapped"] = value
        else:
            if self._wrapped is empty:
                self._setup()
            setattr(self._wrapped, name, value)

    def __delattr__(self, name):
        if name == "_wrapped":
            raise TypeError("can't delete _wrapped.")
        if self._wrapped is empty:
            self._setup()
        delattr(self._wrapped, name)

    def _setup(self):
        """Initialise the wrapped object; must be supplied by subclasses."""
        raise NotImplementedError(
            "subclasses of LazyObject must provide a _setup() method"
        )

    def __reduce__(self):
        if self._wrapped is empty:
            self._setup()
        return unpickle_lazyobject, (self._wrapped,)

    def __copy__(self):
        if self._wrapped is empty:
            # Uninitialised: copy the lazy wrapper itself, not the target.
            return type(self)()
        else:
            return copy.copy(self._wrapped)

    def __deepcopy__(self, memo):
        if self._wrapped is empty:
            result = type(self)()
            memo[id(self)] = result
            return result
        return copy.deepcopy(self._wrapped, memo)

    __bytes__ = new_method_proxy(bytes)
    __str__ = new_method_proxy(str)
    __bool__ = new_method_proxy(bool)

    # Introspection support
    __dir__ = new_method_proxy(dir)

    # Need to pretend to be the wrapped class, for the sake of objects that
    # care about this (especially in equality tests)
    __class__ = property(new_method_proxy(operator.attrgetter("__class__")))
    __eq__ = new_method_proxy(operator.eq)
    __lt__ = new_method_proxy(operator.lt)
    __gt__ = new_method_proxy(operator.gt)
    __ne__ = new_method_proxy(operator.ne)
    __hash__ = new_method_proxy(hash)

    # List/Tuple/Dictionary methods support
    __getitem__ = new_method_proxy(operator.getitem)
    __setitem__ = new_method_proxy(operator.setitem)
    __delitem__ = new_method_proxy(operator.delitem)
    __iter__ = new_method_proxy(iter)
    __len__ = new_method_proxy(len)
    __contains__ = new_method_proxy(operator.contains)


class LazySettings(LazyObject):
    """Public ``settings`` object: resolves lazily from $SETTINGS."""

    def _setup(self, name=None):
        """
        Load the settings module pointed to by the environment variable.

        :param name: attribute whose access triggered setup (error text only)
        :raises ImproperlyConfigured: when the env var is unset or empty.
        """
        settings_module = os.environ.get(ENVIRONMENT_VARIABLE)
        if not settings_module:
            desc = ("setting %s" % name) if name else "settings"
            raise ImproperlyConfigured(
                "Requested %s, but settings are not configured. "
                "You must either define the environment variable %s "
                "or call settings.configure() before accessing settings."
                % (desc, ENVIRONMENT_VARIABLE)
            )
        self._wrapped = Settings(settings_module)

    def __repr__(self):
        if self._wrapped is empty:
            return "<LazySettings [Unevaluated]>"
        return '<LazySettings "%(settings_module)s">' % {
            "settings_module": self._wrapped.SETTINGS_MODULE,
        }

    def __getattr__(self, name):
        if (_wrapped := self._wrapped) is empty:
            self._setup(name)
            _wrapped = self._wrapped
        val = getattr(_wrapped, name)
        # Cache on the wrapper so later reads skip __getattr__ entirely.
        self.__dict__[name] = val
        return val

    def __setattr__(self, name, value):
        if name == "_wrapped":
            # A fresh wrapped object invalidates every cached attribute.
            self.__dict__.clear()
        else:
            self.__dict__.pop(name, None)
        super().__setattr__(name, value)

    def __delattr__(self, name):
        super().__delattr__(name)
        self.__dict__.pop(name, None)

    def configure(self, default_settings=None, **options):
        """
        Configure settings manually instead of via a settings module.

        :param default_settings: fallback object for unset settings
        :param options: UPPERCASE setting names and values
        :raises RuntimeError: when settings were already configured
        :raises TypeError: for non-uppercase option names
        """
        if self._wrapped is not empty:
            raise RuntimeError("Settings already configured.")
        holder = UserSettingsHolder(default_settings)
        for name, value in options.items():
            if not name.isupper():
                raise TypeError(f"Setting {name} must be uppercase.")
            setattr(holder, name, value)
        self._wrapped = holder

    @property
    def configured(self):
        """True once settings have been materialised."""
        return self._wrapped is not empty


settings = LazySettings()
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/conf/__init__.py
__init__.py
"""
RSA helpers (PyCryptodome): key generation, PKCS#1 v1.5 encrypt/decrypt
and SHA256 + PKCS#1 v1.5 sign/verify.
"""
import base64

from Crypto import Random
from Crypto.Cipher import PKCS1_v1_5 as PKCS1_cipher
from Crypto.Hash import SHA256
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5 as PKCS1_signature


class RSACrypto:
    """Stateless RSA utility methods."""

    @staticmethod
    def generator_rsa(filename=None, bits=1024):
        """
        Generate an RSA key pair, optionally writing it to PEM files.

        :param filename: when given, keys are written to
                         "<filename>_private.pem" / "<filename>_public.pem"
        :param bits: key size in bits; 1024 kept as the default for backward
                     compatibility. NOTE(security): 1024-bit RSA is
                     considered weak — pass bits=2048 or more for new keys.
        :return: (private_key_pem, public_key_pem) as bytes
        """
        random_generator = Random.new().read
        rsa = RSA.generate(bits, random_generator)
        rsa_private_key = rsa.exportKey()
        rsa_public_key = rsa.publickey().exportKey()
        if filename is not None:
            # BUGFIX: the filename argument was ignored; a literal
            # "(unknown)" placeholder path was written instead.
            with open(f"{filename}_private.pem", "w") as f:
                f.write(rsa_private_key.decode())
            with open(f"{filename}_public.pem", "w") as f:
                f.write(rsa_public_key.decode())
        return rsa_private_key, rsa_public_key

    @staticmethod
    def rsa_public_crypto(raw_str: str, public_key: str) -> str:
        """
        Encrypt with the public key.

        :param raw_str: plaintext string
        :param public_key: PEM public key
        :return: base64-encoded ciphertext
        """
        cipher = PKCS1_cipher.new(RSA.importKey(public_key))
        encrypt_text = base64.b64encode(cipher.encrypt(raw_str.encode("utf-8")))
        return encrypt_text.decode("utf-8")

    @staticmethod
    def rsa_private_crypto(crypto_str: str, private_key: str) -> str:
        """
        Decrypt with the private key.

        :param crypto_str: base64-encoded ciphertext
        :param private_key: PEM private key
        :return: plaintext string
        """
        cipher = PKCS1_cipher.new(RSA.importKey(private_key))
        # The second argument is the sentinel PyCryptodome returns on a
        # padding failure instead of raising.
        decrypt_text = cipher.decrypt(base64.b64decode(crypto_str), Random.new().read)
        return decrypt_text.decode("utf-8")

    @staticmethod
    def rsa_private_sign(raw_str: str, private_key: str) -> str:
        """
        Sign with the private key (SHA256 digest, PKCS#1 v1.5 padding).

        :param raw_str: string to sign
        :param private_key: PEM private key
        :return: base64-encoded signature
        """
        private_key = RSA.importKey(private_key)
        signer = PKCS1_signature.new(private_key)
        digest = SHA256.new()
        digest.update(raw_str.encode("utf8"))
        sign = signer.sign(digest)
        signature = base64.b64encode(sign)
        signature = signature.decode("utf-8")
        return signature

    @staticmethod
    def rsa_public_sign(raw_str: str, sign: str, public_key: str) -> bool:
        """
        Verify a signature with the public key.

        :param raw_str: original signed string
        :param sign: base64-encoded signature
        :param public_key: PEM public key
        :return: True when the signature matches
        """
        public_key = RSA.importKey(public_key)
        verifier = PKCS1_signature.new(public_key)
        digest = SHA256.new()
        digest.update(raw_str.encode("utf-8"))
        return verifier.verify(digest, base64.b64decode(sign))
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/crypto/rsa.py
rsa.py
"""
OpenTelemetry tracing helpers: Jaeger registration, a span decorator and
small wrappers around the currently active span.
"""
from functools import wraps

from opentelemetry import trace
from opentelemetry.exporter.jaeger.thrift import JaegerExporter
from opentelemetry.sdk.resources import SERVICE_NAME, Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from opentelemetry.trace import get_current_span
from opentelemetry.trace.status import Status, StatusCode

# Module-level tracer shared by every helper below.
_tracer = trace.get_tracer(__name__)


def get_tracer():
    """Return the module's global tracer instance."""
    return _tracer


def register_to_jaeger(service_name: str, jaeger_host: str, jaeger_port: int = 6831):
    """
    Register this process with a Jaeger agent so spans are exported.

    Args:
        service_name: service name shown in Jaeger
        jaeger_host: Jaeger agent host
        jaeger_port: Jaeger agent (thrift/UDP) port

    Returns:
        TracerProvider
    """
    provider = TracerProvider(resource=Resource.create({SERVICE_NAME: service_name}))
    trace.set_tracer_provider(provider)
    # create a JaegerExporter
    jaeger_exporter = JaegerExporter(
        agent_host_name=jaeger_host,
        agent_port=jaeger_port,
    )
    # Create a BatchSpanProcessor and add the exporter to it
    span_processor = BatchSpanProcessor(jaeger_exporter)
    # add to the tracer
    trace.get_tracer_provider().add_span_processor(span_processor)
    # BUGFIX: the docstring always promised the provider; now it is returned.
    return provider


def trace_span(ret_trace_id: bool = False, set_attributes: bool = False, operation_name: str = ""):
    """
    Decorator that runs the wrapped function inside a new span.

    :param ret_trace_id: also return hex(trace_id) as a second return value
    :param set_attributes: record str(args) / str(kwargs) on the span
    :param operation_name: explicit span name (defaults to module.qualname)
    """

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            _operation_name = operation_name
            if not _operation_name:
                _operation_name = f"{func.__module__}.{func.__name__}"
            with _tracer.start_as_current_span(_operation_name) as span:
                # BUGFIX: attributes are now recorded BEFORE the call, so
                # they survive exceptions and are no longer unreachable when
                # ret_trace_id is True (the old code returned early first).
                if set_attributes:
                    span.set_attributes({"kwargs": str(kwargs), "args": str(args)})
                try:
                    result = func(*args, **kwargs)
                except Exception as e:
                    span.set_status(Status(StatusCode.ERROR, str(e)))
                    raise
                if ret_trace_id:
                    return result, hex(span.get_span_context().trace_id)
                return result

        return wrapper

    return decorator


def get_current_trace_id():
    """Return the current request's trace id as a hex string."""
    span = get_current_span()
    trace_id = span.get_span_context().trace_id
    return hex(trace_id)


def add_span_tags(attributes: dict):
    """Attach the given attributes (tags) to the current span."""
    span = get_current_span()
    span.set_attributes(attributes)


def add_span_events(event_name: str, events: dict):
    """Add a named event with attributes to the current span."""
    span = get_current_span()
    span.add_event(event_name, events)
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/xtrace/helper.py
helper.py
"""
Django middleware hooking every request into the active OpenTelemetry span:
renames the span after the request, records request/response payloads as
span events, and propagates the W3C traceparent header on the response.
"""
import gzip
import json

from django.conf import settings
from django.utils.deprecation import MiddlewareMixin
from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator

import yyxx_game_pkg.xtrace.helper as xtrace_helper
from yyxx_game_pkg.xtrace.django.util.common import get_django_middleware_setting


class _DjangoJaegerMiddleware(MiddlewareMixin):
    # JAEGER config is read once, at class-definition (import) time.
    _jaeger_config = getattr(settings, "JAEGER", {})
    # Logged payloads are truncated to this many characters.
    _log_max_size = _jaeger_config.get("log_max_size", 2048)
    # Whether to record response bodies on the span.
    _is_log = _jaeger_config.get("is_log", False)
    # Request paths excluded from payload logging.
    _ignore_paths = _jaeger_config.get("ignore_paths", [])

    def __call__(self, request):
        # Best-effort tracing: any failure in here must never break the
        # request, hence the broad except + print.
        try:
            span = xtrace_helper.get_current_span()
            path_info = request.environ['PATH_INFO']
            # Rename the auto-created span to "<METHOD> <path>".
            span.update_name(f"{request.environ['REQUEST_METHOD']} {path_info}")
            if path_info not in self._ignore_paths:
                # NOTE(review): request.REQUEST only exists on very old
                # Django versions; on modern Django the GET+POST merge
                # below is the branch that actually runs — confirm.
                if getattr(request, "REQUEST", None):
                    request_params = dict(request.REQUEST)
                else:
                    request_params = {}
                    request_params.update(request.GET)
                    request_params.update(request.POST)
                span.add_event("request", {"params": json.dumps(request_params)[:self._log_max_size]})
        except Exception as e:
            print(e)
        return super().__call__(request)

    def process_response(self, request, response):
        try:
            if self._is_log and (request.environ['PATH_INFO'] not in self._ignore_paths):
                span = xtrace_helper.get_current_span()
                # Tag the span with the operating admin account, when present.
                admin_alias = getattr(getattr(request, "admin", None), "alias", None)
                if admin_alias:
                    span.set_attributes({"request.admin.alias": admin_alias})
                settings_middleware = getattr(settings, get_django_middleware_setting(), [])
                # Gzipped bodies must be decompressed before logging.
                if "django.middleware.gzip.GZipMiddleware" in settings_middleware and response.headers.get(
                        "Content-Encoding") == 'gzip':
                    span.add_event("response", {"params": gzip.decompress(response.content).decode()[:self._log_max_size]})
                else:
                    span.add_event("response", {"params": response.content.decode()[:self._log_max_size]})
            # inject trace parent to response header
            TraceContextTextMapPropagator().inject(response.headers)
        except Exception as e:
            print(e)
        return response

    def process_exception(self, request, exception):
        # Mark the current span as errored; never raise from tracing code.
        try:
            span = xtrace_helper.get_current_span()
            # Status/StatusCode are re-exported by xtrace_helper's imports.
            span.set_status(xtrace_helper.Status(xtrace_helper.StatusCode.ERROR, exception.__str__()))
        except Exception as e:
            print(e)
        # Returning None lets Django's normal exception handling continue.
        return None
yyxx-game-pkg
/yyxx_game_pkg-2023.8.24.1-py3-none-any.whl/yyxx_game_pkg/xtrace/django/middleware.py
middleware.py
"""
Date-string helpers: move or diff 'yyyy[-mm[-dd[Thh[:mm[:ss]]]]]' values
while preserving the precision/format of the input (strings stay strings,
date/datetime objects stay objects).

The ``_parse`` / ``_strftime`` helpers used throughout are defined later
in this module.
"""
import datetime
from dateutil.relativedelta import relativedelta
import re
import math
from typing import Tuple, Union


def today() -> str:
    """Today as 'yyyy-mm-dd'.

    >>> today() == datetime.date.today().strftime('%Y-%m-%d')
    True
    """
    return datetime.date.today().strftime("%Y-%m-%d")


def yesterday() -> str:
    """Yesterday as 'yyyy-mm-dd'.

    >>> yesterday() == (datetime.date.today() - datetime.timedelta(1)).strftime('%Y-%m-%d')
    True
    """
    return yyyy_mm_dd(move_yyyy_mm_dd(today(), -1))


def tomorrow() -> str:
    """Tomorrow as 'yyyy-mm-dd'.

    >>> tomorrow() == (datetime.date.today() + datetime.timedelta(1)).strftime('%Y-%m-%d')
    True
    """
    return yyyy_mm_dd(move_yyyy_mm_dd(today(), 1))


def now() -> str:
    """Current datetime as 'yyyy-mm-ddThh:mm:ss'.

    >>> now() == datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S')
    True
    """
    return datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S")


def move_yyyy(yyyy_mm_dd: Union[str, datetime.date], by: int) -> Union[str, datetime.date]:
    """Shift a date by *by* years, keeping the input's precision.

    >>> move_yyyy('2020', 1)
    '2021'
    >>> move_yyyy('2020-02-14T10:20:30', 2)
    '2022-02-14T10:20:30'
    >>> move_yyyy(datetime.date(2020, 2, 14), 1)
    datetime.date(2021, 2, 14)
    """
    parsed, fmt = _parse(yyyy_mm_dd, at_least="%Y")
    return _strftime(parsed + relativedelta(years=by), fmt)


def move_yyyy_mm(yyyy_mm_dd: Union[str, datetime.date], by: int) -> Union[str, datetime.date]:
    """Shift a date by *by* months (day clamped to month end when needed).

    >>> move_yyyy_mm('2020-01-31', 1)
    '2020-02-29'
    >>> move_yyyy_mm('2020-01-31', 25)
    '2022-02-28'
    >>> move_yyyy_mm(datetime.date(2020, 2, 14), 1)
    datetime.date(2020, 3, 14)
    """
    parsed, fmt = _parse(yyyy_mm_dd, at_least="%Y-%m")
    return _strftime(parsed + relativedelta(months=by), fmt)


def move_yyyy_mm_dd(yyyy_mm_dd: Union[str, datetime.date], by: int) -> Union[str, datetime.date]:
    """Shift a date by *by* days.

    >>> move_yyyy_mm_dd('2020-12-31', 1)
    '2021-01-01'
    >>> move_yyyy_mm_dd(datetime.date(2020, 2, 29), 1)
    datetime.date(2020, 3, 1)
    """
    parsed, fmt = _parse(yyyy_mm_dd, at_least="%Y-%m-%d")
    return _strftime(parsed + relativedelta(days=by), fmt)


def move_yyyy_mm_dd_hh(yyyy_mm_dd_hh_mm_ss: Union[str, datetime.datetime], by: int) -> Union[str, datetime.date]:
    """Shift a datetime by *by* hours.

    >>> move_yyyy_mm_dd_hh('2020-12-31T23', 1)
    '2021-01-01T00'
    >>> move_yyyy_mm_dd_hh(datetime.datetime(2020, 2, 29, 10, 20, 30), 1)
    datetime.datetime(2020, 2, 29, 11, 20, 30)
    """
    parsed, fmt = _parse(yyyy_mm_dd_hh_mm_ss, at_least="%Y-%m-%dT%H")
    return _strftime(parsed + relativedelta(hours=by), fmt)


def move_yyyy_mm_dd_hh_mm(yyyy_mm_dd_hh_mm_ss: Union[str, datetime.datetime], by: int) -> Union[str, datetime.date]:
    """Shift a datetime by *by* minutes.

    >>> move_yyyy_mm_dd_hh_mm('2020-12-31T23:59', 1)
    '2021-01-01T00:00'
    >>> move_yyyy_mm_dd_hh_mm(datetime.datetime(2020, 2, 29, 10, 20, 30), 1)
    datetime.datetime(2020, 2, 29, 10, 21, 30)
    """
    parsed, fmt = _parse(yyyy_mm_dd_hh_mm_ss, at_least="%Y-%m-%dT%H:%M")
    return _strftime(parsed + relativedelta(minutes=by), fmt)


def move_yyyy_mm_dd_hh_mm_ss(yyyy_mm_dd_hh_mm_ss: Union[str, datetime.datetime], by: int) -> Union[str, datetime.date]:
    """Shift a datetime by *by* seconds.

    >>> move_yyyy_mm_dd_hh_mm_ss('2020-12-31T23:59:59', 1)
    '2021-01-01T00:00:00'
    >>> move_yyyy_mm_dd_hh_mm_ss(datetime.datetime(2020, 2, 29, 10, 20, 30), 1)
    datetime.datetime(2020, 2, 29, 10, 20, 31)
    """
    parsed, fmt = _parse(yyyy_mm_dd_hh_mm_ss, at_least="%Y-%m-%dT%H:%M:%S")
    return _strftime(parsed + relativedelta(seconds=by), fmt)


def diff_yyyy(a: Union[str, datetime.date], b: Union[str, datetime.date]) -> int:
    """Whole years from *a* to *b* (negative when b precedes a).

    >>> diff_yyyy("2020", "2021")
    1
    >>> diff_yyyy("2020-02-14T10:20:30", "2021-02-14T10:20:29")
    0
    >>> diff_yyyy("2021-02-14T10:20:30", "2020-02-14T10:20:30")
    -1
    """
    start, _ = _parse(a, at_least="%Y")
    end, _ = _parse(b, at_least="%Y")
    return relativedelta(end, start).years


def diff_yyyy_mm(a: Union[str, datetime.date], b: Union[str, datetime.date]) -> int:
    """Whole months from *a* to *b*.

    >>> diff_yyyy_mm("2020-02-14T10:20:30", "2021-02-14T10:20:30")
    12
    >>> diff_yyyy_mm("2020-02-14T10:20:30", "2020-01-14T10:20:30")
    -1
    """
    start, _ = _parse(a, at_least="%Y-%m")
    end, _ = _parse(b, at_least="%Y-%m")
    delta = relativedelta(end, start)
    return delta.years * 12 + delta.months


def diff_yyyy_mm_dd(a: Union[str, datetime.date], b: Union[str, datetime.date]) -> int:
    """Whole days from *a* to *b*.

    >>> diff_yyyy_mm_dd("2020-02-01", "2020-03-01")
    29
    >>> diff_yyyy_mm_dd("2020-02-14T10", "2020-02-15T09")
    0
    """
    start, _ = _parse(a, at_least="%Y-%m-%d")
    end, _ = _parse(b, at_least="%Y-%m-%d")
    return (end - start).days


def diff_yyyy_mm_dd_hh(a: Union[str, datetime.datetime], b: Union[str, datetime.datetime]) -> int:
    """Whole hours from *a* to *b* (floored).

    >>> diff_yyyy_mm_dd_hh("2020-02-01T10", "2020-02-01T11")
    1
    >>> diff_yyyy_mm_dd_hh("2020-02-14T10:30", "2020-02-14T11:29")
    0
    """
    start, _ = _parse(a, at_least="%Y-%m-%dT%H")
    end, _ = _parse(b, at_least="%Y-%m-%dT%H")
    return math.floor((end - start).total_seconds() / 3600)


def diff_yyyy_mm_dd_hh_mm(a: Union[str, datetime.datetime], b: Union[str, datetime.datetime]) -> int:
    """Whole minutes from *a* to *b* (floored).

    >>> diff_yyyy_mm_dd_hh_mm("2020-02-01T10:20", "2020-02-01T10:21")
    1
    >>> diff_yyyy_mm_dd_hh_mm("2020-02-14T10:30", "2020-02-14T10:30:30")
    0
    """
    start, _ = _parse(a, at_least="%Y-%m-%dT%H:%M")
    end, _ = _parse(b, at_least="%Y-%m-%dT%H:%M")
    return math.floor((end - start).total_seconds() / 60)
def diff_yyyy_mm_dd_hh_mm_ss(a: Union[str, datetime.datetime], b: Union[str, datetime.datetime]) -> int: """ Returns the amount of seconds between datetime A and datetime B >>> diff_yyyy_mm_dd_hh_mm_ss("2020-02-01T10:20:30", "2020-02-01T10:20:31") 1 >>> diff_yyyy_mm_dd_hh_mm_ss("2020-02-14T10:20:30", "2021-02-14T10:20:30") 31622400 >>> diff_yyyy_mm_dd_hh_mm_ss("2020-02-14T10:20:30", "2020-02-14T10:20:29") -1 >>> diff_yyyy_mm_dd_hh_mm_ss(datetime.datetime(2020, 2, 14, 10, 20, 30), datetime.datetime(2020, 2, 14, 10, 20, 32)) 2 """ date_a, _ = _parse(a, at_least="%Y-%m-%dT%H:%M:%S") date_b, _ = _parse(b, at_least="%Y-%m-%dT%H:%M:%S") return math.floor((date_b - date_a).total_seconds()) def start_of_yyyy(yyyy_mm_dd: Union[str, datetime.date]) -> Union[str, datetime.date]: """ Returns first day of the year of a given date >>> start_of_yyyy('2020') '2020-01-01' >>> start_of_yyyy('2020-05-14') '2020-01-01' >>> start_of_yyyy(datetime.date(2020, 5, 14)) datetime.date(2020, 1, 1) """ date, pattern = _parse(yyyy_mm_dd, at_least="%Y") date = datetime.datetime(date.year, 1, 1, 0, 0) if pattern not in ["date", "datetime"]: pattern = "%Y-%m-%d" return _strftime(date, pattern) def start_of_yyyy_mm(yyyy_mm_dd: Union[str, datetime.date]) -> Union[str, datetime.date]: """ Returns first day of the month of a given date >>> start_of_yyyy_mm('2020-05') '2020-05-01' >>> start_of_yyyy_mm('2020-05-14') '2020-05-01' >>> start_of_yyyy_mm(datetime.date(2020, 5, 14)) datetime.date(2020, 5, 1) """ date, pattern = _parse(yyyy_mm_dd, at_least="%Y-%m") date = datetime.datetime(date.year, date.month, 1, 0, 0) if pattern not in ["date", "datetime"]: pattern = "%Y-%m-%d" return _strftime(date, pattern) def start_of_yyyy_mm_dd(yyyy_mm_dd: Union[str, datetime.date]) -> Union[str, datetime.datetime]: """ Returns first datetime of the day of a given date >>> start_of_yyyy_mm_dd('2020-05-14') '2020-05-14T00:00:00' >>> start_of_yyyy_mm_dd('2020-05-14T13:25:10') '2020-05-14T00:00:00' >>> 
start_of_yyyy_mm_dd(datetime.date(2020, 5, 14)) datetime.datetime(2020, 5, 14, 0, 0) """ date, pattern = _parse(yyyy_mm_dd, at_least="%Y-%m-%d") date = datetime.datetime(date.year, date.month, date.day, 0, 0) if pattern in ["date", "datetime"]: return date return date.strftime("%Y-%m-%dT%H:%M:%S") def start_of_yyyy_mm_dd_hh(yyyy_mm_dd: Union[str, datetime.datetime]) -> Union[str, datetime.datetime]: """ Returns the start of the hour of a given datetime >>> start_of_yyyy_mm_dd_hh('2020-05-14T13:25:10') '2020-05-14T13:00:00' >>> start_of_yyyy_mm_dd_hh(datetime.datetime(2020, 5, 14, 23, 59)) datetime.datetime(2020, 5, 14, 23, 0) """ date, pattern = _parse(yyyy_mm_dd, at_least="%Y-%m-%dT%H") date = datetime.datetime(date.year, date.month, date.day, date.hour, 0) if pattern in ["date", "datetime"]: return date return date.strftime("%Y-%m-%dT%H:%M:%S") def start_of_yyyy_mm_dd_hh_mm(yyyy_mm_dd: Union[str, datetime.datetime]) -> Union[str, datetime.datetime]: """ Returns the same datetime but with seconds at 0 >>> start_of_yyyy_mm_dd_hh_mm('2020-05-14T13:25:10') '2020-05-14T13:25:00' >>> start_of_yyyy_mm_dd_hh_mm(datetime.datetime(2020, 5, 14, 23, 59, 59)) datetime.datetime(2020, 5, 14, 23, 59) """ date, pattern = _parse(yyyy_mm_dd, at_least="%Y-%m-%dT%H:%M") date = datetime.datetime(date.year, date.month, date.day, date.hour, date.minute) if pattern in ["date", "datetime"]: return date return date.strftime("%Y-%m-%dT%H:%M:%S") def end_of_yyyy(yyyy_mm_dd: Union[str, datetime.date]) -> Union[str, datetime.date]: """ Returns last day of the year of a given date >>> end_of_yyyy('2020') '2020-12-31' >>> end_of_yyyy('2020-05-14') '2020-12-31' >>> end_of_yyyy(datetime.date(2020, 5, 14)) datetime.date(2020, 12, 31) """ date, _ = _parse(yyyy_mm_dd, at_least="%Y") return move_yyyy_mm_dd(start_of_yyyy(move_yyyy(yyyy_mm_dd, 1)), -1) def end_of_yyyy_mm(yyyy_mm_dd: Union[str, datetime.date]) -> Union[str, datetime.date]: """ Returns last day of the month of a given date >>> 
end_of_yyyy_mm('2020-02') '2020-02-29' >>> end_of_yyyy_mm('2020-05-14') '2020-05-31' >>> end_of_yyyy_mm(datetime.date(2020, 5, 14)) datetime.date(2020, 5, 31) """ return move_yyyy_mm_dd(start_of_yyyy_mm(move_yyyy_mm(yyyy_mm_dd, 1)), -1) def end_of_yyyy_mm_dd(yyyy_mm_dd: Union[str, datetime.date]) -> Union[str, datetime.date]: """ Returns last datetime of the day of a given date >>> end_of_yyyy_mm_dd('2020-05-14') '2020-05-14T23:59:59' >>> end_of_yyyy_mm_dd('2020-05-14T13:25:10') '2020-05-14T23:59:59' >>> end_of_yyyy_mm_dd(datetime.date(2020, 5, 14)) datetime.datetime(2020, 5, 14, 23, 59, 59) """ date, _ = _parse(yyyy_mm_dd, at_least="%Y") return move_yyyy_mm_dd_hh_mm_ss(start_of_yyyy_mm_dd(move_yyyy_mm_dd(yyyy_mm_dd, 1)), -1) def yyyy(yyyy_mm_dd: Union[str, datetime.date]) -> str: """ Extracts the year of a given date >>> yyyy('2020-05-14') '2020' >>> yyyy(datetime.date(2020, 5, 14)) '2020' """ date, _ = _parse(yyyy_mm_dd, at_least="%Y") return date.strftime("%Y") def yyyy_mm(yyyy_mm_dd: Union[str, datetime.date]) -> str: """ Extracts the year and month of a given date >>> yyyy_mm('2020-05-14') '2020-05' >>> yyyy_mm(datetime.date(2020, 5, 14)) '2020-05' """ date, _ = _parse(yyyy_mm_dd, at_least="%Y-%m") return date.strftime("%Y-%m") def yyyy_mm_dd(yyyy_mm_dd: Union[str, datetime.date]) -> str: """ Extracts the date of a given datetime >>> yyyy_mm_dd('2020-05-14T10:20:30') '2020-05-14' >>> yyyy_mm_dd(datetime.date(2020, 5, 14)) '2020-05-14' """ date, _ = _parse(yyyy_mm_dd, at_least="%Y-%m-%d") return date.strftime("%Y-%m-%d") def hh_mm_ss(yyyy_mm_dd_hh_mm_ss: Union[str, datetime.datetime]) -> str: """ Extracts the time of a given datetime >>> hh_mm_ss('2020-05-14T10:20:30') '10:20:30' >>> hh_mm_ss(datetime.datetime(2020, 5, 14, 10, 20, 30)) '10:20:30' """ date, _ = _parse(yyyy_mm_dd_hh_mm_ss, at_least="%Y-%m-%dT%H:%M:%S") return date.strftime("%H:%M:%S") def hh_mm(yyyy_mm_dd_hh_mm_ss: Union[str, datetime.datetime]) -> str: """ Extracts the hour and minute of a given 
datetime >>> hh_mm('2020-05-14T10:20:30') '10:20' >>> hh_mm(datetime.datetime(2020, 5, 14, 10, 20, 30)) '10:20' """ date, _ = _parse(yyyy_mm_dd_hh_mm_ss, at_least="%Y-%m-%dT%H:%M") return date.strftime("%H:%M") def year(yyyy_mm_dd: Union[str, datetime.date]) -> int: """ Extracts the year of a given date, similar to yyyy function but returns an int >>> year('2020-05-14') 2020 """ date, _ = _parse(yyyy_mm_dd, at_least="%Y") return date.year def month(yyyy_mm_dd: Union[str, datetime.date]) -> int: """ Extracts the month of a given date >>> month('2020-05-14') 5 """ date, _ = _parse(yyyy_mm_dd, at_least="%Y-%m") return date.month def day(yyyy_mm_dd: Union[str, datetime.date]) -> int: """ Extracts the day of a given date >>> day('2020-05-14') 14 """ date, _ = _parse(yyyy_mm_dd, at_least="%Y-%m-%d") return date.day def hour(yyyy_mm_dd_hh_mm_ss: Union[str, datetime.datetime]) -> int: """ Extracts the hour of a given datetime >>> hour('2020-05-14T05:10:58') 5 """ date, _ = _parse(yyyy_mm_dd_hh_mm_ss, at_least="%Y-%m-%dT%H") return date.hour def from_yyyymmdd(yyyymmdd: str) -> str: """ Converts a yyyymmdd date format (no dashes) to yyyy-mm-dd date format (with dashes) >>> from_yyyymmdd('20200514') '2020-05-14' """ return datetime.datetime.strptime(yyyymmdd, "%Y%m%d").strftime("%Y-%m-%d") def to_yyyymmdd(yyyy_mm_dd: Union[str, datetime.date]) -> str: """ Converts any partial or full yyyy-mm-ddThh:mm:ss to yyyymmdd date format (no dashes) >>> to_yyyymmdd('2020-05-14') '20200514' >>> to_yyyymmdd(datetime.date(2020, 5, 14)) '20200514' """ date, _ = _parse(yyyy_mm_dd, at_least="%Y") return date.strftime("%Y%m%d") def to_datetime(yyyy_mm_dd: Union[str, datetime.date]) -> datetime.datetime: """ Converts any partial or full yyyy-mm-ddThh:mm:ss to python datetime >>> to_datetime('2020-05') datetime.datetime(2020, 5, 1, 0, 0) >>> to_datetime('2020-05-14') datetime.datetime(2020, 5, 14, 0, 0) >>> to_datetime('2020-05-14T10:20:30') datetime.datetime(2020, 5, 14, 10, 20, 30) """ date, _ 
= _parse(yyyy_mm_dd, at_least="%Y") return date def _parse(yyyy_mm_dd: Union[str, datetime.date, datetime.datetime], at_least: str) -> Tuple[datetime.datetime, str]: """ >>> _parse('foo', '%Y') Traceback (most recent call last): ... ValueError: Could not parse date for operation, you should provide at least %Y >>> _parse('2020-01', '%Y-%m-%d') Traceback (most recent call last): ... ValueError: Could not parse date for operation, you should provide at least %Y-%m-%d >>> _parse('2020-01-01foobar', '%Y-%m-%d') Traceback (most recent call last): ... ValueError: unconverted data remains: foobar """ if isinstance(yyyy_mm_dd, datetime.datetime): return (yyyy_mm_dd, "datetime") if isinstance(yyyy_mm_dd, datetime.date): return (datetime.datetime(yyyy_mm_dd.year, yyyy_mm_dd.month, yyyy_mm_dd.day), "datetime" if "%H" in at_least else "date") pattern = "" match = re.match( r"(\d{4})?-?(\d{2})?-?(\d{2})?T?(\d{2})?:?(\d{2})?:?(\d{2})?", yyyy_mm_dd) if not match: raise ValueError( "Could not parse date, it should be in YYYY-MM-DDTHH:MM:SS format") year, month, day, hour, minute, second = match.groups() if year is not None: pattern += "%Y" if month is not None: pattern += "-%m" if day is not None: pattern += "-%d" if hour is not None: pattern += "T%H" if minute is not None: pattern += ":%M" if second is not None: pattern += ":%S" if at_least not in pattern: raise ValueError( "Could not parse date for operation, you should provide at least %s" % at_least) return (datetime.datetime.strptime(yyyy_mm_dd, pattern), pattern) def _strftime(date: datetime.datetime, pattern: str) -> Union[str, datetime.date]: if pattern == "date" and isinstance(date, datetime.datetime): return datetime.date(date.year, date.month, date.day) if pattern == "datetime": return date return date.strftime(pattern)
yyyy-mm-dd
/yyyy_mm_dd-0.1.2-py3-none-any.whl/yyyy_mm_dd/__init__.py
__init__.py
========= yyyymmdd ========= Convenient Date and DateRange classes. ``Date`` ==================================== A ``yyyymmdd``-formatted date. ``Date`` is a subclass of ``datetime.date``, and is mostly (but not fully) compatible with it. It supports flexible creation of ``Date`` objects, e.g.: from a yyyymmdd string, from delta (number of days relative to today), aliases ('yesterday', 'today', 'MIN', 'MAX', etc.), from a ``datetime.date`` object, etc. It defines convenient date arithmetic: ``Date +/- int => Date``, ``Date - Date => int``. Here is an example:: >>> from yyyymmdd import Date >>> import datetime >>> >>> Date.today() Date(20200522) >>> Date('-7') # a week ago Date(20200515) >>> Date(20191123) == Date('20191123') == Date('2019-11-23') == Date(datetime.date(2019, 11, 23)) True >>> Date(20191123).replace(month=1, day=1) Date(20190101) >>> Date('19991231') + 1 Date(20000101) >>> Date('tomorrow') - Date('yesterday') 2 >>> Date('tomorrow') - 1 == Date('today') True >>> x = Date(20191123) >>> x.yyyy + x.mm + x.dd '20191123' ``DateRange`` ==================================== A ``DateRange`` is a `range <https://docs.python.org/3.8/library/functions.html#func-range>`_ -like type, whose elements are ``Date`` objects. This class mostly follows the semantics of the builtin ``range`` type. E.g., stop is exclusive, behavior of steps, negative steps, slicing, ``range1 == range2`` iff ``tuple(range1) == tuple(range2)``, etc. Creation of ``DateRange`` objects is flexible. ``start`` and ``stop`` parameters are converted to ``Date`` automatically. See ``Date`` class for values which can be converted. 
Here is an example:: from yyyymmdd import Date, DateRange >>> from yyyymmdd import Date, DateRange >>> >>> len(DateRange('today', 'today')) # empty 0 >>> DateRange.from_string('yesterday:yesterday') == DateRange.empty() True >>> len(DateRange.from_string('-7:-1')) # start and stop are relative to today 6 >>> DateRange.from_string('20200501') # a singleton range DateRange(20200501, 20200502) >>> x = DateRange.from_string('20200401:20200515:7') # 1-week step >>> list(x) [Date(20200401), Date(20200408), Date(20200415), Date(20200422), Date(20200429), Date(20200506), Date(20200513)] >>> x[0], x[-1] (Date(20200401), Date(20200513)) >>> x[1:3] DateRange(20200408, 20200422, 7) >>> Date(20200422) in x True >>> list(DateRange.from_string('20200515:20200401:-7')) # negative step [Date(20200515), Date(20200508), Date(20200501), Date(20200424), Date(20200417), Date(20200410), Date(20200403)] >>> DateRange.from_string('20200101:%+31') # "%" means "the date on the other side" DateRange(20200101, 20200201) >>> DateRange.from_string('%-365:20200101') # "%" means "the date on the other side" DateRange(20190101, 20200101) ``ArgumentParser`` integration, powered by ``apegears`` ======================================================== The ``Date`` and ``DateRange`` types can be used as cli argument types, when using `apegears' <https://pypi.org/project/apegears/>`_ ``ArgumentParser``. Here is an example:: >>> from yyyymmdd import Date, DateRange >>> from apegears import ArgumentParser >>> >>> parser = ArgumentParser() >>> parser.add_optional('x', type=Date) >>> parser.add_optional('dates', 'd', type=DateRange) >>> print(parser.parse_args('-x 20191123 --dates yest:tomorrow'.split())) Namespace(dates=DateRange(20200521, 20200523), x=Date(20191123)) # if today is 20200522 >>> print(parser.parse_args('-h'.split())) usage: [-h] [-x DATE] [--dates DATE_RANGE] optional arguments: -h, --help show this help message and exit -x DATE Date, like: yyyymmdd, +days, -days, "yesterday", etc. 
--dates DATE_RANGE, -d DATE_RANGE DateRange, like: "DATE:DATE" or "DATE:DATE:STEP" If you prefer using the standard ``argparse.ArgumentParser``, you can define Date arguments using ``type=Date``, and DateRange arguments using ``type=DateRange.from_string``. This isn't as powerful as using ``apegears`` (no default argument names, no default help message, no default metavar, etc.). Installation ==================================== Using pip:: pip install yyyymmdd
yyyymmdd
/yyyymmdd-0.1.2.tar.gz/yyyymmdd-0.1.2/README.rst
README.rst
# readme-template -------------- ## Introduction yzcore 目的为了开发后端服务时,提供一种代码结构规范参考。 可以通过`startproject`和`startapp`两个命令快速创建工程和内部的接口应用模块。 **安装模块** ```shell $ pip install yz-core ``` 示例: - 创建工程: ```shell $ yzcore startproject myproject ``` - 创建工程内部应用: ```shell $ yzcore startapp myapp ./src/apps/ ``` 代码结构介绍: ``` . ├── docs 说明文档、接口文档等文档的存放目录 ├── migrations 数据表迁移文件存放目录 ├── src │   ├── apps 接口应用程序的主目录 │   │   ├── __init__.py │   │   ├── myapp01 │   │   │   ├── __init__.py │   │   │   ├── controllers.py 控制层:封装数据交互操作 │   │   │   ├── models.py 模型层:实现数据表与模型的定义 │   │   │   ├── schemas.py 模式层:定义接口数据参数 │   │   │   ├── tests.py 测试文件 │   │   │   └── views.py 视图层:接口定义层 │   │   └── myapp02 │   ├── conf 配置文件的存放目录 │   ├── const 公共常量存放目录 │   ├── tests 测试文件的存放目录 │   ├── main.py 程序的入口文件 │   ├── settings.py 程序的设置文件 │   └── utils 抽离出的公共代码模块存放目录 ├── .gitignore ├── requirements.txt └── README.md ``` ## Quick start Quick Start 部分主要包括两部分内容:简易的安装部署说明(Deployment)和使用案例(Example)。特别是对于一些基础库,必须包括Example模块。 ## Documentation Documentation 部分是核心的文档,对于大型项目可以使用超链接,如使用以下这种形式: For the full story, head over to the [documentation](https://git.k8s.io/community/contributors/devel#readme). ## 数据库迁移操作 ``` # pip install alembic alembic init migrations # 创建迁移环境 alembic revision --autogenerate -m "commit content" # 自动生成迁移文件 alembic upgrade head # 升级到最近版本 alembic upgrade <revision_id> # 升级到指定版本 alembic downgrade <revision_id> # 回退到指定版本 ```
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/README.md
README.md
import os

try:
    import yaml
except ImportError:  # PyYAML is optional; only needed for yaml/yml configs
    # BUG FIX: the original used a bare ``except:``, which would also
    # swallow KeyboardInterrupt/SystemExit during import.
    yaml = None

from pydantic import BaseSettings, AnyUrl
from yzcore.utils import get_random_secret_key


class DefaultSetting(BaseSettings):
    """
    Base settings for services built on yzcore.

    Subclassing triggers ``__init_subclass__``, which instantiates the
    subclass and copies its field values onto the module-level
    ``default_setting`` singleton, so projects customize settings simply
    by declaring a subclass.
    """
    __cml__ = {}

    def __init_subclass__(cls, **kwargs):
        """Propagate the subclass's field values to the shared singleton."""
        super().__init_subclass__()
        reload_reload_settings(cls())

    class Config:
        case_sensitive = False  # field names are matched case-insensitively

    DEBUG: bool = True
    API_V1_STR: str = "/api/v1"
    SECRET_KEY: str = get_random_secret_key()
    DB_URI: str = None
    ID_URL: AnyUrl = None
    GENERATE_UUID_PATH: str = '/uuid/generate/'
    EXPLAIN_UUID_PATH: str = '/uuid/explain/'
    TRANSLATE_PATH: str = '/uuid/translate/'
    MAKE_UUID_PATH: str = '/uuid/make/'


# Shared settings singleton; updated in place by reload_reload_settings().
default_setting = DefaultSetting()


def reload_reload_settings(instance):
    """Copy every declared field value from *instance* onto ``default_setting``."""
    settings = default_setting
    for k, v in settings.__fields__.items():
        val = getattr(instance, k)
        setattr(settings, k, val)


def get_configer(ext: str = "ini", import_path=os.curdir):
    """
    Load the config file selected by the ENV_PROFILE environment variable
    (``production``/``testing``/anything else -> dev) from
    ``<import_path>/conf/<configname>.<ext>``.

    :param ext: config format, one of ini/cfg (configparser) or yaml/yml
    :param import_path: project root containing the ``conf`` directory
    :return: a ConfigParser instance or the parsed yaml object
    :raises AttributeError: for unsupported file extensions
    """
    profile = os.environ.get('ENV_PROFILE', 'dev')
    if profile == 'production':
        configname = 'config_production'
    elif profile == 'testing':
        configname = 'config_testing'
    else:
        configname = 'config_dev'
    print(f"===>当前环境为:{profile}!导入的配置文件为:{configname}.{ext}")

    base_path = os.path.abspath(import_path)
    _path = os.path.join(base_path, "conf", f"{configname}.{ext}")
    print(_path)
    if ext in ["ini", "cfg"]:
        import configparser
        conf = configparser.ConfigParser()
        conf.read(_path)
    elif ext in ["yaml", "yml"]:
        assert yaml is not None, "Need to install PyYaml"
        conf = yaml.safe_load(open(_path))
    else:
        raise AttributeError(f"暂不支持该文件格式: {ext}")
    return conf


def get_ini_section_to_dict(
        section: str,
        exclude: set = None,
        conf_parser=None
):
    """
    Collect all options of one ini section into a dict.

    :param section: section name
    :param exclude: option keys to leave out of the result
    :param conf_parser: a configparser.ConfigParser that has read the file
    :return: dict mapping option name -> value
    """
    conf_dict = dict()
    for k in conf_parser.options(section):
        if exclude and k in exclude:
            # BUG FIX: the original used ``break`` here, silently dropping
            # every option after the first excluded key.
            continue
        # BUG FIX: the original read from a global ``conf`` (only defined
        # when run as __main__) instead of the conf_parser argument.
        conf_dict[k] = conf_parser.get(section, k)
    return conf_dict


if __name__ == '__main__':
    conf = get_configer("ini")
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/default_settings.py
default_settings.py
from typing import Any
from fastapi import HTTPException

# Thin HTTPException subclasses: each fixes the status code and supplies a
# default ``detail`` message, so call sites can just ``raise NotFoundObject()``.


class NotFoundObject(HTTPException):
    # 404: the requested object does not exist.
    def __init__(self, detail: Any = 'Not Found', headers: dict = None):
        super().__init__(status_code=404, detail=detail, headers=headers)


class MultiObjects(HTTPException):
    # 500: a uniqueness constraint was violated — multiple objects found.
    # The Chinese detail ("{} has multiple objects, violating uniqueness")
    # is formatted with the caller-supplied object description.
    def __init__(self, detail: Any = '', headers: dict = None):
        detail = '{}存在多个对象,不符合唯一性要求'.format(detail)
        super().__init__(status_code=500, detail=detail, headers=headers)


class CreateObjectFailed(HTTPException):
    # 400: object creation failed.
    def __init__(self, detail: Any = 'Object create failed', headers: dict = None):
        super().__init__(status_code=400, detail=detail, headers=headers)


class UpdateObjectFailed(HTTPException):
    # 400: object update failed.
    def __init__(self, detail: Any = 'Object update failed', headers: dict = None):
        super().__init__(status_code=400, detail=detail, headers=headers)


class NoObjectCreated(HTTPException):
    # 400: an operation that should have created an object created none.
    def __init__(self, detail: Any = 'No object was created', headers: dict = None):
        super().__init__(status_code=400, detail=detail, headers=headers)


class AlreadyExistObject(HTTPException):
    # 400: the object to create already exists.
    def __init__(self, detail: Any = 'Already Exist', headers: dict = None):
        super().__init__(status_code=400, detail=detail, headers=headers)


class RequestParamsError(HTTPException):
    # 400: request parameters are malformed.
    def __init__(self, detail: Any = 'Incorrect request parameters', headers: dict = None):
        super().__init__(status_code=400, detail=detail, headers=headers)


class RequestParamsMissing(HTTPException):
    # 400: required request parameters are absent.
    def __init__(self, detail: Any = 'Missing request parameters', headers: dict = None):
        super().__init__(status_code=400, detail=detail, headers=headers)


class NoPermission(HTTPException):
    # NOTE(review): 401 conventionally means "unauthenticated";
    # "insufficient permissions" usually maps to 403 — confirm intent
    # before changing, since API clients may match on this code.
    def __init__(self, detail: Any = 'Insufficient permissions', headers: dict = None):
        super().__init__(status_code=401, detail=detail, headers=headers)


class UnknownError(HTTPException):
    # 500: catch-all for unexpected server-side failures.
    def __init__(self, detail: Any = 'Unknown error', headers: dict = None):
        super().__init__(status_code=500, detail=detail, headers=headers)


if __name__ == '__main__':
    pass
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/exceptions.py
exceptions.py
import os
import json
import typing

from starlette.datastructures import URL
from starlette.background import BackgroundTask
from fastapi import Response as _Response
from fastapi.responses import (
    HTMLResponse,
    PlainTextResponse,
    JSONResponse,
    UJSONResponse,
    ORJSONResponse,
    RedirectResponse,
    StreamingResponse,
    FileResponse
)


class XMLResponse(_Response):
    """Response rendered with an ``application/xml`` content type."""
    media_type = "application/xml"


# Maps the short ``mtype`` keyword accepted by response() to a Response class.
responses = {
    "xml": XMLResponse,
    "html": HTMLResponse,
    "plain": PlainTextResponse,
    "json": JSONResponse,
    "ujson": UJSONResponse,
    "orjson": ORJSONResponse,
    "redirect": RedirectResponse,
    "stream": StreamingResponse,
    "file": FileResponse,
}


def response(
        content: typing.Any = None,
        url: typing.Union[str, URL] = None,  # RedirectResponse only
        path: str = None,  # FileResponse only
        filename: str = None,  # FileResponse only
        stat_result: os.stat_result = None,  # FileResponse only
        method: str = None,  # FileResponse only
        status_code: int = 200,
        headers: dict = None,
        media_type: str = None,
        background: BackgroundTask = None,
        mtype: str = "orjson"
):
    """
    Build a Response object of the kind selected by ``mtype``.

    For the json family (json/ujson/orjson) the content is wrapped in the
    uniform envelope produced by :func:`render_data`; for plain/xml/html a
    non-string content is JSON-serialized.

    :param mtype: one of the keys of the module-level ``responses`` map
    :raises ValueError: if ``mtype`` is not a known response kind
        (the original silently got ``None`` from the lookup and crashed
        with an opaque ``TypeError: 'NoneType' object is not callable``)
    """
    if 'json' in mtype:
        # matches "json", "ujson" and "orjson"
        content = render_data(data=content)
    elif mtype in ['plain', 'xml', 'html'] and not isinstance(content, str):
        content = json.dumps(content)
    kwargs = dict(
        content=content,
        url=url,
        path=path,
        filename=filename,
        stat_result=stat_result,
        method=method,
        status_code=status_code,
        headers=headers,
        media_type=media_type,
        background=background,
    )
    # Drop unset arguments so each Response class only receives the
    # keywords its constructor actually accepts.
    kwargs = {k: v for k, v in kwargs.items() if v is not None}
    _response_cls = responses.get(mtype)
    if _response_cls is None:
        raise ValueError(
            "unsupported mtype %r; expected one of %s" % (mtype, sorted(responses)))
    return _response_cls(**kwargs)


def render_data(data=None, code=10000, message='Successfully.',
                limit: int = 10, offset: int = 0, total: int = 0):
    """
    Wrap *data* in the project's uniform response envelope.

    - ``None``  -> empty ``info`` and an empty, unpaginated ``list``
    - ``list``  -> data goes into ``list.data`` with pagination metadata
    - anything else -> data goes into ``info``

    :param limit/offset/total: pagination metadata, only used for lists
    """
    if data is None:
        return dict(
            code=code,
            message=message,
            info=dict(),
            list=dict(
                data=[],
                pagination=False
            )
        )
    if isinstance(data, list):
        result = dict(
            code=code,
            message=message,
            info=dict(),
            list=dict(
                data=data,
                pagination=dict(
                    limit=limit,
                    offset=offset,
                    total=total
                )
            )
        )
    else:
        result = dict(
            code=code,
            message=message,
            info=data,
            list=dict(
                data=[],
                pagination=False
            )
        )
    return result
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/response/response.py
response.py
from enum import Enum, unique __all__ = ("RegisterCode", "ErrorCode", "TipsCode", "WebsocketCode") # 状态码注册,请同时添加注释 RegisterCode = [ # 正常码 200000, # 正常,请求处理成功 # 系统级错误代码 500000, # 服务器内部错误,无法完成请求 500001, # 服务器不支持请求的功能,无法完成请求 500002, # 无效的响应 500003, # 由于超载或系统维护,服务器暂时的无法处理客户端的请求 500004, # 服务器超时 500005, # HTTP协议的版本不支持 500006, # 兼容旧代码异常 # 接口级错误代码 400000, # 请求语法或参数有误 400001, # 未认证 400002, # 400003, # 请求太快 400004, # 找不到对象 400005, # 请求不允许 400006, # 请求不合理 400007, # 400008, # 超时 400009, # 更新冲突 400010, # 资源已不存在 ## 账号 400100, # 账号相关请求错误 400101, # 重复登录 400103, # 用户登录失败 400104, # 账号不存在 400109, # 更新信息失败 400110, # 手机号码格式不正确 400111, # 手机短信验证码错误 400113, # 60s内不能重复发送短信 400118, # 手机短信发送失败 400119, # 手机号码已被注册 400120, # email格式不正确 400121, # email验证码错误 400123, # 60s内不能重复发送邮件 400126, # 非邀请邮箱 400127, # 已经发送过邀请,请15分钟后再邀请 400128, # email发送失败 400129, # email已被注册 400130, # 400140, # 密码格式不正确 400141, # 密码不一致 400143, # 密码错误 400150, # Token过期或失效 ## 组织 400200, # 组织相关请求错误 400209, # 已切换到其他团队,请关闭当前页面 400211, # 用户未加入任何组织 400212, # 用户已加入该组织 400213, # 用户不属于此企业 400223, # 组织已存在 400224, # 组织不存在 400229, # 组织名称已被使用 400233, # 成员已存在 400234, # 成员不存在 400239, # 无法删除默认成员 ## cms 400300, # node相关请求错误 400301, # 命名格式错误: 文件夹名字只支持中文, 数字, 字母或中划线, 且在50个字符以内 400302, # 命名重复 400303, # 文件夹已存在 400304, # 文件夹不存在 400305, # 最多只能创建四层文件夹 400306, # 文件已存在 400307, # 文件不存在 400308, # 请求超时 400309, # 400310, # 标签名字格式错误: 字符长度在1-6内 400311, # 标签已存在 400312, # 标签不存在 400313, # 标签名字不能重复 400314, # 400320, # 项目已存在 400321, # 项目不存在 400322, # 400323, # 400324, # 400330, # 分组---------- 400331, # 400332, # 400340, # 属性---------- 400341, # 400342, # 400343, # 400380, # limit超出限制 400381, # 搜索字段不存在 400382, # 排序字段不存在 ## 权限 400400, # 权限相关请求错误 400401, # 权限不足 400402, # 权限创建失败 400403, # 400404, # 找不到该权限 400405, # 400408, # 权限查询超时 ## editor 400500, # 编辑器相关请求错误 400510, # 模型名称不能超过50个字符 400511, # 模型描述不能超过100个字符 400512, # 模型组合不存在 400513, # 400514, # 模型不存在 400515, # 400516, # 400520, # 材质库类型不存在 400524, # 材质不存在 400525, # 400530, # 天空盒类型不存在 400534, # 天空盒不存在 400540, # 
模板场景不存在 400544, # 场景不存在 400550, # 父级渲染对象不存在 400554, # 渲染对象不存在 400560, # 父级事件组不存在 400564, # 组件不存在 400570, # 父级资源不存在 400573, # 不支持的资源类型 400574, # 资源不存在 400576, # 资源内容不存在 400584, # 事件不存在 ## 任务 400600, # Job相关请求错误 400604, # 任务不存在 400608, # 任务失败 400610, # 调用upload_policy参数有误 400614, # 对应upload_policy方法不存在 400620, # 调用cloud返回错误 400630, # 发布失败 ## 应用 400700, # 应用相关请求错误 400704, # 应用不存在 400710, # platform不存在 400720, # 域名已经存在 ## 支付 400800, # 支付相关请求错误 ] @unique class ErrorCode(Enum): # 系统级错误代码 UserNotLogin = dict(code=-10007, desc="用户尚未登录") RequestParamInvalid = dict(code=-10008, desc="参数json内容格式不正确") # 接口级错误代码 UserNameAlreadyExists = dict(code=40001, desc="用户名已被注册") MaterialLibTypeNotExists = dict(code=40002, desc="材质库类型不存在") AccessPermissionDenied = dict(code=40003, desc="访问权限不足") SpaceNotExists = dict(code=40004, desc="空间不存在或已删除") FolderNameFormatInvalid = dict( code=40005, desc="文件夹名字格式错误: 文件夹名字只支持中文, 数字, 字母或下划线, 且在50个字符以内" ) FolderNameConflict = dict(code=40006, desc="文件夹名字不能重复") FolderAlreadyExists = dict(code=40007, desc="文件夹已存在") FolderLevelLimited = dict(code=40008, desc="最多只能创建四层文件夹") DstDirFolderNotExists = dict(code=40009, desc="目标目录文件夹不存在") DirFolderNotExists = dict(code=40010, desc="当前目录文件夹不存在") FileNotExists = dict(code=40011, desc="文件不存在") FileNameConflict = dict(code=40012, desc="文件名字不能重复") FileNameFormatInvalid = dict(code=40013, desc="文件名字格式错误: 长度在1-50之间") CategoryTokenInvalid = dict(code=40014, desc="分类设置口令错误") CategoryParentPathNotExists = dict(code=40015, desc="分类父路径不存在") CategoryFormatInvalid = dict(code=40016, desc="分类名格式无效") TagNameFormatInvalid = dict(code=40017, desc="标签名字格式错误: 字符长度在1-6内") TagAlreadyExists = dict(code=40018, desc="标签已存在") TagNotExists = dict(code=40019, desc="标签不存在") TagNameConflict = dict(code=40020, desc="标签名字不能重复") TemplateNotExists = dict(code=40021, desc="空间自定义字段模板不存在") ModelNotExists = dict(code=40022, desc="模型不存在") AppCatalogNotExists = dict(code=40023, desc="目录方案不存在") CategoryAlreadyExists = dict(code=40024, desc="分类已存在") 
AppSpaceNotExists = dict(code=40025, desc="空间项目不存在") AppSolutionsNotFound = dict(code=40026, desc="app方案不存在") ProductionNotFound = dict(code=40027, desc="production不存在") PageSizeOverflow = dict(code=40028, desc="limit超出限制") CabinetNotFound = dict(code=40029, desc="cabinet不存在") OperationPermissionDenied = dict(code=40030, desc="未拥有此权限或操作权限不足") InvalidAccessToken = dict(code=40031, desc="invalid access token") UserNotFound = dict(code=40032, desc="用户不存在") ModelDescLengthOverflow = dict(code=40033, desc="模型描述不能超过100个字符") ModelNameLengthOverflow = dict(code=40034, desc="模型名称不能超过50个字符") CategoryNotExists = dict(code=40035, desc="分类不存在") ModelGroupNotExists = dict(code=40036, desc="模型组合不存在") ProductionNotExists = dict(code=40037, desc="production不存在") JobNotFound = dict(code=40038, desc="任务不存在") JobFailed = dict(code=40039, desc="任务失败") MaterialNotFound = dict(code=40040, desc="材质不存在") InvalidMobile = dict(code=40041, desc="手机号码格式不正确") InvalidOperationType = dict(code=40042, desc="非法操作类型") SmsSendFailed = dict(code=40043, desc="短信发送失败") CaptchaError = dict(code=40044, desc="验证码错误") InvalidUsernameOrPassword = dict(code=40045, desc="请输入正确的账号密码") PlatformNotFound = dict(code=40046, desc="platform不存在") WechatResponseError = dict(code=40047, desc="微信返回错误") SpaceNameAlreadyExists = dict(code=40048, desc="空间名称已被使用") InvalidEmail = dict(code=40049, desc="email格式不正确") InvalidUploadPolicy = dict(code=40050, desc="对应upload_policy方法不存在") InvalidPassword = dict(code=40051, desc="密码格式不正确") MobileAlreadyExists = dict(code=40052, desc="手机号码已注册") InvalidCabinetName = dict(code=40053, desc="cabinet名称格式不正确") CabinetNameAlreadyExisted = dict(code=40054, desc="cabinet名称已被使用") InvalidCallback = dict(code=40055, desc="invalid callback token") CallCloudError = dict(code=40056, desc="调用cloud返回错误") InvalidProductionName = dict(code=40057, desc="非法的产品名称") AppHomeCatalogNotExisted = dict(code=40058, desc="未设置首页方案") ClientNotFound = dict(code=40059, desc="客户端不存在") ClientChannelNotExists = 
dict(code=40060, desc="客户端渠道不存在") HomeSolutionsCannotDelete = dict(code=40061, desc="主页方案不能删除") InvalidRatio = dict(code=40062, desc="不正确的比例值") HostAlreadyExists = dict(code=40063, desc="域名已被使用") ProductNotFound = dict(code=40064, desc="产品不存在") SkyboxNotFound = dict(code=40065, desc="天空盒不存在") SkyboxLibTypeNotExists = dict(code=40066, desc="天空盒类型不存在") ApplicationNotExist = dict(code=40067, desc="应用不存在") UserLoginFailed = dict(code=40068, desc="用户登录失败") AccountUpdateFailed = dict(code=40069, desc="更新账号信息失败") SceneNotFound = dict(code=40070, desc="场景不存在") ParentEntityNotFound = dict(code=40071, desc="父级渲染对象不存在") EntityNotFound = dict(code=40072, desc='渲染对象不存在') ComponentNotFound = dict(code=40073, desc='组件不存在') ResourceNotFound = dict(code=40074, desc='资源不存在') ParentResourceNotFound = dict(code=40075, desc='父级资源不存在') TemplateSceneNotFound = dict(code=40076, desc='模板场景不存在') MediaNotFound = dict(code=40077, desc='资源内容不存在') EventNotFound = dict(code=40078, desc='事件不存在') ParentEventNotFound = dict(code=40079, desc='父级事件不存在') ModelIsNotPublic = dict(code=40080, desc='模型处于非公开状态') ModelIsLocked = dict(code=40081, desc='模型已被加密,请输入密码') ModelPasswordWrong = dict(code=40082, desc='模型密码错误') CollectionNotFound = dict(code=40083, desc='集合不存在') CollectionIsNotPublic = dict(code=40084, desc='集合未公开') CollectionIsLocked = dict(code=40085, desc='集合已被加密,请输入密码') CollectionPasswordWrong = dict(code=40086, desc='集合密码错误') TokenIsWrong = dict(code=40087, desc='Token过期或无效') NotInvitedEmail = dict(code=40088, desc='非邀请邮箱') RoleNotFound = dict(code=40089, desc='角色不存在') CannotDeleteDefaultRole = dict(code=40090, desc='无法删除默认角色') CatalogNotFound = dict(code=40091, desc='3D图册不存在') CorpNotFound = dict(code=40092, desc='企业不存在') RoleNameExist = dict(code=40093, desc='角色名已存在') UserIsInTeam = dict(code=40094, desc='用户已被邀请或已加入团队') UserNotInSpace = dict(code=40095, desc='未加入任何团队') InvitedEmailHasBeenSent = dict(code=40096, desc='已经发送过邀请,请15分钟后再邀请') ResetEmailHasBeenSent = dict(code=40097, 
desc='60s内不能重复发送邮件') TagNotFound = dict(code=40098, desc='标签不存在') UpdateUserInfoFail = dict(code=40099, desc='更新用户信息失败') DomainIsExist = dict(code=40100, desc='域名已经存在') UserNotInCorp = dict(code=40101, desc='用户不属于此企业') SearchFieldNotExist = dict(code=40102, desc='搜索字段不存在') SortFieldNotExist = dict(code=40103, desc='排序字段不存在') PublishFailed = dict(code=40104, desc='发布失败,服务器处理出错') EmailFormatError = dict(code=40105, desc='邮箱格式不正确') InstanceNotExists = dict(code=40106, desc='不存在对象') GroupNotExists = dict(code=40109, desc='群组不存在') SpaceIsChanged = dict(code=40110, desc='已切换到其他团队,请关闭当前页面') NoCorpSpace = dict(code=40111, desc='用户无企业空间') TypeNotSupport = dict(code=40112, desc='不支持的资源类型') ButtonNotFound = dict(code=40113, desc='按钮不存在') ProjectNotFound = dict(code=40114, desc='项目不存在') SceneNotInSameProject = dict(code=40115, desc='场景不在同一个项目') EventIsExist = dict(code=40116, desc='事件已存在') AccountNotExist = dict(code=40117, desc='账号不存在') CaptchaAlreadyExpired = dict(code=40118, desc='验证码已过期') TooManyRequest = dict(code=40119, desc='请求过于频繁') UserNotInvited = dict(code=40120, desc='您未被邀请加入,请联系系统管理员') ModelNumberNotEnough = dict(code=40121, desc='模型数量不足') ProjectlNumberNotEnough = dict(code=40122, desc='项目数量不足') UserAlreadyInvited = dict(code=40123, desc='用户已被邀请') UrlAlreadyExpired = dict(code=40124, desc='重置链接已过期') NotBeInvited = dict(code=40125, desc='未收到邀请或链接地址错误') PasswordErrorOverTimes = dict(code=40127, desc='错误次数超过5次,请明天再尝试') SpaceIsRemoved = dict(code=40128, desc='团队权限被删除') NeedCaptcha = dict(code=40129, desc='首次登陆需要验证码') MacAlreadyExist = dict(code=40130, desc='终端地址已存在') BeKickedOutFromTeam = dict(code=40131, desc='您已被移出团队,请联系管理员') UnauthorizedOrTimeout = dict(code=40132, desc='未授权或授权过期') UIPackInvalid = dict(code=40133, desc='UI数据包无效') UIIconPackInvalid = dict(code=40134, desc='ui icon 包无效') OnlyOnePageNotDelete = dict(code=40134, desc='只剩一个页面,不能删除') IconPackMustZip = dict(code=40134, desc='icon 包必须是zip格式压缩包') GetStaticResourceDataError = dict(code=40135, 
desc='服务器获取发布数据错误') NotFoundScenePublishData = dict(code=40135, desc='找不到场景发布数据') CapacityNotEnough = dict(code=40135, desc='容量不足') PasswordWrong = dict(code=40136, desc='密码错误') # 运营后台 NotAuthenticatedUser = dict(code=-40001, desc='未认证用户,请重新登录') NotVisitedPermission = dict(code=-40002, desc='你没有访问权限') NotUpdatedPermission = dict(code=-40003, desc='你没有修改权限') PermissionDenied = dict(code=-40004, desc='你没有对应的操作权限权限') GroupDeleteFailedForManagerExisted = dict(code=-40005, desc='当前群组里有成员,不能删除当前群组') GroupNameExisted = dict(code=-40006, desc='当前群组名已经存在') PasswordNotSame = dict(code=-40007, desc='两次输入的密码不一致') HasExistsAccount = dict(code=-40008, desc='当前账号已存在') # 重新刷新页面 NotSpaceReFlashPage = dict(code=-50001, desc='不存在空间,请刷新') # 兼容下-10000错误码的提示 AttributeNameAlreadyExist = dict(code=-10000, desc='该属性名已经存在') DuplicateAttributeValue = dict(code=-10000, desc='属性值重复') FileCopyFailure = dict(code=-10000, desc='不能将文件复制到自身或其子目录下') FileMoveFailure = dict(code=-10000, desc='不能将文件移动到自身或其子目录下') FolderHasDeleteFileMoveOrCopyFailure = dict(code=-10000, desc='包含转换失败的文件,不能移动或者复制,请先删除转换失败的文件') FolderAlreadyExist = dict(code=-10000, desc='已经存在该文件夹') ParentFolderNotExist = dict(code=-10000, desc='父文件不存在') # 开放平台 InvalidSpaceUserApiToken = dict(code=-70001, desc='无效的空间用户API token') SpaceNotAuthApplication = dict(code=-70002, desc='该用户空间未授权该应用') NotInternalSpaceAppUser = dict(code=-70003, desc='不是内部应用空间用户') AlreadyAddApplication = dict(code=-70004, desc='您已经添加了该应用') ApplicationNotPublishOrDeleted = dict(code=-70005, desc='应用未发布或被删除') ApplicationNotBelowCurrentSpace = dict(code=-70005, desc='该内部应用不属于当前空间') OpenServerError = dict(code=-70100, desc='open server invoke fail') CheckRepeat = dict(code=-99006, desc="重复登录错误") # cml--> 重复登录 @unique class EnErrorCode(Enum): # 英文版 # 系统级错误代码 UserNotLogin = dict(code=-10007, desc="User not logged in") RequestParamInvalid = dict(code=-10008, desc="Request param invalid") # 接口级错误代码 UserNameAlreadyExists = dict(code=40001, desc="Username is already 
registered") MaterialLibTypeNotExists = dict(code=40002, desc="Material lib type not exists") AccessPermissionDenied = dict(code=40003, desc="Access permission denied") SpaceNotExists = dict(code=40004, desc="Space does not exist") DstDirFolderNotExists = dict(code=40009, desc="Destination directory folder does not exist") DirFolderNotExists = dict(code=40010, desc="Current directory folder does not exist") TagAlreadyExists = dict(code=40018, desc="Tag already exist") TagNotExists = dict(code=40019, desc="Tag dose noet exist") TemplateNotExists = dict(code=40021, desc="Space custom field template does not exist") ModelNotExists = dict(code=40022, desc="Model does not exist") ProductionNotFound = dict(code=40027, desc="Production dose not exist") PageSizeOverflow = dict(code=40028, desc="Exceeding the limit") CabinetNotFound = dict(code=40029, desc="Cabinet does not exist") OperationPermissionDenied = dict(code=40030, desc="Permission denied") InvalidAccessToken = dict(code=40031, desc="invalid access token") UserNotFound = dict(code=40032, desc="User does not exist") ModelDescLengthOverflow = dict(code=40033, desc="Model description cannot exceed 100 characters") ModelNameLengthOverflow = dict(code=40034, desc="Model name cannot exceed 50 characters") CategoryNotExists = dict(code=40035, desc="Category does not exist") ModelGroupNotExists = dict(code=40036, desc="Model combination does not exist") ProductionNotExists = dict(code=40037, desc="Production dose not exist") JobNotFound = dict(code=40038, desc="Job dose not exist") JobFailed = dict(code=40039, desc="Job failure") MaterialNotFound = dict(code=40040, desc="Material not found") InvalidMobile = dict(code=40041, desc="Invalid mobile") InvalidOperationType = dict(code=40042, desc="Invalid operation type") SmsSendFailed = dict(code=40043, desc="Sms send failed") CaptchaError = dict(code=40044, desc="Captcha error") InvalidUsernameOrPassword = dict(code=40045, desc="Invalid account or password") PlatformNotFound 
= dict(code=40046, desc="platform not found") WechatResponseError = dict(code=40047, desc="Wechat response error") SpaceNameAlreadyExists = dict(code=40048, desc="Space name Already exist") InvalidEmail = dict(code=40049, desc="Invalid email") InvalidUploadPolicy = dict(code=40050, desc="Invalid upload policy") InvalidPassword = dict(code=40051, desc="Invalid password") MobileAlreadyExists = dict(code=40052, desc="Mobile number registered") InvalidCabinetName = dict(code=40053, desc="Invalid cabinet name") CabinetNameAlreadyExisted = dict(code=40054, desc="Cabinet already existed") InvalidCallback = dict(code=40055, desc="invalid callback token") CallCloudError = dict(code=40056, desc="Call cloud error") InvalidProductionName = dict(code=40057, desc="Invalid production name") ClientNotFound = dict(code=40059, desc="Client not found") ClientChannelNotExists = dict(code=40060, desc="Client channel not found") InvalidRatio = dict(code=40062, desc="Invalid ratio") HostAlreadyExists = dict(code=40063, desc="Domain name is already in use") ProductNotFound = dict(code=40064, desc="Product not found") SkyboxNotFound = dict(code=40065, desc="Skybox not found") SkyboxLibTypeNotExists = dict(code=40066, desc="Skybox lib type not found") ApplicationNotExist = dict(code=40067, desc="Application not found") UserLoginFailed = dict(code=40068, desc="User login failed") AccountUpdateFailed = dict(code=40069, desc="Account update failed") SceneNotFound = dict(code=40070, desc="Scene not found") ParentEntityNotFound = dict(code=40071, desc="Parent entity not found") EntityNotFound = dict(code=40072, desc='Entity not found') ComponentNotFound = dict(code=40073, desc='Component not found') ResourceNotFound = dict(code=40074, desc='Resource not found') ParentResourceNotFound = dict(code=40075, desc='Parent resource not found') TemplateSceneNotFound = dict(code=40076, desc='Template scene not found') MediaNotFound = dict(code=40077, desc='Media not found') EventNotFound = 
dict(code=40078, desc='Event not found') ParentEventNotFound = dict(code=40079, desc='Parent event not found') ModelIsNotPublic = dict(code=40080, desc='Model is not public') ModelIsLocked = dict(code=40081, desc='Model is locked, please imput password') ModelPasswordWrong = dict(code=40082, desc='Model password wrong') CollectionNotFound = dict(code=40083, desc='Collection not found') CollectionIsNotPublic = dict(code=40084, desc='Collection is not public') CollectionIsLocked = dict(code=40085, desc='Collection is locked, please imput password') CollectionPasswordWrong = dict(code=40086, desc='Collection password wrong') TokenIsWrong = dict(code=40087, desc='Invalid token') NotInvitedEmail = dict(code=40088, desc='Non invitation email') RoleNotFound = dict(code=40089, desc='Role not found') CannotDeleteDefaultRole = dict(code=40090, desc='Cannot delete default role') CorpNotFound = dict(code=40092, desc='Enterprise does not exist') RoleNameExist = dict(code=40093, desc='Role name already exists') UserIsInTeam = dict(code=40094, desc='User has been invited or joined the team') UserNotInSpace = dict(code=40095, desc='User not joined team') InvitedEmailHasBeenSent = dict(code=40096, desc='Invitation has been sent, please invite again in 15 minutes') ResetEmailHasBeenSent = dict(code=40097, desc='Can not send mail repeatedly within 60s') TagNotFound = dict(code=40098, desc='Label does not exist') UpdateUserInfoFail = dict(code=40099, desc='Failed to update user information') DomainIsExist = dict(code=40100, desc='Domain name already exists') UserNotInCorp = dict(code=40101, desc='User does not belong to this enterprise') SearchFieldNotExist = dict(code=40102, desc='Search field does not exist') SortFieldNotExist = dict(code=40103, desc='Sort field does not exist') PublishFailed = dict(code=40104, desc='Publishing failed, server processing error') EmailFormatError = dict(code=40105, desc='Invalid email format') InstanceNotExists = dict(code=40106, desc='Object does not 
exist') GroupNotExists = dict(code=40109, desc='Group not exist') SpaceIsChanged = dict(code=40110, desc='Space is changed,please close the current page') TypeNotSupport = dict(code=40112, desc='The type not support') ButtonNotFound = dict(code=40113, desc='Button not found') ProjectNotFound = dict(code=40114, desc='Project not found') SceneNotInSameProject = dict(code=40115, desc='Scene not in same project') EventIsExist = dict(code=40116, desc='Event is exist') AccountNotExist = dict(code=40117, desc='Account dose not exist') CaptchaAlreadyExpired = dict(code=40118, desc='Captcha already expired') TooManyRequest = dict(code=40119, desc='Too many requests') UserNotInvited = dict(code=40120, desc='You are not invited to join, please contact your system administrator') ModelNumberNotEnough = dict(code=40121, desc='Model number not enouth') ProjectlNumberNotEnough = dict(code=40122, desc='Project number not enouth') UserAlreadyInvited = dict(code=40123, desc='User already invited') UrlAlreadyExpired = dict(code=40124, desc='Url already expired') NotBeInvited = dict(code=40125, desc='No invitation received or wrong link address') MacLimited = dict(code=40126, desc='The current device cannot enter the target space') PasswordErrorOverTimes = dict(code=40127, desc='Password more than 5 errors, please try again tomorrow') SpaceIsRemoved = dict(code=40128, desc='Space permissions deleted') NeedCaptcha = dict(code=40129, desc='First login requires verification code') MacAlreadyExist = dict(code=40130, desc='Terminal address already exists') BeKickedOutFromTeam = dict(code=40131, desc='You have been removed from the space. 
Please contact your system administrator') # 运营后台 NotAuthenticatedUser = dict(code=-40001, desc='未认证用户,请重新登录') NotVisitedPermission = dict(code=-40002, desc='你没有访问权限') NotUpdatedPermission = dict(code=-40003, desc='你没有修改权限') PermissionDenied = dict(code=-40004, desc='你没有对应的操作权限权限') GroupDeleteFailedForManagerExisted = dict(code=-40005, desc='当前群组里有成员,不能删除当前群组') GroupNameExisted = dict(code=-40006, desc='当前群组名已经存在') PasswordNotSame = dict(code=-40007, desc='两次输入的密码不一致') HasExistsAccount = dict(code=-40008, desc='当前账号已存在') # 重新刷新页面 NotSpaceReFlashPage = dict(code=-50001, desc='不存在空间,请刷新') NotInSpace = dict(code=-50001, desc='团队权限被删除') ClientMacLimited = dict(code=-50002, desc='终端无法访问') # 兼容下-10000错误码的提示 AttributeNameAlreadyExist = dict(code=-10000, desc='Attribute name already eist') DuplicateAttributeValue = dict(code=-10000, desc='Duplicate attribute value') FileCopyFailure = dict(code=-10000, desc='Files cannot be copied to themselves or their subdirectories') FileMoveFailure = dict(code=-10000, desc='Files cannot be move to themselves or their subdirectories') DirHasDeleteFileMoveOrCopyFailure = dict(code=-10000, desc='The file containing the conversion failure cannot be moved or copied. 
Please delete the conversion failure file first') FolderAlreadyExist = dict(code=-10000, desc='Folder already exist') ParentFolderNotExist = dict(code=-10000, desc='Parent folder not exist') # 开放平台 InvalidSpaceUserApiToken = dict(code=-70001, desc='无效的空间用户API token') SpaceNotAuthApplication = dict(code=-70002, desc='空间未授权该应用') NotInternalSpaceAppUser = dict(code=-70003, desc='不是内部应用空间用户') OpenServerError = dict(code=-70100, desc='open server invoke fail') CheckRepeat = dict(code=-99006, desc="Duplication error") # cml--> 重复登录 @unique class TipsCode(Enum): CheckUserName = dict(code=-99001, message="用户名错误") # 用户名检查相关 CheckMobile = dict(code=-99002, message="手机号码错误") # 手机号码检查相关 CheckPassword = dict(code=-99003, message="密码错误") # 密码检查相关 CheckParameter = dict(code=-99004, message="参数错误") # 参数检查相关 CheckCaptcha = dict(code=-99005, message="验证码错误") CheckRepeat = dict(code=-99006, message="重复登录错误") # cml--> 重复登录 PasswordNotSame = dict(code=40103, message='密码不一致') # 密码不一致 AccountAlreadyExists = dict(code=40104, message='已存在账户') # 已存在账户 GroupNotExists = dict(code=40105, message='群组不存在') # 群组不存在 InstanceNotExists = dict(code=40106, message="不存在对象") # 不存在对象 HasJoinGroup = dict(code=40106, message="已经加入该组") # 已经加入该组 RequiredParam = dict(code=40107, message="必填参数") # 必填参数 integerOutOfRange = dict(code=40108, message="整型数太大") # 整型数太大 ModelNotPublished = dict(code=40109, message='模型未发布') InviteNoPriceSet = dict(code=-30001, message="该空间当前没有购买任何套餐") InvitePriceSetOuttime = dict(code=-30002, message="该空间套餐已过期") InviteNoPriceSetItem = dict(code=-30003, message="该空间套餐不含邀请项") InviteOutPriceSetItemLimit = dict(code=-30004, message="该空间容量已满,请升级套餐") MeetingAlreadyOver = dict(code=-40001, message='会议已经结束') OutOfPeopleNum = dict(code=-40002, message='会议参与人数上限为10人,现在已经达到10人') ParticipantAlreadyDeleted = dict(code=-40003, message='您被会议管理人员移除会议,请联系会议发起人') NotInMeeting = dict(code=-40004, message='您未参加过此会议') MaxTwoReviewScene = dict(code=-40005, message='最多同时支持两个场景评审') MeetingNotOverYet = 
dict(code=-40006, message='会议尚未结束') @unique class EnTipsCode(Enum): CheckUserName = dict(code=-99001, message="Account error") # 用户名检查相关 CheckMobile = dict(code=-99002, message="Wrong mobile number") # 手机号码检查相关 CheckPassword = dict(code=-99003, message="Password error") # 密码检查相关 CheckParameter = dict(code=-99004, message="Parem error") # 参数检查相关 CheckCaptcha = dict(code=-99005, message="Captcha error") CheckRepeat = dict(code=-99006, message="Duplication error") # cml--> 重复登录 PasswordNotSame = dict(code=40103, message='Passwords are inconsistent') # 密码不一致 AccountAlreadyExists = dict(code=40104, message='Account already exist') # 已存在账户 GroupNotExists = dict(code=40105, message='Group does not exist') # 群组不存在 InstanceNotExists = dict(code=40106, message="Object does not exist") # 不存在对象 HasJoinGroup = dict(code=40106, message="Already joined the group") # 已经加入该组 RequiredParam = dict(code=40107, message="Required param") # 必填参数 integerOutOfRange = dict(code=40108, message="Integer too large") # 整型数太大 ModelNotPublished = dict(code=40109, message='Model not published') InviteNoPriceSet = dict(code=-30001, message="No packages are currently purchased for this space") InvitePriceSetOuttime = dict(code=-30002, message="The space package has expired") InviteNoPriceSetItem = dict(code=-30003, message="The space package does not include invitations") InviteOutPriceSetItemLimit = dict(code=-30004, message="This space is full, please upgrade the package") MeetingAlreadyOver = dict(code=-40001, message='The meeting is over') OutOfPeopleNum = dict(code=-40002, message='The maximum number of participants in the meeting is 10, now it has reached 10') ParticipantAlreadyDeleted = dict(code=-40003, message='You have been removed from the meeting by the meeting management. 
Please contact the meeting sponsor') NotInMeeting = dict(code=-40004, message='You have not attended this meeting') MaxTwoReviewScene = dict(code=-40005, message='Up to two scenario reviews are supported at the same time') MeetingNotOverYet = dict(code=-40006, message='The meeting is not over') class WebsocketCode: def __init__(self, **kwargs): username = kwargs.get('username') data = kwargs.get('data', {}) subject_name = kwargs.get('subject_name') object_name = kwargs.get('object_name') team_name = kwargs.get('team_name') self.JoinTwoMeeting = dict(code=-1001, message='您在其他地方加入了会议', data=dict(content=dict(zh='您在其他地方加入了会议', en='You have joined another meeting'))) self.DeletedParticipant = dict(code=-1002, message='{username}被移出会议'.format(username=username), data=dict(username=username, content=dict(zh='被移出会议', en='is removed from the meeting'), participant_uid=data.get('uid'))) self.ParticipantExit = dict(code=-1003, message='{username}已退出会议'.format(username=username), data=dict(username=username, content=dict(zh='已退出会议', en='has left the meeting'))) self.BeKickedOutFromTeam = dict(code=-1004, message='你已被管理员移出 {team_name} 团队'.format(team_name=team_name), data=dict(team_name=team_name, content=dict(zh='你已被管理员移出团队', en='You have been removed from the team by the administrator'))) # self.DataUpdated = dict(code=1000, message='data updated', data=data) self.MasterChanged = dict(code=1001, message='{subject_name}已将主控权移交给{object_name}'.format(subject_name=subject_name, object_name=object_name), data=dict(subject_name=subject_name, object_name=object_name, content=dict(zh='已将主控权移交给', en='has transferred the mastership to'), master_uid=data.get('master_uid'))) self.JoinMeeting = dict(code=1002, message='{username}已加入会议'.format(username=username), data=dict(username=username, content=dict(zh='已加入会议', en='has joined the meeting'))) self.CreateMarker = dict(code=1003, message='create marker', data=data) self.DeleteMarker = dict(code=1004, message='delete marker', data=data) 
self.UpdateMakrer = dict(code=1005, message='update makrer', data=data) self.CreateReviewScene = dict(code=1006, message='create review scene', data=data) self.DeleteReviewScene = dict(code=1007, message='delete review scene', data=data) self.EndMeeting = dict(code=1008, message='您已经结束会议', data=dict(content=dict(zh='您已经结束会议', en='You have ended the meeting'))) self.MeetingAlreadyOver = dict(code=1009, message='会议已经结束', data=dict(content=dict(zh='会议已经结束', en='The meeting is over'))) if __name__ == '__main__': print(ErrorCode.BeKickedOutFromTeam.value)
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/response/response_code.py
response_code.py
from typing import List, Optional, Union

from yzcore.default_settings import default_setting as settings
from yzcore.request import request

__all__ = [
    "generate_uuid",
    "explain_uuid",
    "translate2timestamp",
    "make_uuid",
]

# The remote ID service is mandatory for this module: fail fast at import
# time instead of failing on the first call.
if settings.ID_URL is None:
    raise EnvironmentError("Error: 'ID_URL' is None.")


def generate_uuid(limit: int = 1) -> Union[int, List[int], None]:
    """Generate ``limit`` IDs from the remote ID service.

    Note: the original annotation ``-> int or list`` evaluated to plain
    ``int`` at runtime; corrected here to the actual contract.

    :param limit: number of IDs to request.
    :return: a single int when ``limit == 1``, otherwise a list of ints:
             1: 6918460366603157505
             2: [6918460407405346817, 6918460407405347841, ...]
             ``None`` when the service does not answer 200 (pre-existing
             best-effort behaviour, kept for compatibility).
    """
    url = f"{settings.ID_URL}{settings.GENERATE_UUID_PATH}"
    result, status_code = request(
        'get', url,
        params={'limit': limit}
    )
    if status_code == 200:
        if limit == 1:
            return int(result['id'])
        return [int(_id) for _id in result['id_list']]
    return None


def explain_uuid(long_id: int) -> Optional[dict]:
    """Decode a long integer ID into its component fields.

    :param long_id: e.g. 6918460366603157505
    :return: on success a dict such as::

            {
                "machine_id": 1,
                "sequence": 0,
                "time_duration": 867377,
                "generate_method": 2,
                "mode_type": 1,
                "version": 0
            }

        ``None`` when the service does not answer 200.
    """
    url = f"{settings.ID_URL}{settings.EXPLAIN_UUID_PATH}"
    result, status_code = request(
        'get', url,
        params={'long_id': long_id}
    )
    if status_code == 200:
        return result
    return None


def translate2timestamp(time_duration: int) -> Optional[dict]:
    """Convert a service time-duration value into a timestamp.

    :param time_duration: duration component taken from a decoded ID.
    :return: on success a dict such as::

            {
                "timestamp": 1611219039,
                "datetime": "2021-01-21T16:50:39"
            }

        ``None`` when the service does not answer 200.
    """
    url = f"{settings.ID_URL}{settings.TRANSLATE_PATH}"
    result, status_code = request(
        'get', url, params={'time_duration': time_duration})
    if status_code == 200:
        return result
    return None


def make_uuid(
        sequence: int,
        timestamp: int,
        machine: Optional[int] = None,
        method: Optional[int] = None,
        mtype: Optional[int] = None,
        version: Optional[int] = None
) -> Optional[int]:
    """Compose a long integer ID from its component fields.

    Components left as ``None`` are omitted from the request so the
    service applies its own defaults.

    :param sequence: sequence counter component.
    :param timestamp: timestamp component.
    :param machine: machine-id component.
    :param method: generation-method component.
    :param mtype: mode-type component.
    :param version: version component.
    :return: the composed ID as an int, or ``None`` when the service does
             not answer 200.
    """
    data = {
        "sequence": sequence,
        "timestamp": timestamp,
        "machine": machine,
        "method": method,
        "mtype": mtype,
        "version": version
    }
    data = {k: v for k, v in data.items() if v is not None}
    url = f"{settings.ID_URL}{settings.MAKE_UUID_PATH}"
    result, status_code = request('post', url, json=data)
    if status_code == 200:
        return int(result['id'])
    return None
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/extensions/uid.py
uid.py
import os import shutil import json import base64 import hmac import datetime import hashlib import urllib import warnings # from enum import Enum try: import oss2 except: oss2 = None __all__ = [ "OssManager", "OssManagerError" ] IMAGE_FORMAT_SET = [ 'bmp', 'jpg', 'jpeg', 'png', 'tif', 'gif', 'pcx', 'tga', 'exif', 'fpx', 'svg', 'psd', 'cdr', 'pcd', 'dxf', 'ufo', 'eps', 'ai', 'raw', 'WMF', 'webp', 'tiff' ] OssManagerError = type("OssManagerError", (ValueError,), {}) warnings.warn('The aliyun_oss module is deprecated', category=DeprecationWarning, stacklevel=2) class OssManager(object): """ 使用示例: >>> from . import OssManager >>> oss_conf = dict( ... access_key_id="LTAIxxxxxxxxxxx", ... access_key_secret="Cep4Mxxxxxxxxxxxxxxxxxxxx", ... endpoint="oss-cn-shenzhen.aliyuncs.com", ... # endpoint="oss-cn-shenzhen-internal.aliyuncs.com", ... bucket_name="xxxx-local", ... cache_path="/tmp/xxxx/fm/cache" ... ) >>> oss = OssManager(**oss_conf) >>> oss.upload("/home/zhangw/Work/模型文件/狼.fbx", "test/狗.fbx") >>> oss.download("test/狗.fbx") """ acl_type = { "private": oss2.BUCKET_ACL_PRIVATE, "onlyread": oss2.BUCKET_ACL_PUBLIC_READ, "readwrite": oss2.BUCKET_ACL_PUBLIC_READ_WRITE, } # 存储类型 storage_cls = { "standard": oss2.BUCKET_STORAGE_CLASS_STANDARD, # 标准类型 "ia": oss2.BUCKET_STORAGE_CLASS_IA, # 低频访问类型 "archive": oss2.BUCKET_STORAGE_CLASS_ARCHIVE, # 归档类型 "cold_archive": oss2.BUCKET_STORAGE_CLASS_COLD_ARCHIVE, # 冷归档类型 } # 冗余类型 redundancy_type = { "lrs": oss2.BUCKET_DATA_REDUNDANCY_TYPE_LRS, # 本地冗余 "zrs": oss2.BUCKET_DATA_REDUNDANCY_TYPE_ZRS, # 同城冗余(跨机房) } def __init__( self, access_key_id, access_key_secret, bucket_name, endpoint=None, cname=None, cache_path='.', expire_time=30, **kwargs ): self.access_key_id = access_key_id self.access_key_secret = access_key_secret self.bucket_name = bucket_name self.endpoint = endpoint self.cache_path = cache_path self.scheme = kwargs.get("scheme", "https") self.image_domain = kwargs.get("image_domain") self.asset_domain = 
kwargs.get("asset_domain") self.policy_expire_time = kwargs.get("policy_expire_time", expire_time) self.cname = cname self.bucket = None self.__init() def __init(self, bucket_name=None): """初始化对象""" if oss2 is None: raise ImportError("'oss2' must be installed to use OssManager") if not any((self.endpoint, self.cname)): raise AttributeError( "One of 'endpoint' and 'cname' must not be None.") self.auth = oss2.Auth(self.access_key_id, self.access_key_secret) # 如果cname存在,则使用自定义域名初始化 self.endpoint = self.cname if self.cname else self.endpoint is_cname = True if self.cname else False self.bucket_name = bucket_name if bucket_name else self.bucket_name self.bucket = oss2.Bucket( self.auth, self.endpoint, self.bucket_name, is_cname=is_cname ) if self.cache_path: try: os.makedirs(self.cache_path) except OSError: pass # make_dir(self.cache_path) def reload_oss(self, **kwargs): """重新加载oss配置""" self.access_key_id = kwargs.get("access_key_id") self.access_key_secret = kwargs.get("access_key_secret") self.bucket_name = kwargs.get("bucket_name") self.endpoint = kwargs.get("endpoint") self.__init() def create_bucket(self, bucket_name=None, acl_type='private', storage_type='standard', redundancy_type='zrs'): self.__init(bucket_name=bucket_name) permission = self.acl_type.get(acl_type) config = oss2.models.BucketCreateConfig( storage_class=self.storage_cls.get(storage_type), data_redundancy_type=self.redundancy_type.get(redundancy_type) ) return self.bucket.create_bucket(permission, input=config) def iter_buckets(self, prefix='', marker='', max_keys=100, max_retries=None): """ :param prefix: 只列举匹配该前缀的Bucket :param marker: 分页符。只列举Bucket名字典序在此之后的Bucket :param max_keys: 每次调用 `list_buckets` 时的max_keys参数。注意迭代器返回的数目可能会大于该值。 :param max_retries: :return: """ if not hasattr(self, 'service'): self.service = oss2.Service(self.auth, self.endpoint) return oss2.BucketIterator( self.service, prefix=prefix, marker=marker, max_keys=max_keys, max_retries=max_retries) def list_buckets(self, prefix='', 
marker='', max_keys=100, params=None): """根据前缀罗列用户的Bucket。 :param str prefix: 只罗列Bucket名为该前缀的Bucket,空串表示罗列所有的Bucket :param str marker: 分页标志。首次调用传空串,后续使用返回值中的next_marker :param int max_keys: 每次调用最多返回的Bucket数目 :param dict params: list操作参数,传入'tag-key','tag-value'对结果进行过滤 :return: 罗列的结果 :rtype: oss2.models.ListBucketsResult """ if not hasattr(self, 'service'): self.service = oss2.Service(self.auth, self.endpoint) return self.service.list_buckets( prefix=prefix, marker=marker, max_keys=max_keys, params=params) def is_exist_bucket(self): """判断存储空间是否存在""" try: self.bucket.get_bucket_info() except oss2.exceptions.NoSuchBucket: return False except: raise return True def delete_bucket(self, bucket_name=None): """删除bucket""" try: resp = self.bucket.delete_bucket() if resp.status < 300: return True elif resp.status == 404: return False except: import traceback print(traceback.format_exc()) def encrypt_bucket(self): """加密bucket""" # 创建Bucket加密配置,以AES256加密为例。 rule = oss2.models.ServerSideEncryptionRule() rule.sse_algorithm = oss2.SERVER_SIDE_ENCRYPTION_AES256 # 设置KMS密钥ID,加密方式为KMS可设置此项。 # 如需使用指定的密钥加密,需输入指定的CMK ID; # 若使用OSS托管的CMK进行加密,此项为空。使用AES256进行加密时,此项必须为空。 rule.kms_master_keyid = "" # 设置Bucket加密。 result = self.bucket.put_bucket_encryption(rule) # 查看HTTP返回码。 print('http response code:', result.status) return result def delete_encrypt_bucket(self): # 删除Bucket加密配置。 result = self.bucket.delete_bucket_encryption() # 查看HTTP返回码。 print('http status:', result.status) return result def get_sign_url(self, key, expire=10): return self.bucket.sign_url("GET", key, expire) def post_sign_url(self, key, expire=10): return self.bucket.sign_url("POST", key, expire) def delete_cache_file(self, filename): """删除文件缓存""" filepath = os.path.abspath(os.path.join(self.cache_path, filename)) assert os.path.isfile(filepath), '非文件或文件不存在' os.remove(filepath) def search_cache_file(self, filename): """文件缓存搜索""" # 拼接绝对路径 filepath = os.path.abspath(os.path.join(self.cache_path, filename)) if 
os.path.isfile(filepath): return filepath else: return None def download(self, key, local_name=None, process=None, is_stream=False): """ 下载oss文件 :param key: :param local_name: :param process: :param is_stream: is_stream = True: >>> result = self.download('readme.txt', is_stream=True) >>> print(result.read()) 'hello world' is_stream = False: >>> result = self.download('readme.txt', '/tmp/cache/readme.txt') >>> print(result) '/tmp/cache/readme.txt' :return: """ if not local_name: local_name = os.path.abspath( os.path.join(self.cache_path, key) ) make_dir(os.path.dirname(local_name)) if is_stream: return self.bucket.get_object(key, process=process) else: self.bucket.get_object_to_file(key, local_name, process=process) return local_name def upload(self, filepath, key=None, num_threads=2): """上传oss文件""" if key is None: key = filepath.split('/')[-1] headers = None if filepath.endswith(".dds"): headers = dict() headers["Content-Type"] = "application/octet-stream" result = oss2.resumable_upload( self.bucket, key, filepath, headers=headers, num_threads=num_threads, ) # 返回下载链接 if not any((self.image_domain, self.asset_domain)): return result.resp.response.url return self.get_file_url(filepath, key) def get_policy( self, filepath, callback_url, callback_data=None, callback_content_type="application/json"): """ 授权给第三方上传 :param filepath: :param callback_url: :param callback_data: 需要回传的参数 :param callback_content_type: 回调时的Content-Type "application/json" "application/x-www-form-urlencoded" :return: """ params = urllib.parse.urlencode( dict(data=json.dumps(callback_data))) policy_encode = self._get_policy_encode(filepath) sign = self.get_signature(policy_encode) callback_dict = dict() callback_dict["callbackUrl"] = callback_url callback_dict["callbackBody"] = ( "filepath=${object}&size=${size}&mime_type=${mimeType}" "&img_height=${imageInfo.height}&img_width=${imageInfo.width}" "&img_format=${imageInfo.format}&" + params ) callback_dict["callbackBodyType"] = callback_content_type 
callback_param = json.dumps(callback_dict).strip().encode() base64_callback_body = base64.b64encode(callback_param) return dict( accessid=self.access_key_id, host=f"{self.scheme}://{self.bucket_name}.{self.endpoint}", policy=policy_encode.decode(), signature=sign, dir=filepath, callback=base64_callback_body.decode(), ) def _get_policy_encode(self, filepath): expire_time = datetime.datetime.now() + datetime.timedelta( seconds=self.policy_expire_time ) policy_dict = dict( expiration=expire_time.isoformat() + "Z", conditions=[ ["starts-with", "$key", filepath], # 指定值开始 # ["eq", "$success_action_redirect", "public-read"], # 精确匹配 # ["content-length-range", 1, 1024*1024*1024] # 对象大小限制 ], ) policy = json.dumps(policy_dict).strip().encode() return base64.b64encode(policy) def get_signature(self, policy_encode): """ 获取签名 :param policy_encode: :return: """ h = hmac.new( self.access_key_secret.encode("utf-8"), policy_encode, hashlib.sha1 ) sign_result = base64.encodebytes(h.digest()).strip() return sign_result.decode() def get_file_url(self, filepath, key): if filepath.split('.')[-1] in IMAGE_FORMAT_SET: resource_url = u"//{domain}/{key}".format( domain=self.image_domain, key=key) else: resource_url = u"//{domain}/{key}".format( domain=self.asset_domain, key=key) return resource_url def update_file_headers(self, key, headers): self.bucket.update_object_meta(key, headers) def make_dir(dir_path): """新建目录""" try: os.makedirs(dir_path) except OSError: pass def copy_file(src, dst): """拷贝文件""" dst_dir = os.path.dirname(dst) make_dir(dst_dir) shutil.copy(src, dst) if __name__ == '__main__': kwargs = dict( access_key_id='', access_key_secret='', endpoint='oss-cn-shenzhen.aliyuncs.com', bucket_name='', ) oss_obj = OssManager(**kwargs) file_name = '/Users/edz/realibox/base-all/base/src/core/request/beta_gangnam_style.fbx' # remote_name = 'cmltest.fbx' # 上传 result_obj = oss_obj.upload(file_name) print(result_obj) """ 
http://realicloud-local.oss-cn-shenzhen.aliyuncs.com/beta_gangnam_style.fbx """ """ # result: { 'resp': <oss2.http.Response object at 0x1071ddc50>, 'status': 200, 'headers': { 'Server': 'AliyunOSS', 'Date': 'Tue, 20 Oct 2020 01:22:34 GMT', 'Content-Length': '0', 'Connection': 'keep-alive', 'x-oss-request-id': '5F8E3BDAFEC931303087D9D9', 'ETag': '"D41D8CD98F00B204E9800998ECF8427E"', 'x-oss-hash-crc64ecma': '0', 'Content-MD5': '1B2M2Y8AsgTpgAmY7PhCfg==', 'x-oss-server-time': '63' }, 'request_id': '5F8E3BDAFEC931303087D9D9', 'versionid': None, 'delete_marker': None, 'etag': 'D41D8CD98F00B204E9800998ECF8427E', 'crc': 0 } # result.resp: { 'response': <Response [200]>, 'status': 200, 'headers': { 'Server': 'AliyunOSS', 'Date': 'Tue, 20 Oct 2020 02:31:03 GMT', 'Content-Length': '0', 'Connection': 'keep-alive', 'x-oss-request-id': '5F8E4BE7FEC93130387B8D5B', 'ETag': '"D41D8CD98F00B204E9800998ECF8427E"', 'x-oss-hash-crc64ecma': '0', 'Content-MD5': '1B2M2Y8AsgTpgAmY7PhCfg==', 'x-oss-server-time': '49' }, 'request_id': '5F8E4BE7FEC93130387B8D5B', '_Response__all_read': True } # result.resp.response: { '_content': False, '_content_consumed': True, '_next': None, 'status_code': 200, 'headers': { 'Server': 'AliyunOSS', 'Date': 'Tue, 20 Oct 2020 02:32:13 GMT', 'Content-Length': '0', 'Connection': 'keep-alive', 'x-oss-request-id': '5F8E4C2D4D5A2B3339F164B7', 'ETag': '"D41D8CD98F00B204E9800998ECF8427E"', 'x-oss-hash-crc64ecma': '0', 'Content-MD5': '1B2M2Y8AsgTpgAmY7PhCfg==', 'x-oss-server-time': '18' }, 'raw': <urllib3.response.HTTPResponse object at 0x104254a10>, 'url': 'http://haier-mdcp-private.oss-cn-qingdao.aliyuncs.com/cmltest.fbx', 'encoding': None, 'history': [], 'reason': 'OK', 'cookies': <RequestsCookieJar[]>, 'elapsed': datetime.timedelta(microseconds=250406), 'request': <PreparedRequest [PUT]>, 'connection': <requests.adapters.HTTPAdapter object at 0x1041fb710> } """ # 下载 # res = oss_obj.download(remote_name) # print(res)
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/extensions/aliyun_oss.py
aliyun_oss.py
import os import shutil import json import base64 import hmac import datetime import hashlib import urllib from enum import Enum try: import oss2 except: oss2 = None from . import OssManagerBase __all__ = [ "OssManager", "OssManagerError" ] OssManagerError = type("OssManagerError", (ValueError,), {}) # class ACL(Enum): # private = oss2.BUCKET_ACL_PRIVATE # onlyread = oss2.BUCKET_ACL_PUBLIC_READ # readwrite = oss2.BUCKET_ACL_PUBLIC_READ_WRITE class OssManager(OssManagerBase): """ 使用示例: >>> from . import OssManager >>> oss_conf = dict( ... access_key_id="LTAIxxxxxxxxxxx", ... access_key_secret="Cep4Mxxxxxxxxxxxxxxxxxxxx", ... endpoint="oss-cn-shenzhen.aliyuncs.com", ... # endpoint="oss-cn-shenzhen-internal.aliyuncs.com", ... bucket_name="xxxx-local", ... cache_path="/tmp/xxxx/fm/cache" ... ) >>> oss = OssManager(**oss_conf) >>> oss.upload("/home/zhangw/Work/模型文件/狼.fbx", "test/狗.fbx") >>> oss.download("test/狗.fbx") """ acl_type = { "private": oss2.BUCKET_ACL_PRIVATE, "onlyread": oss2.BUCKET_ACL_PUBLIC_READ, "readwrite": oss2.BUCKET_ACL_PUBLIC_READ_WRITE, } # 存储类型 storage_cls = { "standard": oss2.BUCKET_STORAGE_CLASS_STANDARD, # 标准类型 "ia": oss2.BUCKET_STORAGE_CLASS_IA, # 低频访问类型 "archive": oss2.BUCKET_STORAGE_CLASS_ARCHIVE, # 归档类型 "cold_archive": oss2.BUCKET_STORAGE_CLASS_COLD_ARCHIVE, # 冷归档类型 } # 冗余类型 redundancy_type = { "lrs": oss2.BUCKET_DATA_REDUNDANCY_TYPE_LRS, # 本地冗余 "zrs": oss2.BUCKET_DATA_REDUNDANCY_TYPE_ZRS, # 同城冗余(跨机房) } def __init__(self, *args, **kwargs): super(OssManager, self).__init__(*args, **kwargs) def __init(self, bucket_name=None): """初始化对象""" if oss2 is None: raise ImportError("'oss2' must be installed to use OssManager") if not any((self.endpoint, self.cname)): raise AttributeError( "One of 'endpoint' and 'cname' must not be None.") self.auth = oss2.Auth(self.access_key_id, self.access_key_secret) # 如果cname存在,则使用自定义域名初始化 self.endpoint = self.cname if self.cname else self.endpoint is_cname = True if self.cname else False self.bucket_name = 
bucket_name if bucket_name else self.bucket_name self.bucket = oss2.Bucket( self.auth, self.endpoint, self.bucket_name, is_cname=is_cname ) if self.cache_path: try: os.makedirs(self.cache_path) except OSError: pass # make_dir(self.cache_path) def reload_oss(self, **kwargs): """重新加载oss配置""" self.access_key_id = kwargs.get("access_key_id") self.access_key_secret = kwargs.get("access_key_secret") self.bucket_name = kwargs.get("bucket_name") self.endpoint = kwargs.get("endpoint") self.__init() def create_bucket(self, bucket_name=None, acl_type='private', storage_type='standard', redundancy_type='zrs'): self.__init(bucket_name=bucket_name) permission = self.acl_type.get(acl_type) config = oss2.models.BucketCreateConfig( storage_class=self.storage_cls.get(storage_type), data_redundancy_type=self.redundancy_type.get(redundancy_type) ) return self.bucket.create_bucket(permission, input=config) def iter_buckets(self, prefix='', marker='', max_keys=100, max_retries=None): """ :param prefix: 只列举匹配该前缀的Bucket :param marker: 分页符。只列举Bucket名字典序在此之后的Bucket :param max_keys: 每次调用 `list_buckets` 时的max_keys参数。注意迭代器返回的数目可能会大于该值。 :param max_retries: :return: """ if not hasattr(self, 'service'): self.service = oss2.Service(self.auth, self.endpoint) return oss2.BucketIterator( self.service, prefix=prefix, marker=marker, max_keys=max_keys, max_retries=max_retries) def list_buckets(self, prefix='', marker='', max_keys=100, params=None): """根据前缀罗列用户的Bucket。 :param str prefix: 只罗列Bucket名为该前缀的Bucket,空串表示罗列所有的Bucket :param str marker: 分页标志。首次调用传空串,后续使用返回值中的next_marker :param int max_keys: 每次调用最多返回的Bucket数目 :param dict params: list操作参数,传入'tag-key','tag-value'对结果进行过滤 :return: 罗列的结果 :rtype: oss2.models.ListBucketsResult """ if not hasattr(self, 'service'): self.service = oss2.Service(self.auth, self.endpoint) return self.service.list_buckets( prefix=prefix, marker=marker, max_keys=max_keys, params=params) def is_exist_bucket(self): """判断存储空间是否存在""" try: self.bucket.get_bucket_info() except 
oss2.exceptions.NoSuchBucket: return False except: raise return True def delete_bucket(self, bucket_name=None): """删除bucket""" try: resp = self.bucket.delete_bucket() if resp.status < 300: return True elif resp.status == 404: return False except: import traceback print(traceback.format_exc()) # def encrypt_bucket(self): # """设置Bucket加密""" # # 创建Bucket加密配置,以AES256加密为例。 # rule = oss2.models.ServerSideEncryptionRule() # rule.sse_algorithm = oss2.SERVER_SIDE_ENCRYPTION_AES256 # # 设置KMS密钥ID,加密方式为KMS可设置此项。 # # 如需使用指定的密钥加密,需输入指定的CMK ID; # # 若使用OSS托管的CMK进行加密,此项为空。使用AES256进行加密时,此项必须为空。 # rule.kms_master_keyid = "" # # # 设置Bucket加密。 # result = self.bucket.put_bucket_encryption(rule) # # 查看HTTP返回码。 # print('http response code:', result.status) # return result # # def delete_encrypt_bucket(self): # # 删除Bucket加密配置。 # result = self.bucket.delete_bucket_encryption() # # 查看HTTP返回码。 # print('http status:', result.status) # return result def get_sign_url(self, key, expire=10): return self.bucket.sign_url("GET", key, expire) def post_sign_url(self, key, expire=10): return self.bucket.sign_url("POST", key, expire) def download(self, key, local_name=None, is_return_obj=False, progress_callback=None, process=None): """ 下载oss文件 :param key: :param local_name: :param process: :param load_stream_in_memory: is_stream = True: >>> result = self.download('readme.txt', load_stream_in_memory=True) >>> print(result.read()) 'hello world' is_stream = False: >>> result = self.download('readme.txt', '/tmp/cache/readme.txt') >>> print(result) '/tmp/cache/readme.txt' :return: """ if not local_name: local_name = os.path.abspath( os.path.join(self.cache_path, key) ) make_dir(os.path.dirname(local_name)) if is_return_obj: return self.bucket.get_object(key, process=process) else: self.bucket.get_object_to_file(key, local_name, process=process) return local_name def upload(self, filepath, key=None, num_threads=2): """上传oss文件""" if key is None: key = filepath.split('/')[-1] headers = None if 
filepath.endswith(".dds"): headers = dict() headers["Content-Type"] = "application/octet-stream" result = oss2.resumable_upload( self.bucket, key, filepath, headers=headers, num_threads=num_threads, ) # 返回下载链接 if not any((self.image_domain, self.asset_domain)): return True, result.resp.response.url return True, self.get_file_url(filepath, key) # def get_policy( # self, # filepath, # callback_url, # callback_data=None, # callback_content_type="application/json"): # """ # 授权给第三方上传 # # :param filepath: # :param callback_url: # :param callback_data: 需要回传的参数 # :param callback_content_type: 回调时的Content-Type # "application/json" # "application/x-www-form-urlencoded" # # :return: # """ # params = urllib.parse.urlencode( # dict(data=json.dumps(callback_data))) # policy_encode = self._get_policy_encode(filepath) # sign = self.get_signature(policy_encode) # # callback_dict = dict() # callback_dict["callbackUrl"] = callback_url # callback_dict["callbackBody"] = ( # "filepath=${object}&size=${size}&mime_type=${mimeType}" # "&img_height=${imageInfo.height}&img_width=${imageInfo.width}" # "&img_format=${imageInfo.format}&" + params # ) # callback_dict["callbackBodyType"] = callback_content_type # # callback_param = json.dumps(callback_dict).strip().encode() # base64_callback_body = base64.b64encode(callback_param) # # return dict( # accessid=self.access_key_id, # host=f"{self.scheme}://{self.bucket_name}.{self.endpoint}", # policy=policy_encode.decode(), # signature=sign, # dir=filepath, # callback=base64_callback_body.decode(), # ) # # def _get_policy_encode(self, filepath): # expire_time = datetime.datetime.now() + datetime.timedelta( # seconds=self.policy_expire_time # ) # policy_dict = dict( # expiration=expire_time.isoformat() + "Z", # conditions=[ # ["starts-with", "$key", filepath], # 指定值开始 # # ["eq", "$success_action_redirect", "public-read"], # 精确匹配 # # ["content-length-range", 1, 1024*1024*1024] # 对象大小限制 # ], # ) # policy = json.dumps(policy_dict).strip().encode() # 
return base64.b64encode(policy) # # def get_signature(self, policy_encode): # """ # 获取签名 # # :param policy_encode: # :return: # """ # h = hmac.new( # self.access_key_secret.encode("utf-8"), policy_encode, hashlib.sha1 # ) # sign_result = base64.encodebytes(h.digest()).strip() # return sign_result.decode() def update_file_headers(self, key, headers): self.bucket.update_object_meta(key, headers) def make_dir(dir_path): """新建目录""" try: os.makedirs(dir_path) except OSError: pass def copy_file(src, dst): """拷贝文件""" dst_dir = os.path.dirname(dst) make_dir(dst_dir) shutil.copy(src, dst) if __name__ == '__main__': kwargs = dict( access_key_id='', access_key_secret='', endpoint='oss-cn-shenzhen.aliyuncs.com', bucket_name='', ) oss_obj = OssManager(**kwargs) file_name = '/Users/edz/realibox/base-all/base/src/core/request/beta_gangnam_style.fbx' # remote_name = 'cmltest.fbx' # 上传 result_obj = oss_obj.upload(file_name) print(result_obj) """ http://realicloud-local.oss-cn-shenzhen.aliyuncs.com/beta_gangnam_style.fbx """ """ # result: { 'resp': <oss2.http.Response object at 0x1071ddc50>, 'status': 200, 'headers': { 'Server': 'AliyunOSS', 'Date': 'Tue, 20 Oct 2020 01:22:34 GMT', 'Content-Length': '0', 'Connection': 'keep-alive', 'x-oss-request-id': '5F8E3BDAFEC931303087D9D9', 'ETag': '"D41D8CD98F00B204E9800998ECF8427E"', 'x-oss-hash-crc64ecma': '0', 'Content-MD5': '1B2M2Y8AsgTpgAmY7PhCfg==', 'x-oss-server-time': '63' }, 'request_id': '5F8E3BDAFEC931303087D9D9', 'versionid': None, 'delete_marker': None, 'etag': 'D41D8CD98F00B204E9800998ECF8427E', 'crc': 0 } # result.resp: { 'response': <Response [200]>, 'status': 200, 'headers': { 'Server': 'AliyunOSS', 'Date': 'Tue, 20 Oct 2020 02:31:03 GMT', 'Content-Length': '0', 'Connection': 'keep-alive', 'x-oss-request-id': '5F8E4BE7FEC93130387B8D5B', 'ETag': '"D41D8CD98F00B204E9800998ECF8427E"', 'x-oss-hash-crc64ecma': '0', 'Content-MD5': '1B2M2Y8AsgTpgAmY7PhCfg==', 'x-oss-server-time': '49' }, 'request_id': '5F8E4BE7FEC93130387B8D5B', 
'_Response__all_read': True } # result.resp.response: { '_content': False, '_content_consumed': True, '_next': None, 'status_code': 200, 'headers': { 'Server': 'AliyunOSS', 'Date': 'Tue, 20 Oct 2020 02:32:13 GMT', 'Content-Length': '0', 'Connection': 'keep-alive', 'x-oss-request-id': '5F8E4C2D4D5A2B3339F164B7', 'ETag': '"D41D8CD98F00B204E9800998ECF8427E"', 'x-oss-hash-crc64ecma': '0', 'Content-MD5': '1B2M2Y8AsgTpgAmY7PhCfg==', 'x-oss-server-time': '18' }, 'raw': <urllib3.response.HTTPResponse object at 0x104254a10>, 'url': 'http://haier-mdcp-private.oss-cn-qingdao.aliyuncs.com/cmltest.fbx', 'encoding': None, 'history': [], 'reason': 'OK', 'cookies': <RequestsCookieJar[]>, 'elapsed': datetime.timedelta(microseconds=250406), 'request': <PreparedRequest [PUT]>, 'connection': <requests.adapters.HTTPAdapter object at 0x1041fb710> } """ # 下载 # res = oss_obj.download(remote_name) # print(res)
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/extensions/oss/aliyun.py
aliyun.py
import os import json import hmac import urllib import base64 import hashlib import datetime from importlib import import_module from abc import ABCMeta, abstractmethod IMAGE_FORMAT_SET = [ 'bmp', 'jpg', 'jpeg', 'png', 'tif', 'gif', 'pcx', 'tga', 'exif', 'fpx', 'svg', 'psd', 'cdr', 'pcd', 'dxf', 'ufo', 'eps', 'ai', 'raw', 'WMF', 'webp', 'tiff' ] class OssManagerError(ValueError): """""" class OssRequestError(Exception): """""" class OssManagerBase(metaclass=ABCMeta): def __init__( self, access_key_id, access_key_secret, bucket_name, endpoint=None, cname=None, cache_path='.', expire_time=30, **kwargs ): self.access_key_id = access_key_id self.access_key_secret = access_key_secret self.bucket_name = bucket_name self.endpoint = endpoint self.cache_path = cache_path self.scheme = kwargs.get("scheme", "https") self.image_domain = kwargs.get("image_domain") self.asset_domain = kwargs.get("asset_domain") self.policy_expire_time = kwargs.get("policy_expire_time", expire_time) self.cname = cname self.bucket = None @abstractmethod def create_bucket(self): """创建bucket""" @abstractmethod def list_buckets(self): """查询bucket列表""" @abstractmethod def is_exist_bucket(self, bucket_name=None): """判断bucket是否存在""" @abstractmethod def delete_bucket(self, bucket_name=None): """删除bucket""" @abstractmethod def get_sign_url(self, key, expire=10): """生成下载对象的带授权信息的URL""" @abstractmethod def post_sign_url(self, key, expire=10): """生成上传对象的带授权信息的URL""" @abstractmethod def download(self, *args, **kwargs): """""" @abstractmethod def upload(self, *args, **kwargs): """""" def _get_policy_encode(self, key, redirect_url): expire_time = datetime.datetime.now() + datetime.timedelta( seconds=self.policy_expire_time ) policy_dict = dict( expiration=expire_time.isoformat() + "Z", conditions=[ # {"acl": "public-read"}, # {"x-obs-acl": "public-read"}, # {"x-obs-security-token": "YwkaRTbdY8g7q...."}, {"bucket": "yzcore"}, {"success_action_redirect": redirect_url}, ["starts-with", "$key", key], # 指定值开始 # 
["eq", "$success_action_redirect", "public-read"], # 精确匹配 # ["content-length-range", 1, 1024*1024*1024] # 对象大小限制 ], ) policy = json.dumps(policy_dict).strip().encode() return base64.b64encode(policy) def get_signature(self, policy_encode): """ 获取签名 :param policy_encode: :return: """ h = hmac.new( self.access_key_secret.encode("utf-8"), policy_encode, hashlib.sha1 ) sign_result = base64.encodebytes(h.digest()).strip() return sign_result.decode() def get_policy( self, key, redirect_url, # callback_data=None, # callback_content_type="application/json" ): """ 授权给第三方上传 :param key: :param redirect_url: :return: """ policy_encode = self._get_policy_encode(key, redirect_url) sign = self.get_signature(policy_encode) return dict( key=key, accessid=self.access_key_id, host=f"{self.scheme}://{self.bucket_name}.{self.endpoint}", policy=policy_encode.decode(), signature=sign, success_action_redirect=redirect_url # callback=base64_callback_body.decode(), ) def get_file_url(self, filepath, key): if filepath and filepath.split('.')[-1] in IMAGE_FORMAT_SET: resource_url = u"//{domain}/{key}".format( domain=self.image_domain, key=key) else: resource_url = u"//{domain}/{key}".format( domain=self.asset_domain, key=key) return resource_url def delete_cache_file(self, filename): """删除文件缓存""" filepath = os.path.abspath(os.path.join(self.cache_path, filename)) assert os.path.isfile(filepath), '非文件或文件不存在' os.remove(filepath) def search_cache_file(self, filename): """文件缓存搜索""" # 拼接绝对路径 filepath = os.path.abspath(os.path.join(self.cache_path, filename)) if os.path.isfile(filepath): return filepath else: return None def make_dir(self, dir_path): """新建目录""" try: os.makedirs(dir_path) except OSError: pass class OssManagerProxy: def __init__(self, oss_type, **kwargs): # self.oss_type = oss_type self.client = self.select_oss(oss_type, **kwargs) def select_oss(self, oss_type, **kwargs): _module = import_module(f"yzcore.extensions.oss.{oss_type}") return _module.OssManager(**kwargs) def 
__getattr__(self, item): return getattr(self.client, item)
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/extensions/oss/__init__.py
__init__.py
import os
import functools

from yzcore.extensions.oss import OssManagerBase, OssRequestError

try:
    import boto3
except ImportError:
    boto3 = None


def wrap_request_return_bool(func):
    """Decorator mapping an S3 API response dict to a boolean.

    Returns True when ``ResponseMetadata.HTTPStatusCode`` < 300, otherwise
    False.  Exceptions are logged via traceback and False is returned
    (the original returned an implicit ``None`` on error; ``False`` keeps
    the advertised bool contract and is equally falsy for callers).
    """
    @functools.wraps(func)
    def wrap_func(*args, **kwargs):
        try:
            resp = func(*args, **kwargs)
            return resp['ResponseMetadata']['HTTPStatusCode'] < 300
        except Exception:
            import traceback
            print(traceback.format_exc())
            return False
    return wrap_func


class OssManager(OssManagerBase):
    """Amazon S3 implementation of the OSS manager, backed by boto3."""

    def __init__(self, *args, **kwargs):
        super(OssManager, self).__init__(*args, **kwargs)
        self.__init()

    def __init(self, *args, **kwargs):
        """Create the boto3 S3 client from the base-class credentials."""
        if boto3 is None:
            raise ImportError("'boto3' must be installed to use OssManager")
        self.client = boto3.client(
            's3',
            aws_access_key_id=self.access_key_id,
            aws_secret_access_key=self.access_key_secret,
        )

    @wrap_request_return_bool
    def create_bucket(self, bucket_name=None, location='cn-south-1'):
        """Create a bucket; True on success (via decorator).

        :param bucket_name: defaults to ``self.bucket_name``.
        :param location: AWS region for ``LocationConstraint``.  The original
            ignored this parameter and hard-coded ``'cn-northwest-1'``.
        """
        if bucket_name is None:
            bucket_name = self.bucket_name
        if location == 'cn-south-1':
            # The default value is a leftover from the Huawei backend and is
            # not a valid AWS region; map it to the previously hard-coded
            # region so default calls behave exactly as before.
            location = 'cn-northwest-1'
        return self.client.create_bucket(
            ACL='private',
            Bucket=bucket_name,
            CreateBucketConfiguration={'LocationConstraint': location},
        )

    def list_buckets(self):
        """Return the raw ``list_buckets`` response dict
        ({'Buckets': [...], 'Owner': {...}})."""
        return self.client.list_buckets()

    @wrap_request_return_bool
    def is_exist_bucket(self, bucket_name=None):
        """True if the bucket exists and is accessible (HEAD bucket)."""
        if bucket_name is None:
            bucket_name = self.bucket_name
        return self.client.head_bucket(Bucket=bucket_name)

    @wrap_request_return_bool
    def delete_bucket(self, bucket_name=None):
        """Delete the bucket; True on success (via decorator)."""
        if bucket_name is None:
            bucket_name = self.bucket_name
        return self.client.delete_bucket(Bucket=bucket_name)

    def get_sign_url(self, key, expire=10):
        """Pre-signed GET URL for ``key``, valid ``expire`` seconds.

        Fixed: the original called ``createSignedUrl``, a Huawei-OBS method
        that does not exist on a boto3 S3 client (AttributeError at call
        time).  boto3's equivalent is ``generate_presigned_url``.
        """
        return self.client.generate_presigned_url(
            'get_object',
            Params={'Bucket': self.bucket_name, 'Key': key},
            ExpiresIn=expire,
        )

    def post_sign_url(self, key, expire=10, form_param=None):
        """Pre-signed upload credentials.

        With ``form_param``: browser POST form fields/policy via
        ``generate_presigned_post``; otherwise a pre-signed PUT URL.
        (Fixed from nonexistent OBS-style calls, as in :meth:`get_sign_url`.)
        """
        if form_param:
            return self.client.generate_presigned_post(
                self.bucket_name, key, Fields=form_param, ExpiresIn=expire)
        return self.client.generate_presigned_url(
            'put_object',
            Params={'Bucket': self.bucket_name, 'Key': key},
            ExpiresIn=expire,
        )

    def download(self, key, local_name=None, is_return_obj=False,
                 progress_callback=None):
        """Download ``key`` into the local cache.

        :return: the (closed) file object when ``is_return_obj`` else the
            local path; ``None`` when the transfer fails (error is logged).
        """
        if not local_name:
            local_name = os.path.abspath(
                os.path.join(self.cache_path, key)
            )
        self.make_dir(os.path.dirname(local_name))
        try:
            if is_return_obj:
                with open(local_name, 'wb') as fileobj:
                    self.client.download_fileobj(
                        self.bucket_name, key, fileobj,
                        Callback=progress_callback)
                return fileobj
            self.client.download_file(
                self.bucket_name, key, local_name,
                Callback=progress_callback)
            return local_name
        except Exception:
            import traceback
            print(traceback.format_exc())

    def get_file_stream(self, key, bucket_name=None):
        """Return the raw ``get_object`` response (body is a stream)."""
        return self.client.get_object(
            Bucket=bucket_name if bucket_name else self.bucket_name,
            Key=key,
        )

    def upload(self, key=None, filepath=None, content=None):
        """Upload a local file or a file-like object.

        :param key: object key; defaults to the filepath basename.
        :raises ValueError: when neither ``filepath`` nor ``content`` given.
        :return: ``(True, url)`` on success, ``(False, None)`` on failure.
        """
        if not any((filepath, content)):
            raise ValueError("not any((filepath, content))")
        if key is None and filepath:
            key = filepath.split('/')[-1]
        try:
            if content:
                self.client.upload_fileobj(content, self.bucket_name, key)
            else:
                self.client.upload_file(filepath, self.bucket_name, key)
            return True, self.get_file_url(filepath, key)
        except Exception:
            import traceback
            print(traceback.format_exc())
            return False, None
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/extensions/oss/amazon.py
amazon.py
import os
import functools

from yzcore.extensions.oss import OssManagerBase, OssRequestError

try:
    import obs
except ImportError:
    obs = None


def wrap_request_return_bool(func):
    """Decorator mapping an OBS response to a boolean.

    True when ``resp.status`` < 300, otherwise False.  Exceptions are
    logged and False is returned (previously an implicit ``None``).
    """
    @functools.wraps(func)
    def wrap_func(*args, **kwargs):
        try:
            resp = func(*args, **kwargs)
            return resp.status < 300
        except Exception:
            import traceback
            print(traceback.format_exc())
            return False
    return wrap_func


class OssManager(OssManagerBase):
    """Huawei OBS implementation of the OSS manager."""

    # Fixed: these maps used to dereference ``obs.HeadPermission`` /
    # ``obs.StorageClass`` unconditionally, so merely importing this module
    # crashed with AttributeError when 'esdk-obs-python' was missing —
    # defeating the guarded import above.  They are now only built when the
    # SDK is present; ``__init`` still raises a clear ImportError otherwise.
    acl_type = {}
    storage_cls = {}
    if obs is not None:
        acl_type = {
            "private": obs.HeadPermission.PRIVATE,
            "onlyread": obs.HeadPermission.PUBLIC_READ,
            "readwrite": obs.HeadPermission.PUBLIC_READ_WRITE,
            # bucket public-read; objects inside inherit public-read
            "bucket_read": obs.HeadPermission.PUBLIC_READ_DELIVERED,
            # bucket public read/write; objects inherit public read/write
            "bucket_readwrite": obs.HeadPermission.PUBLIC_READ_WRITE_DELIVERED,
            # bucket/object owner has full control
            "owner_full_control": obs.HeadPermission.BUCKET_OWNER_FULL_CONTROL,
        }
        # storage classes
        storage_cls = {
            "standard": obs.StorageClass.STANDARD,  # standard
            "ia": obs.StorageClass.WARM,            # infrequent access
            "cold_archive": obs.StorageClass.COLD,  # cold archive
        }

    def __init__(self, *args, **kwargs):
        super(OssManager, self).__init__(*args, **kwargs)
        self.__init()

    def __init(self, *args, **kwargs):
        """Create the ObsClient from the base-class credentials."""
        if obs is None:
            raise ImportError(
                "'esdk-obs-python' must be installed to use OssManager")
        self.obsClient = obs.ObsClient(
            access_key_id=self.access_key_id,
            secret_access_key=self.access_key_secret,
            server=self.endpoint
        )

    def create_bucket(self, bucket_name=None, location='cn-south-1'):
        """Create a bucket; True on success.

        :raises OssRequestError: when OBS reports a failure.
        """
        if bucket_name is None:
            bucket_name = self.bucket_name
        resp = self.obsClient.createBucket(bucket_name, location=location)
        if resp.status < 300:
            return True
        raise OssRequestError(
            f"errorCode: {resp.errorCode}. Message: {resp.errorMessage}.")

    def list_buckets(self):
        """List buckets (with location info).

        :raises OssRequestError: when OBS reports a failure.
        """
        resp = self.obsClient.listBuckets(isQueryLocation=True)
        if resp.status < 300:
            return resp.body.buckets
        raise OssRequestError(
            f"errorCode: {resp.errorCode}. Message: {resp.errorMessage}.")

    @wrap_request_return_bool
    def is_exist_bucket(self, bucket_name=None):
        """True if the bucket exists (HEAD bucket, via decorator)."""
        if bucket_name is None:
            bucket_name = self.bucket_name
        return self.obsClient.headBucket(bucket_name)

    @wrap_request_return_bool
    def delete_bucket(self, bucket_name=None):
        """Delete the bucket; True on success (via decorator)."""
        if bucket_name is None:
            bucket_name = self.bucket_name
        return self.obsClient.deleteBucket(bucket_name)

    def get_sign_url(self, key, expire=10):
        """Pre-signed GET URL for ``key``, valid ``expire`` seconds."""
        res = self.obsClient.createSignedUrl(
            "GET", self.bucket_name, key, expire)
        return res.signedUrl

    def post_sign_url(self, key, expire=10, form_param=None):
        """POST signature (form upload) when ``form_param`` is given,
        otherwise a pre-signed PUT URL."""
        if form_param:
            return self.obsClient.createPostSignature(
                self.bucket_name, key, expire, formParams=form_param)
        res = self.obsClient.createSignedUrl(
            "PUT", self.bucket_name, key, expire)
        return res.signedUrl

    def download(self, key, local_name=None, is_return_obj=False,
                 progress_callback=None):
        """Download ``key`` into the local cache.

        :return: file object / downloaded URL-path on success, ``None`` on
            failure (errors are logged).
        """
        if not local_name:
            local_name = os.path.abspath(
                os.path.join(self.cache_path, key)
            )
        self.make_dir(os.path.dirname(local_name))
        try:
            resp = self.obsClient.getObject(
                self.bucket_name, key,
                downloadPath=local_name,
                progressCallback=progress_callback
            )
            if resp.status < 300:
                if is_return_obj:
                    with open(local_name, 'rb') as fileobj:
                        return fileobj
                return resp.body.url
            print('errorCode:', resp.errorCode)
            print('errorMessage:', resp.errorMessage)
        except Exception:
            import traceback
            print(traceback.format_exc())

    def get_file_stream(self, key, bucket_name=None):
        """Fetch the object fully into memory and return its bytes buffer.

        Fixed: the original ignored ``bucket_name`` and always used
        ``self.bucket_name``.
        """
        try:
            resp = self.obsClient.getObject(
                bucket_name if bucket_name else self.bucket_name,
                key,
                loadStreamInMemory=True,
            )
            if resp.status < 300:
                return resp.body.buffer
            print('errorCode:', resp.errorCode)
            print('errorMessage:', resp.errorMessage)
        except Exception:
            import traceback
            print(traceback.format_exc())

    def upload(self, key=None, filepath=None, content=None):
        """Upload a local file or raw content.

        :raises ValueError: when neither ``filepath`` nor ``content`` given.
        :return: ``(True, url)`` on success, ``(False, None)`` on failure
            (the original's except path returned a bare ``None``, breaking
            callers that unpack the 2-tuple).
        """
        if not any((filepath, content)):
            raise ValueError("not any((filepath, content))")
        if key is None and filepath:
            key = filepath.split('/')[-1]
        try:
            if content:
                resp = self.obsClient.putContent(
                    self.bucket_name, key, content=content)
            else:
                resp = self.obsClient.putFile(
                    self.bucket_name, key, filepath)
            if resp.status < 300:
                return True, self.get_file_url(filepath, key)
            print('errorCode:', resp.errorCode)
            print('errorMessage:', resp.errorMessage)
            return False, None
        except Exception:
            import traceback
            print(traceback.format_exc())
            return False, None
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/extensions/oss/huawei.py
huawei.py
import os
import sys

# Make the launch directory importable; LOG_PATH defaults to ./logs under it.
curr_path = os.path.abspath(os.path.dirname(os.curdir))
sys.path.append(curr_path)
# _path = os.path.join(os.path.dirname(os.path.dirname(curr_path)), 'output')
LOG_PATH = os.path.join(curr_path, 'logs')
# from src.settings import log_conf
# LOG_PATH = log_conf.get('log_path')

from .filters import *

# Base dictConfig-style logging configuration.  The "handlers" and "loggers"
# sections are intentionally (near-)empty here: they are filled in at runtime
# per application name (see InitLoggerConfig in this package's __init__).
LOGGING_CONFIG = {
    "version": 1,
    # Keep handlers/loggers that were created before this config is applied.
    "disable_existing_loggers": False,
    "formatters": {
        "simple": {  # simple output format
            'format': '%(asctime)s | %(levelname)s | PID:%(process)d | TID:%(threadName)s | [%(module)s: %(funcName)s] | - %(message)s',
            'datefmt': '%Y-%m-%d %H:%M:%S'
        },
        'standard': {  # more detailed format, customizable
            'format': '%(asctime)s | %(levelname)s | PID:%(process)d | TID:%(threadName)s | [%(module)s: %(funcName)s] | - %(message)s'
        },
        'custom': {  # custom JSON-line output
            'format': '{"level": "%(levelname)s", "now": "%(created)f", "PID": "%(process)d", "pathname": "%(pathname)s", "lineno": "%(lineno)d", "message": "%(message)s"}'
        },
    },
    # Per-level filters; the classes come from .filters (star import above).
    "filters": {
        'debug_filter': {
            '()': DebugFilter
        },
        'info_filter': {
            '()': InfoFilter
        },
        'warning_filter': {
            '()': WarningFilter
        },
        'error_filter': {
            '()': ErrorFilter
        },
        'critical_filter': {
            '()': CriticalFilter
        },
        'notset_filter': {
            '()': NotSetFilter
        }
    },
    "handlers": {
        # Populated dynamically.  Kept examples of the expected shapes:
        # # console handler
        # "console": {
        #     "class": "logging.StreamHandler",
        #     # handler level: records below this level are dropped
        #     "level": "DEBUG",
        #     "formatter": "simple",
        #     "stream": "ext://sys.stdout"
        # },
        # # time-rotating file handler (splits per day; e.g. test.log ->
        # # test.log.yyyy-mm-dd at midnight)
        # "file": {
        #     "class": "handlers.TimedRotatingFileHandlerMP",
        #     "level": "DEBUG",
        #     "formatter": "standard",
        #     "filename": os.path.join(LOG_PATH, 'default', 'info.log'),
        #     # split unit: S second, D day, H hour, M minute, W0-W6 weekday
        #     "when": 'D',
        #     "interval": 1,
        #     'backupCount': 5,  # number of backups kept
        #     "encoding": "utf8",
        #     "filters": ["info_filter"]
        # },
        # "info_file": {
        #     "class": "handlers.TimedRotatingFileHandlerMP",
        #     "level": "INFO",
        #     "formatter": "standard",
        #     "filename": os.path.join(LOG_PATH, 'default', 'info.log'),
        #     "when": 'D',
        #     'backupCount': 5,  # number of backups kept
        #     "encoding": "utf8",
        #     "filters": ["info_filter"]
        # },
        # "err_file": {
        #     "class": "handlers.TimedRotatingFileHandlerMP",
        #     "level": "WARN",
        #     "formatter": "standard",
        #     "filename": os.path.join(LOG_PATH, 'default', 'error.log'),
        #     "when": 'D',
        #     'backupCount': 5,  # number of backups kept
        #     "encoding": "utf8",
        #     "filters": ["info_filter"]
        # },
    },
    "loggers": {
        # Populated dynamically.  Example shape:
        # "default_logger": {
        #     # handlers attached to this logger
        #     'handlers': ['console', 'file'],
        #     # logger level; records below it never reach the handlers
        #     'level': "DEBUG",
        #     # whether to propagate to the root logger (root handlers would
        #     # then also receive the records)
        #     'propagate': False
        # },
        # "debug_logger":   {'handlers': ['console', 'debug_file'], 'level': "DEBUG", 'propagate': False},
        # "info_logger":    {'handlers': ['console', 'info_file'],  'level': "INFO",  'propagate': False},
        # "warn_logger":    {'handlers': ['console', 'err_file'],   'level': "WARN",  'propagate': False},
        # "error_logger":   {'handlers': ['console', 'err_file'],   'level': "ERROR", 'propagate': False},
        # "critical_logger":{'handlers': ['console', 'err_file'],   'level': "DEBUG", 'propagate': False},
    },
    # Root logger, used when no logger name is given:
    # "root": {
    #     'handlers': ['file'],
    #     'level': "DEBUG",
    #     'propagate': True
    # },
    # Incremental update of an existing config (default: full replace):
    # "incremental": False
}
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/logger/config.py
config.py
import os
import sys
sys.path.append(os.path.dirname(os.pardir))
import re

# Crude platform switch: 'win' (no fcntl, stdlib rotating handler) vs 'unix'.
SYS_ENV = 'win' if re.search('[Ww]in', sys.platform) else 'unix'

import logging
from .config import LOG_PATH, LOGGING_CONFIG
from .handlers import TimedRotatingFileHandlerMP as TRFMP
# from logging.config import dictConfig
# from logging.handlers import TimedRotatingFileHandler


class InitLoggerConfig:
    """Build and apply a dictConfig logging setup for one app_name.

    One instance per app_name (singleton via __new__); repeated construction
    with the same name is a no-op thanks to the __is_init flag.
    """
    __instance = {}   # singleton cache: app_name -> instance
    __is_init = False  # guards against re-running __init__ on a cached instance

    def __new__(cls, app_name='default', *args, **kwargs):
        """Singleton per app_name."""
        if app_name not in cls.__instance:
            cls.__instance[app_name] = super().__new__(cls)
        return cls.__instance[app_name]

    def __init__(self, app_name='default', log_config=LOGGING_CONFIG,
                 log_path=LOG_PATH, is_debug=True):
        """Initialize the logger by filling in and applying LOGGING_CONFIG.

        :param app_name: logger namespace; also the log sub-directory name
        :param log_config: dictConfig-style template to mutate and apply
        :param log_path: base directory for log files
        :param is_debug: when True everything goes to the console and no
            log directory is created
        """
        if self.__is_init is True:
            # already configured for this app_name — singleton re-entry
            return
        self.log_path = log_path
        log_file_dir = os.path.join(self.log_path, app_name)
        self.__is_init = True
        self.app_name = app_name
        self.is_debug = is_debug
        self.log_config = log_config
        # level names used to generate one handler per level
        self.log_levels = ['debug', 'info', 'warning', 'error', 'critical']
        # default target: logs/${app_name} under the project root
        self.log_cur_path = os.path.join(self.log_path, self.app_name)
        if not is_debug:
            # debug mode writes only to the console, so the directory is
            # only created for non-debug (file-writing) mode
            print("初始化%s的logger,日志写入:%s 文件夹下" % (app_name, log_file_dir))
            self.mkdir_log_path()
        self.configure_logging()

    def mkdir_log_path(self):
        """Create the per-app log directory if missing."""
        if not os.path.exists(self.log_cur_path):
            os.makedirs(self.log_cur_path)

    def configure_logging(self):
        """Generate per-level handlers + the app logger, then apply config."""
        # logging.addLevelName(TRACE_LOG_LEVEL, "TRACE")
        if sys.version_info < (3, 7):
            # https://bugs.python.org/issue30520 — make Logger picklable
            import pickle
            import logging

            def __reduce__(self):
                if isinstance(self, logging.RootLogger):
                    return logging.getLogger, ()
                if logging.getLogger(self.name) is not self:
                    raise pickle.PicklingError("logger cannot be pickled")
                return logging.getLogger, (self.name,)
            logging.Logger.__reduce__ = __reduce__

        # Dynamically extend the config: one handler per level per app_name.
        for level in self.log_levels:
            handler_name = '%s_%s' % (self.app_name, level)
            if level == 'debug':
                # debug records always go to the console
                self.log_config['handlers'][
                    handler_name] = self.get_console_handler_conf()
            else:
                lev_up = level.upper()
                if self.is_debug:
                    # is_debug: everything to the console
                    console_handler_conf = self.get_console_handler_conf(lev_up)
                    self.log_config['handlers'][
                        handler_name] = console_handler_conf
                else:
                    # otherwise one rotating file per level
                    filename = os.path.join(self.log_cur_path,
                                            (level + '.log'))
                    self.log_config['handlers'][
                        handler_name] = self.get_file_handler_conf(
                        filename=filename, level=lev_up)
        # register the '<app_name>_logger' logger
        logger_name = '%s_logger' % self.app_name
        self.log_config['loggers'][logger_name] = self.get_logger_conf()
        # apply the assembled configuration to the logging module
        if self.log_config is not None:
            from logging import config
            if isinstance(self.log_config, dict):
                config.dictConfig(self.log_config)
            else:
                config.fileConfig(self.log_config)
        # dictConfig(LOGGING_CONFIG)

    def get_console_handler_conf(self, level='DEBUG'):
        """Console (stdout) handler config at the given level."""
        console_handler_conf = {
            # output stream class
            "class": "logging.StreamHandler",
            # records below this level are dropped by the handler
            "level": level,
            "formatter": "custom",
            "stream": "ext://sys.stdout",
            'filters': ['%s_filter' % (level.lower())]
        }
        return console_handler_conf

    @staticmethod
    def get_file_handler_conf(filename: str, level='INFO'):
        """Daily-rotating file handler config for the given level.

        Uses the multiprocess-safe handler on unix; falls back to the
        stdlib TimedRotatingFileHandler on Windows (no fcntl).
        """
        file_handler_conf = {
            "class": f"{TRFMP.__module__}.{TRFMP.__name__}",
            "formatter": "custom",
            # rotation unit: D day (test.log -> test.log.yyyy-mm-dd at 00:00)
            "when": 'D',
            "interval": 1,
            'backupCount': 5,  # number of backups kept
            "encoding": "utf8",
        }
        if SYS_ENV == 'win':
            file_handler_conf[
                'class'] = 'logging.handlers.TimedRotatingFileHandler'
        filters = ['%s_filter' % (level.lower())]
        update_dict = {'filename': filename, 'level': level,
                       'filters': filters}
        file_handler_conf.update(update_dict)
        return file_handler_conf

    def get_email_handler_conf(self):
        """Placeholder: email (SMTP) handler config — not implemented."""

    def get_queue_handler_conf(self):
        """Placeholder: queue handler config — not implemented."""

    def get_http_handler_conf(self):
        """Placeholder: HTTP handler config — not implemented."""

    def get_file_rotating_conf(self):
        """Placeholder: size-based rotation config — not implemented."""

    def get_logger_conf(self):
        """Logger config attaching all per-level handlers for this app."""
        logger_conf = {'handlers': [], 'level': "DEBUG", 'propagate': False}
        # attach every generated '<app>_<level>' handler
        logger_conf['handlers'] = [
            '%s_%s' % (self.app_name, level) for level in self.log_levels]
        return logger_conf


# Obtain the application logger (configures it on first call).
def get_logger(app_name: str, is_debug=True):
    InitLoggerConfig(app_name, is_debug=is_debug)
    logger_name = '%s_logger' % app_name
    logger = logging.getLogger(logger_name)
    return logger


if __name__ == '__main__':
    # singleton smoke test
    logger = get_logger('cml_test', is_debug=True)
    logger.error('error log')
    logger.debug('debug log')
    logger.debug('debug log')
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/logger/__init__.py
__init__.py
from logging import StreamHandler, FileHandler
from logging.handlers import RotatingFileHandler, TimedRotatingFileHandler, SMTPHandler
# NOTE(review): fcntl is POSIX-only — this module cannot be imported on
# Windows (config.py selects the stdlib handler there instead).
import fcntl, time, os, codecs, string, re, types, pickle, struct, shutil
from stat import ST_DEV, ST_INO, ST_MTIME


class StreamHandlerMP(StreamHandler):
    """
    A handler class which writes logging records, appropriately formatted,
    to a stream.
    Use for multiprocess.
    """

    def emit(self, record):
        """
        Emit a record.
        First seek to the end of the file so that multiple processes can
        append to the same file without overwriting each other.
        """
        try:
            if hasattr(self.stream, "seek"):
                self.stream.seek(0, os.SEEK_END)
        except IOError as e:
            pass  # best-effort: a non-seekable stream is written as-is
        StreamHandler.emit(self, record)


class FileHandlerMP(FileHandler, StreamHandlerMP):
    """
    A handler class which writes formatted logging records to disk files
    for multiprocess
    """

    def emit(self, record):
        """
        Emit a record.
        If the stream was not opened because 'delay' was specified in the
        constructor, open it before calling the superclass's emit.
        """
        if self.stream is None:
            self.stream = self._open()
        StreamHandlerMP.emit(self, record)


class RotatingFileHandlerMP(RotatingFileHandler, FileHandlerMP):
    """
    Handler for logging to a set of files, which switches from one file
    to the next when the current file reaches a certain size.

    Based on logging.RotatingFileHandler, modified for Multiprocess
    """
    _lock_dir = '.lock'
    # NOTE(review): this mkdir runs at import (class-definition) time and is
    # relative to the process CWD — confirm that side effect is intended.
    if os.path.exists(_lock_dir):
        pass
    else:
        os.mkdir(_lock_dir)

    def doRollover(self):
        """
        Do a rollover, as described in __init__().
        For multiprocess, we use shutil.copy instead of rename, so another
        process still holding the old file keeps a valid handle.
        """
        self.stream.close()
        if self.backupCount > 0:
            # shift existing backups: .(i) -> .(i+1), oldest dropped
            for i in range(self.backupCount - 1, 0, -1):
                sfn = "%s.%d" % (self.baseFilename, i)
                dfn = "%s.%d" % (self.baseFilename, i + 1)
                if os.path.exists(sfn):
                    if os.path.exists(dfn):
                        os.remove(dfn)
                    shutil.copy(sfn, dfn)
            dfn = self.baseFilename + ".1"
            if os.path.exists(dfn):
                os.remove(dfn)
            if os.path.exists(self.baseFilename):
                shutil.copy(self.baseFilename, dfn)
        self.mode = 'w'
        self.stream = self._open()

    def emit(self, record):
        """
        Emit a record.

        Output the record to the file, catering for rollover as described
        in doRollover().
        For multiprocess, we use file lock. Any better method ?
        """
        try:
            if self.shouldRollover(record):
                self.doRollover()
            # one lock file per (base log, level) pair under .lock/
            FileLock = self._lock_dir + '/' + os.path.basename(self.baseFilename) + '.' + record.levelname
            f = open(FileLock, "w+")
            fcntl.flock(f.fileno(), fcntl.LOCK_EX)
            FileHandlerMP.emit(self, record)
            fcntl.flock(f.fileno(), fcntl.LOCK_UN)
            f.close()
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)


class TimedRotatingFileHandlerMP(TimedRotatingFileHandler, FileHandlerMP):
    """
    Handler for logging to a file, rotating the log file at certain timed
    intervals.

    If backupCount is > 0, when rollover is done, no more than backupCount
    files are kept - the oldest ones are deleted.
    """
    _lock_dir = '.lock'
    # NOTE(review): import-time mkdir relative to CWD, as above.
    if os.path.exists(_lock_dir):
        pass
    else:
        os.mkdir(_lock_dir)

    def __init__(self, filename, when='h', interval=1, backupCount=0,
                 encoding=None, delay=0, utc=0):
        FileHandlerMP.__init__(self, filename, 'a', encoding, delay)
        self.encoding = encoding
        self.when = when.upper()
        self.backupCount = backupCount
        self.utc = utc
        # Calculate the real rollover interval, which is just the number of
        # seconds between rollovers.  Also set the filename suffix used when
        # a rollover occurs.  Current 'when' events supported:
        # S - Seconds
        # M - Minutes
        # H - Hours
        # D - Days
        # midnight - roll over at midnight
        # W{0-6} - roll over on a certain day; 0 - Monday
        #
        # Case of the 'when' specifier is not important; lower or upper case
        # will work.
        if self.when == 'S':
            self.suffix = "%Y-%m-%d_%H-%M-%S"
            self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}$"
        elif self.when == 'M':
            self.suffix = "%Y-%m-%d_%H-%M"
            self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}$"
        elif self.when == 'H':
            self.suffix = "%Y-%m-%d_%H"
            self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}$"
        elif self.when == 'D' or self.when == 'MIDNIGHT':
            self.suffix = "%Y-%m-%d"
            self.extMatch = r"^\d{4}-\d{2}-\d{2}$"
        elif self.when.startswith('W'):
            if len(self.when) != 2:
                raise ValueError("You must specify a day for weekly rollover from 0 to 6 (0 is Monday): %s" % self.when)
            if self.when[1] < '0' or self.when[1] > '6':
                raise ValueError("Invalid day specified for weekly rollover: %s" % self.when)
            self.dayOfWeek = int(self.when[1])
            self.suffix = "%Y-%m-%d"
            self.extMatch = r"^\d{4}-\d{2}-\d{2}$"
        else:
            raise ValueError("Invalid rollover interval specified: %s" % self.when)
        self.extMatch = re.compile(self.extMatch)
        if interval != 1:
            raise ValueError("Invalid rollover interval, must be 1")

    def shouldRollover(self, record):
        """
        Determine if rollover should occur.

        record is not used, as we are just comparing times, but it is
        needed so the method signatures are the same.

        Compares the relevant time field of 'now' against the log file's
        mtime, so all processes agree on when to roll.
        """
        if not os.path.exists(self.baseFilename):
            # print "file don't exist"
            return 0
        cTime = time.localtime(time.time())
        mTime = time.localtime(os.stat(self.baseFilename)[ST_MTIME])
        if self.when == "S" and cTime[5] != mTime[5]:
            # print "cTime:", cTime[5], "mTime:", mTime[5]
            return 1
        elif self.when == 'M' and cTime[4] != mTime[4]:
            # print "cTime:", cTime[4], "mTime:", mTime[4]
            return 1
        elif self.when == 'H' and cTime[3] != mTime[3]:
            # print "cTime:", cTime[3], "mTime:", mTime[3]
            return 1
        elif (self.when == 'MIDNIGHT' or self.when == 'D') and cTime[2] != mTime[2]:
            # print "cTime:", cTime[2], "mTime:", mTime[2]
            return 1
        # NOTE(review): __init__ stores 'W0'..'W6' in self.when, so this
        # 'W' comparison can never match — weekly rollover appears dead.
        # (cTime[1]/mTime[1] is also the month field, not the weekday.)
        elif self.when == 'W' and cTime[1] != mTime[1]:
            # print "cTime:", cTime[1], "mTime:", mTime[1]
            return 1
        else:
            return 0

    def doRollover(self):
        """
        do a rollover; in this case, a date/time stamp is appended to the
        filename when the rollover happens.  However, you want the file to
        be named for the start of the interval, not the current time.
        If there is a backup count, then we have to get a list of matching
        filenames, sort them and remove the one with the oldest suffix.

        For multiprocess, we use shutil.copy instead of rename.
        """
        if self.stream:
            self.stream.close()
        # get the time that this sequence started at and make it a TimeTuple
        # t = self.rolloverAt - self.interval
        t = int(time.time())
        if self.utc:
            timeTuple = time.gmtime(t)
        else:
            timeTuple = time.localtime(t)
        dfn = self.baseFilename + "." + time.strftime(self.suffix, timeTuple)
        if os.path.exists(dfn):
            os.remove(dfn)
        if os.path.exists(self.baseFilename):
            shutil.copy(self.baseFilename, dfn)
            # print "%s -> %s" % (self.baseFilename, dfn)
            # os.rename(self.baseFilename, dfn)
        if self.backupCount > 0:
            # find the oldest log file and delete it
            # s = glob.glob(self.baseFilename + ".20*")
            # if len(s) > self.backupCount:
            #     s.sort()
            #     os.remove(s[0])
            for s in self.getFilesToDelete():
                os.remove(s)
        self.mode = 'w'
        self.stream = self._open()

    def emit(self, record):
        """
        Emit a record.

        Output the record to the file, catering for rollover as described
        in doRollover().
        For multiprocess, we use file lock. Any better method ?
        """
        try:
            if self.shouldRollover(record):
                self.doRollover()
            # one lock file per (base log, level) pair under .lock/
            FileLock = self._lock_dir + '/' + os.path.basename(self.baseFilename) + '.' + record.levelname
            f = open(FileLock, "w+")
            fcntl.flock(f.fileno(), fcntl.LOCK_EX)
            FileHandlerMP.emit(self, record)
            fcntl.flock(f.fileno(), fcntl.LOCK_UN)
            f.close()
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/logger/handlers.py
handlers.py
from typing import Any, Dict, Generic, List, Optional, Type, TypeVar, Union

from pydantic import BaseModel, AnyUrl
from pymongo.client_session import ClientSession
try:
    from pymongo import InsertOne, DeleteOne, ReplaceOne, UpdateMany
    from pymongo.collection import Collection
    from pymongo import MongoClient
except ImportError:
    pass

ModelType = TypeVar("ModelType", bound=str)
CreateSchemaType = TypeVar("CreateSchemaType", bound=BaseModel)
UpdateSchemaType = TypeVar("UpdateSchemaType", bound=BaseModel)
DictorList = TypeVar("DictorList", dict, list)
# DictorList = Union[Dict, List]


class MongoCRUDBase(Generic[CreateSchemaType, UpdateSchemaType]):
    """Generic CRUD helper around a single MongoDB collection.

    Supports optional logical deletion via an ``isDelete`` flag field and
    optional multi-document transactions via ``session``.
    """

    def __init__(
            self,
            collection_name: ModelType,
            db_name: str = "test_db",
            db_url: AnyUrl = "mongodb://localhost:27017/",
            client: ClientSession = None
    ):
        """
        :param collection_name: target collection name
        :param db_name: database name
        :param db_url: connection URL, used only when ``client`` is None
        :param client: reuse an existing MongoClient instead of connecting
        """
        if client:
            self.client = client
        else:
            self.client = MongoClient(db_url)
        self.db = self.client[db_name]
        self.collection = self.db[collection_name]

    def count(self, opt: dict = None, session: ClientSession = None):
        """Count documents.

        With a filter uses an exact ``count_documents``; without one falls
        back to the fast (metadata-based) ``estimated_document_count``.
        """
        if opt:
            return self.collection.count_documents(opt, session=session)
        return self.collection.estimated_document_count(session=session)

    def get(self, opt: dict = None, is_logical_del: bool = False,
            select_col: DictorList = None, session: ClientSession = None):
        """Fetch a single document.

        :param opt: query filter (None means "any document")
        :param is_logical_del: when True, exclude logically-deleted docs
        :param select_col: projection — list of field names, or a dict of
            fields to include/exclude
        :param session: optional transaction session
        """
        if opt is None:
            # Fixed: ``opt.update(...)`` below crashed with AttributeError
            # when opt was None and is_logical_del was True.
            opt = {}
        if is_logical_del:
            opt.update({"isDelete": False})
        return self.collection.find_one(opt, projection=select_col,
                                        session=session)

    def list(self, opt: dict = None,
             select_col: DictorList = None,
             limit: int = 0, offset: int = 0,
             sort: List[tuple] = None,
             is_logical_del: bool = False,
             session: ClientSession = None
             ):
        """Query multiple documents.

        If ``select_col`` is a list, ``_id`` is always returned; use a dict
        (e.g. ``{'_id': False}``) to exclude fields.

        :param opt: query filter
        :param select_col: projection, e.g. {'_id': 0, 'author': 1}
        :param limit: max results (0 = no limit)
        :param offset: number of documents to skip
        :param sort: e.g. [('field1', pymongo.ASCENDING)]
        :param is_logical_del: when True, exclude logically-deleted docs
        :param session: optional transaction session
        """
        if opt is None:
            opt = dict()
        if is_logical_del:
            opt.update({"isDelete": False})
        data = dict(
            filter=opt,
            projection=select_col,
            skip=offset,
            limit=limit,
            sort=sort
        )
        results = list(self.collection.find(**data, session=session))
        return results

    def create(self, data: DictorList, is_return_obj: bool = False,
               session: ClientSession = None):
        """Insert one document (dict) or many (list of dicts).

        :param is_return_obj: when True, re-fetch and return the inserted
            document(s) instead of the raw insert result
        :raises TypeError: when ``data`` is neither a dict nor a list
        """
        if isinstance(data, dict):
            result = self.collection.insert_one(data, session=session)
            if is_return_obj:
                result = self.collection.find_one(
                    {'_id': result.inserted_id}, session=session)
        elif isinstance(data, list):
            result = self.collection.insert_many(data, session=session)
            if is_return_obj:
                result = self.list({'_id': {'$in': result.inserted_ids}},
                                   session=session)
        else:
            # was: raise Exception('Create failed!') — a bare Exception with
            # no hint; TypeError names the actual contract violation and is
            # still caught by any existing ``except Exception`` handler.
            raise TypeError(
                'Create failed: data must be a dict or a list of dicts')
        return result

    def update(self, opt, data: Dict, is_many: bool = False,
               is_set: bool = True, session: ClientSession = None):
        """Update one or many documents.

        :param opt: query filter, e.g. {'field1': {'$gt': 'a'}}
        :param data: fields to update, e.g. {'field': 'xxx'}
        :param is_many: update all matches instead of the first one
        :param is_set: wrap ``data`` in ``$set`` (True by default)
        :param session: optional transaction session
        :return: the raw UpdateResult when acknowledged, else None
        """
        if is_set:
            update = {"$set": data}
        else:
            update = data
        if not is_many:
            result = self.collection.update_one(opt, update, session=session)
        else:
            result = self.collection.update_many(opt, update, session=session)
        if result.acknowledged:
            return result

    def delete(self, opt, is_logical_del: bool = False,
               is_many: bool = False, session: ClientSession = None):
        """Delete documents, physically or logically.

        Logical deletion sets ``isDelete: True`` instead of removing.

        :param opt: query filter
        :param is_logical_del: when True perform a logical delete
        :param is_many: affect all matches instead of the first one
        :param session: optional transaction session
        :return: modified count (logical) or deleted count (physical)
        """
        if is_logical_del:
            update = {"$set": {"isDelete": True}}
            if not is_many:
                result = self.collection.update_one(
                    filter=opt, update=update, session=session)
            else:
                result = self.collection.update_many(
                    filter=opt, update=update, session=session)
            return result.modified_count
        else:
            if not is_many:
                result = self.collection.delete_one(
                    filter=opt, session=session)
            else:
                result = self.collection.delete_many(
                    filter=opt, session=session)
            return result.deleted_count

    def batch_update(self, bulk_update_datas: List[dict],
                     session: ClientSession = None):
        """Bulk UpdateMany operations.

        :param bulk_update_datas: items shaped like {"opt": {}, "data": {}}
        :param session: optional transaction session
        :return: total modified count (0 for empty input)
        """
        if not bulk_update_datas:
            return 0
        requests = [
            UpdateMany(item['opt'], item['data'])
            for item in bulk_update_datas
        ]
        result = self.collection.bulk_write(requests=requests,
                                            session=session)
        return result.modified_count

    def aggregate(self, pipeline: List[dict],
                  session: ClientSession = None, **kwargs):
        """Run an aggregation pipeline and return the results as a list."""
        cursor = self.collection.aggregate(pipeline, session=session,
                                           **kwargs)
        return list(cursor)


if __name__ == '__main__':
    db = MongoCRUDBase('hello_cml')
    print(db.count())
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/db/pymongo_crud_base.py
pymongo_crud_base.py
from typing import Any, Dict, Generic, List, Optional, Type, TypeVar, Union
try:
    from pydantic import BaseModel
    import sqlalchemy
    from sqlalchemy import text
    from sqlalchemy.orm import Session
    from sqlalchemy.ext.declarative import as_declarative, declared_attr
except ImportError:
    # BUG FIX: bind the sentinel so the availability assert in __init__
    # raises AssertionError (with its message) instead of a bare NameError.
    sqlalchemy = None

from yzcore.core.encoders import jsonable_encoder


@as_declarative()
class Base:
    """Declarative base whose __tablename__ is derived from the class name."""

    # Generate __tablename__ automatically
    @declared_attr
    def __tablename__(cls) -> str:
        return cls.__name__.lower()

    id: Any
    __name__: str


ModelType = TypeVar("ModelType", bound=Base)
CreateSchemaType = TypeVar("CreateSchemaType", bound=BaseModel)
UpdateSchemaType = TypeVar("UpdateSchemaType", bound=BaseModel)


class OrmCRUDBase(Generic[ModelType, CreateSchemaType, UpdateSchemaType]):
    """Generic CRUD helper bound to a single SQLAlchemy model class."""

    def __init__(self, model: Type[ModelType]):
        """
        CRUD object with default methods to Create, Read, Update, Delete (CRUD).

        **Parameters**

        * `model`: A SQLAlchemy model class
        * `schema`: A Pydantic model (schema) class
        """
        assert sqlalchemy is not None, "'sqlalchemy' must be installed to use OrmCRUDBase"
        self.model = model

    def count(self, db: Session, **kwargs) -> int:
        """
        Return the number of rows matching the equality filters in kwargs.

        :param db: SQLAlchemy session
        :param kwargs: equality filters passed to filter_by(); empty → total count
        :return: row count
        """
        query = db.query(self.model)
        if kwargs:
            query = query.filter_by(**kwargs)
        return query.count()

    def get(self, db: Session, model_id: Any) -> Optional[ModelType]:
        """
        Fetch a single row by primary key, or None if absent.

        :param db: SQLAlchemy session
        :param model_id: primary-key value
        :return: model instance or None
        """
        return db.query(self.model).get(model_id)

    def get_one(self, db: Session, **kwargs) -> Optional[ModelType]:
        """
        Fetch exactly one row matching the filters, or None.

        :param db: SQLAlchemy session
        :param kwargs: equality filters
        :raises sqlalchemy.orm.exc.MultipleResultsFound: if more than one row matches
        :return: model instance or None
        """
        return db.query(self.model).filter_by(**kwargs).one_or_none()

    def list(
            self, db: Session, *,
            sort: List[str] = None,
            offset: int = 0,
            limit: int = 100,
            **kwargs
    ) -> List[ModelType]:
        """
        Return a page of rows matching the filters.

        :param db: SQLAlchemy session
        :param sort: column names to sort by, e.g. ['-create_time', 'update_time']
            (leading '-' means descending); passed through as raw SQL text
        :param offset: rows to skip
        :param limit: maximum rows to return
        :param kwargs: equality filters
        :return: list of model instances
        """
        order_clause = text(','.join(sort)) if sort else None
        query = db.query(self.model)
        if kwargs:
            query = query.filter_by(**kwargs)
        return query.order_by(order_clause).offset(offset).limit(limit).all()

    def create(
            self, db: Session, *,
            data: Union[Dict[str, Any], CreateSchemaType],
            is_transaction: bool = False
    ) -> ModelType:
        """
        Insert one row and return the created model instance.

        :param db: SQLAlchemy session
        :param data: creation payload (dict or pydantic schema)
        :param is_transaction: when True, skip commit so the caller controls
            the transaction boundary
        :return: the persisted model instance
        """
        if isinstance(data, BaseModel):
            data = jsonable_encoder(data)
        db_obj = self.model(**data)  # type: ignore
        db.add(db_obj)
        if not is_transaction:
            db.commit()
            db.refresh(db_obj)
        return db_obj

    def update(
            self, db: Session, *,
            model_id: int = None,
            obj: ModelType = None,
            query: Dict[str, Any] = None,
            data: Union[UpdateSchemaType, Dict[str, Any]],
            is_return_obj: bool = False,
            is_transaction: bool = False
    ) -> ModelType:
        """
        Update a single object, located by model instance, primary key, or
        query filters (in that order of precedence).

        Note: when a model instance is passed, 'is_return_obj=False' is
        ignored and the updated instance is returned.

        :param db: SQLAlchemy session
        :param model_id: primary key of the row to update
        :param obj: already-loaded model instance to update
        :param query: equality filters locating the row
        :param data: fields to update (dict or pydantic schema)
        :param is_return_obj: return the updated instance instead of the
            updated-row count
        :param is_transaction: when True, skip commit/refresh
        :return: update_count or obj or None
        """
        # BUG FIX: use `is None` checks so a falsy-but-valid model_id of 0
        # (or an empty query dict) is not mistaken for "argument missing".
        if model_id is None and obj is None and query is None:
            raise ValueError('At least one of [model_id、query、obj] exists')
        if isinstance(data, dict):
            update_data = data
        else:
            update_data = data.dict(exclude_unset=True)

        if not is_return_obj and obj is None:
            # Bulk-style UPDATE without loading the row.
            if model_id is not None:
                update_count = db.query(self.model).filter(
                    self.model.id == model_id).update(update_data)
            else:
                update_count = db.query(self.model).filter_by(
                    **query).update(update_data)
            if not is_transaction:
                db.commit()
            return update_count
        else:
            if obj is None:
                if model_id is not None:
                    obj = self.get(db, model_id)
                else:
                    obj = self.get_one(db, **query)
            if obj:
                obj_data = jsonable_encoder(obj)
                # Only copy keys the model actually has.
                for field in obj_data:
                    if field in update_data:
                        setattr(obj, field, update_data[field])
                db.add(obj)
                if not is_transaction:
                    db.commit()
                    db.refresh(obj)
                return obj

    def delete(
            self, db: Session, *,
            model_id: int,
            is_return_obj: bool = False,
            is_transaction: bool = False
    ) -> ModelType:
        """
        Delete one row by primary key.

        :param db: SQLAlchemy session
        :param model_id: primary key of the row to delete
        :param is_return_obj: return the deleted instance instead of the
            deleted-row count
        :param is_transaction: when True, skip commit
        :return: deleted obj (or None when absent) / deleted-row count
        """
        if is_return_obj:
            obj = db.query(self.model).get(model_id)
            # BUG FIX: guard against a missing row; previously db.delete(None)
            # raised an obscure UnmappedInstanceError.
            if obj is None:
                return None
            db.delete(obj)
            if not is_transaction:
                db.commit()
            return obj
        else:
            del_count = db.query(self.model).filter(
                self.model.id == model_id).delete(synchronize_session=False)
            if not is_transaction:
                db.commit()
            return del_count

    def bulk_delete(self, db: Session, ids: List[int] = None, **kwargs):
        """
        Delete many rows, either by a list of primary keys or by filters.

        NOTE(review): unlike delete(), this never commits — presumably the
        caller owns the transaction here; confirm before relying on it.

        :param db: SQLAlchemy session
        :param ids: primary keys to delete; takes precedence over kwargs
        :param kwargs: equality filters used when ids is not given
        :return: deleted-row count
        """
        if ids:
            del_count = db.query(self.model).filter(
                self.model.id.in_(ids)).delete(synchronize_session=False)
        else:
            del_count = db.query(self.model).filter_by(
                **kwargs).delete(synchronize_session=False)
        return del_count
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/db/sqlalchemy_crud_base.py
sqlalchemy_crud_base.py
from collections import defaultdict
from enum import Enum
from pathlib import PurePath
from types import GeneratorType
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union

from pydantic import BaseModel
from pydantic.json import ENCODERS_BY_TYPE

SetIntStr = Set[Union[int, str]]
DictIntStrAny = Dict[Union[int, str], Any]


def generate_encoders_by_class_tuples(
        type_encoder_map: Dict[Any, Callable]
) -> Dict[Callable, Tuple]:
    """Invert a {type: encoder} map into {encoder: (type, ...)} so that a
    single isinstance() check can match subclasses of encodable types."""
    encoders_by_class_tuples: Dict[Callable, Tuple] = defaultdict(tuple)
    for type_, encoder in type_encoder_map.items():
        encoders_by_class_tuples[encoder] += (type_,)
    return encoders_by_class_tuples


encoders_by_class_tuples = generate_encoders_by_class_tuples(ENCODERS_BY_TYPE)


def jsonable_encoder(
    obj: Any,
    include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
    exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
    by_alias: bool = True,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    custom_encoder: Optional[dict] = None,
    sqlalchemy_safe: bool = True,
) -> Any:
    """
    Recursively convert `obj` (pydantic model, dict, sequence, enum, path,
    or any type with a registered encoder) into JSON-compatible builtins.

    :param include: whitelist of top-level keys/fields to keep
    :param exclude: blacklist of top-level keys/fields to drop
    :param by_alias: use pydantic field aliases when dumping models
    :param exclude_unset/exclude_defaults/exclude_none: pydantic dump options
    :param custom_encoder: {type: callable} overrides for leaf encoding
    :param sqlalchemy_safe: skip dict keys starting with "_sa" (SQLAlchemy
        instance state)
    :raises ValueError: when the object cannot be converted at all
    """
    # BUG FIX: custom_encoder previously defaulted to a shared mutable {}.
    if custom_encoder is None:
        custom_encoder = {}
    if include is not None and not isinstance(include, set):
        include = set(include)
    if exclude is not None and not isinstance(exclude, set):
        exclude = set(exclude)
    if isinstance(obj, BaseModel):
        # BUG FIX: copy before update(); the previous code mutated the model
        # config's own json_encoders dict in place.
        encoder = dict(getattr(obj.__config__, "json_encoders", {}))
        if custom_encoder:
            encoder.update(custom_encoder)
        obj_dict = obj.dict(
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_none=exclude_none,
            exclude_defaults=exclude_defaults,
        )
        if "__root__" in obj_dict:
            obj_dict = obj_dict["__root__"]
        return jsonable_encoder(
            obj_dict,
            exclude_none=exclude_none,
            exclude_defaults=exclude_defaults,
            custom_encoder=encoder,
            sqlalchemy_safe=sqlalchemy_safe,
        )
    if isinstance(obj, Enum):
        return obj.value
    if isinstance(obj, PurePath):
        return str(obj)
    if isinstance(obj, (str, int, float, type(None))):
        return obj
    if isinstance(obj, dict):
        # BUG FIX: the previous inline test
        #   (include and key in include) or not exclude or key not in exclude
        # ignored `include` entirely whenever `exclude` was None (the
        # `not exclude` term was always True). Build the allowed key set
        # explicitly instead.
        allowed_keys = set(obj.keys())
        if include is not None:
            allowed_keys &= include
        if exclude is not None:
            allowed_keys -= exclude
        encoded_dict = {}
        for key, value in obj.items():
            if (
                (
                    not sqlalchemy_safe
                    or (not isinstance(key, str))
                    or (not key.startswith("_sa"))
                )
                and (value is not None or not exclude_none)
                and key in allowed_keys
            ):
                encoded_key = jsonable_encoder(
                    key,
                    by_alias=by_alias,
                    exclude_unset=exclude_unset,
                    exclude_none=exclude_none,
                    custom_encoder=custom_encoder,
                    sqlalchemy_safe=sqlalchemy_safe,
                )
                encoded_value = jsonable_encoder(
                    value,
                    by_alias=by_alias,
                    exclude_unset=exclude_unset,
                    exclude_none=exclude_none,
                    custom_encoder=custom_encoder,
                    sqlalchemy_safe=sqlalchemy_safe,
                )
                encoded_dict[encoded_key] = encoded_value
        return encoded_dict
    if isinstance(obj, (list, set, frozenset, GeneratorType, tuple)):
        encoded_list = []
        for item in obj:
            encoded_list.append(
                jsonable_encoder(
                    item,
                    include=include,
                    exclude=exclude,
                    by_alias=by_alias,
                    exclude_unset=exclude_unset,
                    exclude_defaults=exclude_defaults,
                    exclude_none=exclude_none,
                    custom_encoder=custom_encoder,
                    sqlalchemy_safe=sqlalchemy_safe,
                )
            )
        return encoded_list

    # Leaf fallbacks: caller overrides first, then the pydantic registry
    # (exact type, then isinstance match for subclasses).
    if custom_encoder:
        if type(obj) in custom_encoder:
            return custom_encoder[type(obj)](obj)
        else:
            for encoder_type, encoder in custom_encoder.items():
                if isinstance(obj, encoder_type):
                    return encoder(obj)

    if type(obj) in ENCODERS_BY_TYPE:
        return ENCODERS_BY_TYPE[type(obj)](obj)
    for encoder, classes_tuple in encoders_by_class_tuples.items():
        if isinstance(obj, classes_tuple):
            return encoder(obj)

    # Last resort: view the object as a mapping, then as its __dict__.
    errors: List[Exception] = []
    try:
        data = dict(obj)
    except Exception as e:
        errors.append(e)
        try:
            data = vars(obj)
        except Exception as e:
            errors.append(e)
            raise ValueError(errors)
    return jsonable_encoder(
        data,
        by_alias=by_alias,
        exclude_unset=exclude_unset,
        exclude_defaults=exclude_defaults,
        exclude_none=exclude_none,
        custom_encoder=custom_encoder,
        sqlalchemy_safe=sqlalchemy_safe,
    )
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/core/encoders.py
encoders.py
from typing import List, Tuple


class ValuesSortDict(dict):
    """
    Multi-value sorted dict.

    Each key maps to a list of fixed-length tuples kept sorted in ascending
    order by the tuple's second element (index 1). The list is re-sorted on
    every insertion via add()/increase().
    """

    def __init__(self, tuple_len_limit=4, tuple_default_value=('', 0, 0.0, 0.0)):
        """
        :param tuple_len_limit: required length of every stored tuple
            (default 4)
        :param tuple_default_value: template used to pad a bare-string value
            into a full tuple; should have length == tuple_len_limit
        """
        self._len_limit = tuple_len_limit
        self._default_tuple = tuple_default_value
        super().__init__()

    def __repr__(self):
        return "<%s: %s>" % (self.__class__.__name__, super().__repr__())

    def __getitem__(self, key):
        # A missing key yields an empty list instead of raising KeyError.
        try:
            _values = super().__getitem__(key)
        except KeyError:
            _values = []
        return _values

    def __setitem__(self, key, value: List[Tuple]):
        # Accept either a list of tuples, or a single tuple of the
        # configured length (wrapped into a one-element list).
        if isinstance(value, list):
            super().__setitem__(key, value)
        elif isinstance(value, tuple) and len(value) == self._len_limit:
            super().__setitem__(key, [value])
        else:
            raise ValueError('This is not the value I want.')

    def add(self, key, value: tuple):
        """
        Insert `value` under `key`, keeping the list sorted by index 1.

        A bare string is padded into a full tuple using the default template.

        :raises ValueError: if the tuple length differs from the limit, or
            the value is neither a string nor a tuple
        """
        if isinstance(value, str):
            v_list = [value]
            v_list.extend(self._default_tuple[1:])
            value = tuple(v_list)
        if isinstance(value, tuple):
            if len(value) != self._len_limit:
                raise ValueError(f'Error: len(tuple_value)!={self._len_limit}')
            _values = self.__getitem__(key)
            result = self._sort_values(value, _values)
            self.__setitem__(key, result)
        else:
            raise ValueError('This is not the value I want.')

    def _sort_values(self, value: Tuple, _values: List[Tuple]) -> List[Tuple]:
        """Insert `value` into `_values` (already sorted ascending by
        index 1) in a single O(n) pass; mutates and returns `_values`."""
        for i, v in enumerate(_values):
            if value[1] < v[1]:
                _values.insert(i, value)
                return _values
        _values.append(value)
        return _values

    def increase(self, key, v, inc=1):
        """
        Increment the sort counter (index 1) of an existing entry and
        re-insert it in sorted position.

        :param key: dict key
        :param v: either the first element of a stored tuple (str), or a
            full tuple of the configured length
        :param inc: increment amount (default 1)
        :raises ValueError: if a string `v` matches no stored entry, or `v`
            has an unsupported type/length
        """
        _values = self.__getitem__(key)
        if isinstance(v, str):
            for i, _v in enumerate(_values):
                if _v[0] == v:
                    _v = _values.pop(i)
                    self.__setitem__(
                        key,
                        self._sort_values((_v[0], _v[1] + inc, *_v[2:]),
                                          _values)
                    )
                    return
            # Fell through the loop without a match.
            raise ValueError(f'The value[0]:[{v}] is not exist')
        elif isinstance(v, tuple) and len(v) == self._len_limit:
            # Remove the old tuple if present, then re-insert incremented.
            try:
                _values.remove(v)
            except ValueError:
                pass
            self.__setitem__(key, self._sort_values(
                (v[0], v[1] + inc, *v[2:]), _values))
        else:
            raise ValueError(f'The value:[{v}] is error')


if __name__ == '__main__':
    import random
    import time

    # BUG FIX: the demo previously used the default tuple_len_limit=4 while
    # feeding 2-tuples, so add() raised ValueError immediately.
    d = ValuesSortDict(tuple_len_limit=2, tuple_default_value=('', 0))

    ipnum = random.randint(1, 100000)
    value = (f"ip{ipnum}", ipnum)
    print(value)

    d.add('model', value)
    start = time.time()
    d.increase("model", value)
    end1 = time.time()
    d.increase("model", value[0])
    end2 = time.time()
    print("===>add:", end1 - start)
    print("===>add0:", end2 - end1)
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/core/datastructures.py
datastructures.py
import os
import shutil
import stat
from os import path

import yzcore
from . import CommandError
from yzrpc.commands import CommandBase


class TemplateCommand(CommandBase):
    """Scaffold a new project or app by copying a bundled template tree,
    rewriting file/dir names and template suffixes along the way."""

    requires_system_checks = False
    # The supported URL schemes
    url_schemes = ['http', 'https', 'ftp']
    # Rewrite the following suffixes when determining the target filename.
    rewrite_template_suffixes = (
        # Allow shipping invalid .py files without byte-compilation.
        ('.py-tpl', '.py'),
    )

    def add_arguments(self, parser):
        # Register the CLI arguments consumed by handle().
        parser.add_argument('name', help='Name of the application or project.')
        parser.add_argument('directory', nargs='?',
                            help='Optional destination directory')
        # parser.add_argument('--template', help='The path or URL to load the template from.')

    def handle(self, app_or_project, name, target=None, **options):
        """
        Copy the '<app_or_project>_template' tree shipped inside yzcore into
        the destination directory, renaming placeholders to `name`.

        :param app_or_project: "app" or "project"
        :param name: project or application name
        :param target: optional destination directory; defaults to a new
            directory named `name` under the current working directory
        :param options: extra CLI options (currently unused)
        :raises CommandError: if the destination exists / cannot be created,
            or a target file already exists
        :return: None
        """
        self.app_or_project = app_or_project
        # self.paths_to_remove = []
        # self.verbosity = options['verbosity']

        self.validate_name(name, app_or_project)

        # if some directory is given, make sure it's nicely expanded
        if target is None:
            top_dir = path.join(os.getcwd(), name)
            try:
                os.makedirs(top_dir)
            except FileExistsError:
                raise CommandError("'%s' already exists" % top_dir)
            except OSError as e:
                raise CommandError(e)
        else:
            top_dir = os.path.abspath(path.expanduser(target))
            if not os.path.exists(top_dir):
                raise CommandError("Destination directory '%s' does not "
                                   "exist, please create it first."
                                   % top_dir)
            # An app scaffolded into an existing target gets its own
            # subdirectory named after the app.
            if self.app_or_project == "app":
                top_dir = path.join(top_dir, name)
                try:
                    os.makedirs(top_dir)
                except FileExistsError:
                    raise CommandError("'%s' already exists" % top_dir)
                except OSError as e:
                    raise CommandError(e)

        # Placeholder token replaced by `name` in paths and filenames,
        # and the template subtree to copy from.
        base_name = '%s_name' % app_or_project
        base_subdir = '%s_template' % app_or_project
        template_dir = path.join(yzcore.__path__[0], 'templates', base_subdir)
        prefix_length = len(template_dir) + 1

        for root, dirs, files in os.walk(template_dir):

            path_rest = root[prefix_length:]
            relative_dir = path_rest.replace(base_name, name)
            # relative_dir = path_rest.replace(base_subdir, name)
            if relative_dir:
                target_dir = path.join(top_dir, relative_dir)
                if not path.exists(target_dir):
                    os.mkdir(target_dir)

            # Prune hidden dirs and __pycache__ in place so os.walk skips them.
            for dirname in dirs[:]:
                if dirname.startswith('.') or dirname == '__pycache__':
                    dirs.remove(dirname)

            for filename in files:
                if filename.endswith(('.pyo', '.pyc', '.py.class')):
                    # Ignore some files as they cause various breakages.
                    continue
                old_path = path.join(root, filename)
                new_path = path.join(top_dir, relative_dir,
                                     filename.replace(base_name, name))
                for old_suffix, new_suffix in self.rewrite_template_suffixes:
                    if new_path.endswith(old_suffix):
                        new_path = new_path[:-len(old_suffix)] + new_suffix
                        break  # Only rewrite once

                if path.exists(new_path):
                    raise CommandError("%s already exists, overlaying a "
                                       "project or app into an existing "
                                       "directory won't replace conflicting "
                                       "files" % new_path)

                shutil.copyfile(old_path, new_path)
                # if self.verbosity >= 2:
                self.stdout.write("Creating %s\n" % new_path)
                try:
                    # Preserve the template file's mode and force writability
                    # (templates may ship read-only).
                    shutil.copymode(old_path, new_path)
                    self.make_writeable(new_path)
                except OSError:
                    self.stderr.write(
                        "Notice: Couldn't set permission bits on %s. You're "
                        "probably using an uncommon filesystem setup. No "
                        "problem." % new_path, self.style.NOTICE)

    def validate_name(self, name, app_or_project):
        # Minimal validation: only rejects a missing name. NOTE(review):
        # nothing checks that `name` is a valid Python identifier — confirm
        # whether that is enforced upstream.
        a_or_an = 'an' if app_or_project == 'app' else 'a'
        if name is None:
            raise CommandError('you must provide {an} {app} name'.format(
                an=a_or_an,
                app=app_or_project,
            ))

    def make_writeable(self, filename):
        """
        Make sure that the file is writeable.
        Useful if our source is read-only.
        """
        if not os.access(filename, os.W_OK):
            st = os.stat(filename)
            new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR
            os.chmod(filename, new_permissions)
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/core/management/templates.py
templates.py
# Text/bytes coercion and IRI/URI helpers (derived from Django's
# django.utils.encoding).
import codecs
import datetime
import locale
from decimal import Decimal
from urllib.parse import quote


class Promise:
    """
    Base class for the proxy class created in the closure of the lazy
    function. It's used to recognize promises in code.
    """
    pass


class UnicodeDecodeErr(UnicodeDecodeError):
    # Wraps UnicodeDecodeError to also report the offending object and its
    # type in the error message.
    def __init__(self, obj, *args):
        self.obj = obj
        super().__init__(*args)

    def __str__(self):
        return '%s. You passed in %r (%s)' % (super().__str__(), self.obj,
                                              type(self.obj))


def smart_text(s, encoding='utf-8', strings_only=False, errors='strict'):
    """
    Return a string representing 's'. Treat bytestrings using the 'encoding'
    codec.
    If strings_only is True, don't convert (some) non-string-like objects.
    """
    if isinstance(s, Promise):
        # The input is the result of a gettext_lazy() call.
        return s
    return force_text(s, encoding, strings_only, errors)


# Types passed through unchanged by force_text/force_bytes when
# strings_only=True.
_PROTECTED_TYPES = (
    type(None), int, float, Decimal, datetime.datetime, datetime.date,
    datetime.time,
)


def is_protected_type(obj):
    """Determine if the object instance is of a protected type.

    Objects of protected types are preserved as-is when passed to
    force_text(strings_only=True).
    """
    return isinstance(obj, _PROTECTED_TYPES)


def force_text(s, encoding='utf-8', strings_only=False, errors='strict'):
    """
    Similar to smart_text, except that lazy instances are resolved to
    strings, rather than kept as lazy objects.
    If strings_only is True, don't convert (some) non-string-like objects.
    """
    # Handle the common case first for performance reasons.
    if issubclass(type(s), str):
        return s
    if strings_only and is_protected_type(s):
        return s
    try:
        if isinstance(s, bytes):
            s = str(s, encoding, errors)
        else:
            s = str(s)
    except UnicodeDecodeError as e:
        raise UnicodeDecodeErr(s, *e.args)
    return s


def smart_bytes(s, encoding='utf-8', strings_only=False, errors='strict'):
    """
    Return a bytestring version of 's', encoded as specified in 'encoding'.
    If strings_only is True, don't convert (some) non-string-like objects.
    """
    if isinstance(s, Promise):
        # The input is the result of a gettext_lazy() call.
        return s
    return force_bytes(s, encoding, strings_only, errors)


def force_bytes(s, encoding='utf-8', strings_only=False, errors='strict'):
    """
    Similar to smart_bytes, except that lazy instances are resolved to
    strings, rather than kept as lazy objects.
    If strings_only is True, don't convert (some) non-string-like objects.
    """
    # Handle the common case first for performance reasons.
    if isinstance(s, bytes):
        if encoding == 'utf-8':
            return s
        else:
            # Re-encode: bytes are assumed to currently be UTF-8.
            return s.decode('utf-8', errors).encode(encoding, errors)
    if strings_only and is_protected_type(s):
        return s
    if isinstance(s, memoryview):
        return bytes(s)
    return str(s).encode(encoding, errors)


# Aliases kept for Django-style call sites. NOTE: assigning __doc__ on the
# alias also changes the docstring of the aliased function (same object).
smart_str = smart_text
force_str = force_text

smart_str.__doc__ = """
Apply smart_text in Python 3 and smart_bytes in Python 2.

This is suitable for writing to sys.stdout (for instance).
"""

force_str.__doc__ = """
Apply force_text in Python 3 and force_bytes in Python 2.
"""


def iri_to_uri(iri):
    """
    Convert an Internationalized Resource Identifier (IRI) portion to a URI
    portion that is suitable for inclusion in a URL.

    This is the algorithm from section 3.1 of RFC 3987, slightly simplified
    since the input is assumed to be a string rather than an arbitrary byte
    stream.

    Take an IRI (string or UTF-8 bytes, e.g. '/I ♥ Django/' or
    b'/I \xe2\x99\xa5 Django/') and return a string containing the encoded
    result with ASCII chars only (e.g. '/I%20%E2%99%A5%20Django/').
    """
    # The list of safe characters here is constructed from the "reserved" and
    # "unreserved" characters specified in sections 2.2 and 2.3 of RFC 3986:
    #     reserved    = gen-delims / sub-delims
    #     gen-delims  = ":" / "/" / "?" / "#" / "[" / "]" / "@"
    #     sub-delims  = "!" / "$" / "&" / "'" / "(" / ")"
    #                   / "*" / "+" / "," / ";" / "="
    #     unreserved  = ALPHA / DIGIT / "-" / "." / "_" / "~"
    # Of the unreserved characters, urllib.parse.quote() already considers all
    # but the ~ safe.
    # The % character is also added to the list of safe characters here, as the
    # end of section 3.1 of RFC 3987 specifically mentions that % must not be
    # converted.
    if iri is None:
        return iri
    elif isinstance(iri, Promise):
        iri = str(iri)
    return quote(iri, safe="/#%[]=:;$&()+,!?*@'~")


# List of byte values that uri_to_iri() decodes from percent encoding.
# First, the unreserved characters from RFC 3986:
_ascii_ranges = [[45, 46, 95, 126], range(65, 91), range(97, 123)]
_hextobyte = {
    (fmt % char).encode(): bytes((char,))
    for ascii_range in _ascii_ranges
    for char in ascii_range
    for fmt in ['%02x', '%02X']
}
# And then everything above 128, because bytes ≥ 128 are part of multibyte
# unicode characters.
_hexdig = '0123456789ABCDEFabcdef'
_hextobyte.update({
    (a + b).encode(): bytes.fromhex(a + b)
    for a in _hexdig[8:] for b in _hexdig
})


def uri_to_iri(uri):
    """
    Convert a Uniform Resource Identifier(URI) into an Internationalized
    Resource Identifier(IRI).

    This is the algorithm from section 3.2 of RFC 3987, excluding step 4.

    Take an URI in ASCII bytes (e.g. '/I%20%E2%99%A5%20Django/') and return
    a string containing the encoded result (e.g. '/I%20♥%20Django/').
    """
    if uri is None:
        return uri
    uri = force_bytes(uri)
    # Fast selective unquote: First, split on '%' and then starting with the
    # second block, decode the first 2 bytes if they represent a hex code to
    # decode. The rest of the block is the part after '%AB', not containing
    # any '%'. Add that to the output without further processing.
    bits = uri.split(b'%')
    if len(bits) == 1:
        iri = uri
    else:
        parts = [bits[0]]
        append = parts.append
        hextobyte = _hextobyte
        for item in bits[1:]:
            hex = item[:2]
            if hex in hextobyte:
                append(hextobyte[item[:2]])
                append(item[2:])
            else:
                # Unknown escape: keep the '%' and the block verbatim.
                append(b'%')
                append(item)
        iri = b''.join(parts)
    return repercent_broken_unicode(iri).decode()


def escape_uri_path(path):
    """
    Escape the unsafe characters from the path portion of a Uniform Resource
    Identifier (URI).
    """
    # These are the "reserved" and "unreserved" characters specified in
    # sections 2.2 and 2.3 of RFC 2396:
    #   reserved    = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" | "$" | ","
    #   unreserved  = alphanum | mark
    #   mark        = "-" | "_" | "." | "!" | "~" | "*" | "'" | "(" | ")"
    # The list of safe characters here is constructed subtracting ";", "=",
    # and "?" according to section 3.3 of RFC 2396.
    # The reason for not subtracting and escaping "/" is that we are escaping
    # the entire path, not a path segment.
    return quote(path, safe="/:@&+$,-_.!~*'()")


def repercent_broken_unicode(path):
    """
    As per section 3.2 of RFC 3987, step three of converting a URI into an
    IRI, repercent-encode any octet produced that is not part of a strictly
    legal UTF-8 octet sequence.
    """
    while True:
        try:
            path.decode()
        except UnicodeDecodeError as e:
            # CVE-2019-14235: A recursion shouldn't be used since the exception
            # handling uses massive amounts of memory
            repercent = quote(path[e.start:e.end], safe=b"/#%[]=:;$&()+,!?*@'~")
            path = path[:e.start] + force_bytes(repercent) + path[e.end:]
        else:
            return path


def filepath_to_uri(path):
    """Convert a file system path to a URI portion that is suitable for
    inclusion in a URL.

    Encode certain chars that would normally be recognized as special chars
    for URIs. Do not encode the ' character, as it is a valid character
    within URIs. See the encodeURIComponent() JavaScript function for details.
    """
    if path is None:
        return path
    # I know about `os.sep` and `os.altsep` but I want to leave
    # some flexibility for hardcoding separators.
    return quote(path.replace("\\", "/"), safe="/~!*()'")


def get_system_encoding():
    """
    The encoding of the default system locale. Fallback to 'ascii' if the
    #encoding is unsupported by Python or could not be determined. See
    tickets #10335 and #5846.
    """
    # NOTE(review): locale.getdefaultlocale() is deprecated since Python
    # 3.11 — confirm the supported interpreter range before modernizing.
    try:
        encoding = locale.getdefaultlocale()[1] or 'ascii'
        codecs.lookup(encoding)
    except Exception:
        encoding = 'ascii'
    return encoding


# Resolved once at import time.
DEFAULT_LOCALE_ENCODING = get_system_encoding()
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/utils/encoding.py
encoding.py
""" Django's standard crypto functions and utilities. """ import hashlib import hmac import random import time from .encoding import force_bytes # from yzcore.core.default_settings import get_settings # # settings = get_settings() # Use the system PRNG if possible try: random = random.SystemRandom() using_sysrandom = True except NotImplementedError: import warnings warnings.warn('A secure pseudo-random number generator is not available ' 'on your system. Falling back to Mersenne Twister.') using_sysrandom = False def get_random_string(length=12, allowed_chars='abcdefghijklmnopqrstuvwxyz' 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'): """ Return a securely generated random string. The default length of 12 with the a-z, A-Z, 0-9 character set returns a 71-bit value. log_2((26+26+10)^12) =~ 71 bits """ # if not using_sysrandom: # # This is ugly, and a hack, but it makes things better than # # the alternative of predictability. This re-seeds the PRNG # # using a value that is hard for an attacker to predict, every # # time a random string is required. This may change the # # properties of the chosen random sequence slightly, but this # # is better than absolute predictability. # random.seed( # hashlib.sha256( # ('%s%s%s' % (random.getstate(), time.time(), settings.SECRET_KEY)).encode() # ).digest() # ) return ''.join(random.choice(allowed_chars) for i in range(length)) def constant_time_compare(val1, val2): """Return True if the two strings are equal, False otherwise.""" return hmac.compare_digest(force_bytes(val1), force_bytes(val2)) def pbkdf2(password, salt, iterations, dklen=0, digest=None): """Return the hash of password using pbkdf2.""" if digest is None: digest = hashlib.sha256 dklen = dklen or None password = force_bytes(password) salt = force_bytes(salt) return hashlib.pbkdf2_hmac(digest().name, password, salt, iterations, dklen)
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/utils/crypto.py
crypto.py
import asyncio
from concurrent import futures
from socket import AF_INET
from typing import TypeVar, Union, Optional, List, Dict, AnyStr

try:
    import json as _json
except (ImportError, ModuleNotFoundError):
    import json as _json

try:
    import aiohttp
    from pydantic import BaseModel, Field
except (ImportError, ModuleNotFoundError):
    # Without pydantic a request spec is just a plain dict shape.
    AioHttpParams = List[Dict[str, Union[str, int, Dict[str, Union[str, int]]]]]
else:
    class AioHttpParams(BaseModel):
        """Declarative description of a single HTTP request (for bulk_request)."""
        method: AnyStr
        url: AnyStr
        params: Optional[Dict]
        data: Optional[Dict]
        json_: Optional[Dict] = Field(alias='json')
        headers: Optional[Dict]
        timeout: Optional[int]

RequestParams = TypeVar("RequestParams", bound=AioHttpParams)

SIZE_POOL_AIOHTTP = 100   # per-host connection pool size
CONCURRENCY = 100         # cap on concurrent in-flight requests
TIMEOUT_KEEP_ALIVE = 5    # keep-alive (s); match the web server's TCP keep-alive (uvicorn defaults to 5s)


class AioHTTP:
    """
    Process-wide wrapper around a shared ``aiohttp.ClientSession``.

    Note: when used with FastAPI, hook session setup/teardown into the
    application events::

        app = FastAPI(
            on_startup=[AioHTTP.on_startup],
            on_shutdown=[AioHTTP.on_shutdown]
        )
    """
    # Limits how many requests may be in flight at once.
    semaphore: asyncio.Semaphore = asyncio.Semaphore(CONCURRENCY)
    # Lazily created shared session; see get_session().
    session: aiohttp.ClientSession = None

    @classmethod
    def get_session(cls, cookies=None,
                    json_serialize=_json.dumps) -> aiohttp.ClientSession:
        """Return the shared session, (re)creating it if absent or closed."""
        if cls.session is None or cls.session.closed:
            # NOTE(review): the session-level total=2s timeout looks
            # inconsistent with the 30s per-request default; the per-request
            # `timeout` argument overrides it — confirm the intent.
            timeout = aiohttp.ClientTimeout(total=2)
            connector = aiohttp.TCPConnector(
                family=AF_INET,
                limit_per_host=SIZE_POOL_AIOHTTP,
                keepalive_timeout=TIMEOUT_KEEP_ALIVE
            )
            cls.session = aiohttp.ClientSession(
                timeout=timeout,
                connector=connector,
                cookies=cookies,
                json_serialize=json_serialize,
            )
        return cls.session

    @classmethod
    async def get(cls, url, params=None, data=None, json=None,
                  headers=None, timeout=30, **kwargs):
        """Async GET request."""
        return await cls.fetch(
            'get', url, params, data, json, headers, timeout, **kwargs)

    @classmethod
    async def post(cls, url, params=None, data=None, json=None,
                   headers=None, timeout=30, **kwargs):
        """Async POST request."""
        return await cls.fetch(
            'post', url, params, data, json, headers, timeout, **kwargs)

    @classmethod
    async def put(cls, url, params=None, data=None, json=None,
                  headers=None, timeout=30, **kwargs):
        """Async PUT request."""
        return await cls.fetch(
            'put', url, params, data, json, headers, timeout, **kwargs)

    @classmethod
    async def patch(cls, url, params=None, data=None, json=None,
                    headers=None, timeout=30, **kwargs):
        """Async PATCH request."""
        return await cls.fetch(
            'patch', url, params, data, json, headers, timeout, **kwargs)

    @classmethod
    async def delete(cls, url, params=None, data=None, json=None,
                     headers=None, timeout=30, **kwargs):
        """Async DELETE request."""
        return await cls.fetch(
            'delete', url, params, data, json, headers, timeout, **kwargs)

    @classmethod
    async def fetch(
            cls,
            method: str,
            url: str,
            params=None,
            data=None,
            json=None,
            headers=None,
            timeout=30,
            is_close_sesion: bool = False,  # NOTE(review): typo for "session" kept for caller compatibility
            **kwargs
    ):
        """
        Common request dispatcher.

        :param method: HTTP verb (case-insensitive)
        :param url: request URL
        :param params: query-string parameters (None values dropped, rest stringified)
        :param data: form payload
        :param json: JSON payload
        :param headers: request headers
        :param timeout: per-request timeout in seconds
        :param is_close_sesion: close the shared session after this request
        :return: (body, status_code); on error (detail dict, 500)
        """
        client_session = cls.get_session()
        __request = getattr(client_session, method.lower())
        if params:
            params = {key: str(value) for key, value in params.items()
                      if value is not None}
        async with cls.semaphore:
            try:
                async with __request(
                        url, params=params, data=data, json=json,
                        headers=headers, timeout=timeout, **kwargs
                ) as response:
                    if response.content_type == 'application/json':
                        result = await response.json()
                    elif response.content_type == 'text/plain':
                        result = await response.text()
                    else:
                        result = await response.read()
            except Exception as e:
                import traceback
                traceback.print_exc()
                # Bug fix: return a serializable message instead of the raw
                # exception object (which is not JSON-serializable).
                return {'detail': str(e)}, 500
            else:
                if is_close_sesion:
                    await cls.session.close()
                return result, response.status

    @classmethod
    async def bulk_request(cls, querys: List[RequestParams]):
        """
        Fire a batch of requests concurrently.

        :param querys: list of request specs, e.g.::

            [
                {'method': 'get', 'url': 'http://httpbin.org/get',
                 'params': {'key': 'value1'}},
                {'method': 'post', 'url': 'http://httpbin.org/post',
                 'json': {'key': 'value2'}},
            ]
        :return: list of (body, status_code) tuples, in input order
        """
        tasks = [asyncio.ensure_future(cls.fetch(**kw)) for kw in querys]
        responses = await asyncio.gather(*tasks)
        return responses

    @classmethod
    async def close(cls):
        """Close the shared session, if one was created."""
        if cls.session:
            await cls.session.close()

    @classmethod
    async def on_startup(cls):
        """App-startup hook: eagerly create the shared session."""
        cls.get_session()

    @classmethod
    async def on_shutdown(cls):
        """App-shutdown hook: release the shared session."""
        await cls.close()


def request(method: str, url: str, params=None, data=None, json=None,
            headers=None, timeout=30):
    """Synchronous convenience wrapper: run one fetch() to completion."""
    loop = asyncio.get_event_loop()
    result = loop.run_until_complete(
        AioHTTP.fetch(
            method, url, params, data, json,
            headers, timeout, is_close_sesion=True)
    )
    return result


if __name__ == '__main__':
    resp = request('get', 'http://httpbin.org/get')
    print(111, resp)
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/request/aio_http.py
aio_http.py
# yz-core

--------------

## Introduction

yzcore 的目的是在开发后端服务时,提供一种代码结构规范参考。
可以通过`startproject`和`startapp`两个命令快速创建工程和内部的接口应用模块。

**安装模块**

```shell
$ pip install yz-core
```

示例:

- 创建工程:

```shell
$ yzcore startproject myproject
```

- 创建工程内部应用:

```shell
$ yzcore startapp myapp ./src/apps/
```

代码结构介绍:

```
.
├── docs                说明文档、接口文档等文档的存放目录
├── migrations          数据表迁移文件存放目录
├── src
│   ├── apps            接口应用程序的主目录
│   │   ├── __init__.py
│   │   ├── myapp01
│   │   │   ├── __init__.py
│   │   │   ├── controllers.py  控制层:封装数据交互操作
│   │   │   ├── models.py       模型层:实现数据表与模型的定义
│   │   │   ├── schemas.py      模式层:定义接口数据参数
│   │   │   ├── tests.py        测试文件
│   │   │   └── views.py        视图层:接口定义层
│   │   └── myapp02
│   ├── conf            配置文件的存放目录
│   ├── const           公共常量存放目录
│   ├── tests           测试文件的存放目录
│   ├── main.py         程序的入口文件
│   ├── settings.py     程序的设置文件
│   └── utils           抽离出的公共代码模块存放目录
├── .gitignore
├── requirements.txt
└── README.md
```

## Quick start

Quick Start 部分主要包括两部分内容:简易的安装部署说明(Deployment)和使用案例(Example)。特别是对于一些基础库,必须包括Example模块。

## Documentation

Documentation 部分是核心的文档,对于大型项目可以使用超链接,如使用以下这种形式:

For the full story, head over to the [documentation](https://git.k8s.io/community/contributors/devel#readme).

## 数据库迁移操作

```
# pip install alembic
alembic init migrations                      # 创建迁移环境
alembic revision --autogenerate -m "commit content"  # 自动生成迁移文件
alembic upgrade head                         # 升级到最近版本
alembic upgrade <revision_id>                # 升级到指定版本
alembic downgrade <revision_id>              # 回退到指定版本
```
yz-core
/yz-core-0.2.1.tar.gz/yz-core-0.2.1/yzcore/templates/project_template/README.md
README.md
# yz-core2

--------------

## Introduction

yzcore 的目的是在开发后端服务时,提供一种代码结构规范参考。
可以通过`startproject`和`startapp`两个命令快速创建工程和内部的接口应用模块。

**安装模块**

```shell
$ pip install yz-core2
```

示例:

- 创建工程:

```shell
$ yzcore startproject myproject
```

- 创建工程内部应用:

```shell
$ yzcore startapp myapp ./src/apps/
```

代码结构介绍:

```
.
├── docs                说明文档、接口文档等文档的存放目录
├── migrations          数据表迁移文件存放目录
├── src
│   ├── apps            接口应用程序的主目录
│   │   ├── __init__.py
│   │   ├── myapp01
│   │   │   ├── __init__.py
│   │   │   ├── controllers.py  控制层:封装数据交互操作
│   │   │   ├── models.py       模型层:实现数据表与模型的定义
│   │   │   ├── schemas.py      模式层:定义接口数据参数
│   │   │   ├── tests.py        测试文件
│   │   │   └── views.py        视图层:接口定义层
│   │   └── myapp02
│   ├── conf            配置文件的存放目录
│   ├── const           公共常量存放目录
│   ├── tests           测试文件的存放目录
│   ├── main.py         程序的入口文件
│   ├── settings.py     程序的设置文件
│   └── utils           抽离出的公共代码模块存放目录
├── .gitignore
├── requirements.txt
└── README.md
```

## Quick start

Quick Start 部分主要包括两部分内容:简易的安装部署说明(Deployment)和使用案例(Example)。特别是对于一些基础库,必须包括Example模块。

## Documentation

Documentation 部分是核心的文档,对于大型项目可以使用超链接,如使用以下这种形式:

For the full story, head over to the [documentation](https://git.k8s.io/community/contributors/devel#readme).

## 数据库迁移操作

```
# pip install alembic
alembic init migrations                      # 创建迁移环境
alembic revision --autogenerate -m "commit content"  # 自动生成迁移文件
alembic upgrade head                         # 升级到最近版本
alembic upgrade <revision_id>                # 升级到指定版本
alembic downgrade <revision_id>              # 回退到指定版本
```
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/README.md
README.md
import os

try:
    import yaml
except ImportError:  # narrowed from bare `except:` — only a missing package is expected
    yaml = None

from pydantic import BaseSettings, AnyUrl
from yzcore.utils import get_random_secret_key


class DefaultSetting(BaseSettings):
    """
    Base settings shared by all services.

    Subclassing triggers ``reload_reload_settings`` so that values defined on
    the subclass are copied onto the module-level ``default_setting`` instance.
    """
    __cml__ = {}

    def __init_subclass__(cls, **kwargs):
        """On subclass creation, instantiate it and push its values into default_setting."""
        super().__init_subclass__()
        reload_reload_settings(cls())

    class Config:
        case_sensitive = False  # environment variable names are case-insensitive

    DEBUG: bool = True
    API_V1_STR: str = "/api/v1"
    SECRET_KEY: str = get_random_secret_key()

    DB_URI: str = None

    ID_URL: AnyUrl = None
    GENERATE_UUID_PATH: str = '/uuid/generate/'
    EXPLAIN_UUID_PATH: str = '/uuid/explain/'
    TRANSLATE_PATH: str = '/uuid/translate/'
    MAKE_UUID_PATH: str = '/uuid/make/'

    STORAGE_CONF: dict = None


default_setting = DefaultSetting()


def reload_reload_settings(instance):
    """Copy every declared field from *instance* onto the shared default_setting."""
    settings = default_setting
    for k, v in settings.__fields__.items():
        val = getattr(instance, k)
        setattr(settings, k, val)


def get_configer(ext: str = "ini", import_path=os.curdir):
    """
    Load the environment-specific config file from ``<import_path>/conf/``.

    The file name is chosen from the ENV_PROFILE environment variable
    (production / testing / dev, defaulting to dev).

    :param ext: config format: "ini"/"cfg" (configparser) or "yaml"/"yml"
    :param import_path: project root containing the ``conf`` directory
    :raises AttributeError: for unsupported file formats
    """
    profile = os.environ.get('ENV_PROFILE', 'dev')
    if profile == 'production':
        configname = 'config_production'
    elif profile == 'testing':
        configname = 'config_testing'
    else:
        configname = 'config_dev'
    print(f"===>当前环境为:{profile}!导入的配置文件为:{configname}.{ext}")

    base_path = os.path.abspath(import_path)
    _path = os.path.join(base_path, "conf", f"{configname}.{ext}")
    print(_path)
    if ext in ["ini", "cfg"]:
        import configparser
        conf = configparser.ConfigParser()
        conf.read(_path)
    elif ext in ["yaml", "yml"]:
        assert yaml is not None, "Need to install PyYaml"
        conf = yaml.safe_load(open(_path))
    else:
        raise AttributeError(f"暂不支持该文件格式: {ext}")
    return conf


def get_ini_section_to_dict(
        section: str,
        exclude: set = None,
        conf_parser=None
):
    """
    Return all options of one ini-file section as a dict.

    :param section: section name
    :param exclude: option names to leave out
    :param conf_parser: a configparser.ConfigParser that has read the file
    :return: {option: value} for the section
    """
    conf_dict = dict()
    for k in conf_parser.options(section):
        if exclude and k in exclude:
            # Bug fix: was `break`, which silently dropped every option
            # after the first excluded one.
            continue
        # Bug fix: was `conf.get(...)`, referencing an undefined global.
        conf_dict[k] = conf_parser.get(section, k)
    return conf_dict


if __name__ == '__main__':
    conf = get_configer("ini")
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/default_settings.py
default_settings.py
from typing import Any

from fastapi import HTTPException


class NotFoundObject(HTTPException):
    """404 — the requested object does not exist."""

    def __init__(self, detail: Any = 'Not Found', headers: dict = None):
        super().__init__(status_code=404, detail=detail, headers=headers)


class MultiObjects(HTTPException):
    """500 — a lookup that must be unique matched several objects."""

    def __init__(self, detail: Any = '', headers: dict = None):
        message = '{}存在多个对象,不符合唯一性要求'.format(detail)
        super().__init__(status_code=500, detail=message, headers=headers)


class CreateObjectFailed(HTTPException):
    """400 — object creation failed."""

    def __init__(self, detail: Any = 'Object create failed',
                 headers: dict = None):
        super().__init__(status_code=400, detail=detail, headers=headers)


class UpdateObjectFailed(HTTPException):
    """400 — object update failed."""

    def __init__(self, detail: Any = 'Object update failed',
                 headers: dict = None):
        super().__init__(status_code=400, detail=detail, headers=headers)


class NoObjectCreated(HTTPException):
    """400 — the operation produced no object."""

    def __init__(self, detail: Any = 'No object was created',
                 headers: dict = None):
        super().__init__(status_code=400, detail=detail, headers=headers)


class AlreadyExistObject(HTTPException):
    """400 — an equivalent object already exists."""

    def __init__(self, detail: Any = 'Already Exist', headers: dict = None):
        super().__init__(status_code=400, detail=detail, headers=headers)


class RequestParamsError(HTTPException):
    """400 — request parameters are malformed."""

    def __init__(self, detail: Any = 'Incorrect request parameters',
                 headers: dict = None):
        super().__init__(status_code=400, detail=detail, headers=headers)


class RequestParamsMissing(HTTPException):
    """400 — required request parameters are absent."""

    def __init__(self, detail: Any = 'Missing request parameters',
                 headers: dict = None):
        super().__init__(status_code=400, detail=detail, headers=headers)


class NoPermission(HTTPException):
    """401 — the caller lacks sufficient permissions."""

    def __init__(self, detail: Any = 'Insufficient permissions',
                 headers: dict = None):
        super().__init__(status_code=401, detail=detail, headers=headers)


class Forbidden(HTTPException):
    """403 — access to the resource is denied."""

    def __init__(self, detail: Any = 'Access Denied', headers: dict = None):
        super().__init__(status_code=403, detail=detail, headers=headers)


class UnknownError(HTTPException):
    """500 — an unclassified server-side error."""

    def __init__(self, detail: Any = 'Unknown error', headers: dict = None):
        super().__init__(status_code=500, detail=detail, headers=headers)


class StorageRequestError(HTTPException):
    """400 — error raised while calling the object-storage service."""

    def __init__(self, detail: Any = 'ObjectStorage Service Error',
                 headers: dict = None):
        super().__init__(status_code=400, detail=detail, headers=headers)


if __name__ == '__main__':
    pass
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/exceptions.py
exceptions.py
import os
import json
import typing

from starlette.datastructures import URL
from starlette.background import BackgroundTask
from fastapi import Response as _Response
from fastapi.responses import (
    HTMLResponse,
    PlainTextResponse,
    JSONResponse,
    UJSONResponse,
    ORJSONResponse,
    RedirectResponse,
    StreamingResponse,
    FileResponse
)


class XMLResponse(_Response):
    """Response with an ``application/xml`` content type."""
    media_type = "application/xml"


# Maps the `mtype` shorthand to the concrete response class.
responses = {
    "xml": XMLResponse,
    "html": HTMLResponse,
    "plain": PlainTextResponse,
    "json": JSONResponse,
    "ujson": UJSONResponse,
    "orjson": ORJSONResponse,
    "redirect": RedirectResponse,
    "stream": StreamingResponse,
    "file": FileResponse,
}


def response(
        content: typing.Any = None,
        url: typing.Union[str, URL] = None,     # RedirectResponse only
        path: str = None,                       # FileResponse only
        filename: str = None,                   # FileResponse only
        stat_result: os.stat_result = None,     # FileResponse only
        method: str = None,                     # FileResponse only
        status_code: int = 200,
        headers: dict = None,
        media_type: str = None,
        background: BackgroundTask = None,
        mtype: str = "orjson"
):
    """
    Build a response object of the kind selected by *mtype*.

    JSON-family mtypes wrap *content* in the standard envelope produced by
    :func:`render_data`; plain/xml/html mtypes serialize non-string content
    with ``json.dumps``. Arguments left as None are not forwarded, so each
    response class only receives the keywords it supports.

    :raises ValueError: if *mtype* is not a known response kind
    """
    if 'json' in mtype:
        content = render_data(data=content)
    elif mtype in ['plain', 'xml', 'html'] and not isinstance(content, str):
        content = json.dumps(content)
    kwargs = dict(
        content=content,
        url=url,
        path=path,
        filename=filename,
        stat_result=stat_result,
        method=method,
        status_code=status_code,
        headers=headers,
        media_type=media_type,
        background=background,
    )
    kwargs = {k: v for k, v in kwargs.items() if v is not None}
    _response_cls = responses.get(mtype)
    if _response_cls is None:
        # Bug fix: previously fell through to `None(**kwargs)`, raising an
        # opaque TypeError for an unknown mtype.
        raise ValueError(
            f"unsupported mtype: {mtype!r}; expected one of {sorted(responses)}")
    return _response_cls(**kwargs)


def render_data(data=None, code=10000, message='Successfully.',
                limit: int = 10, offset: int = 0, total: int = 0):
    """
    Wrap *data* in the project's standard response envelope.

    A list goes into ``list.data`` with pagination metadata; any other
    non-None value goes into ``info``; None yields an empty envelope.
    """
    if data is None:
        return dict(
            code=code,
            message=message,
            info=dict(),
            list=dict(
                data=[],
                pagination=False
            )
        )
    if isinstance(data, list):
        result = dict(
            code=code,
            message=message,
            info=dict(),
            list=dict(
                data=data,
                pagination=dict(
                    limit=limit,
                    offset=offset,
                    total=total
                )
            )
        )
    # elif isinstance(data, Container):
    else:
        result = dict(
            code=code,
            message=message,
            info=data,
            list=dict(
                data=[],
                pagination=False
            )
        )
    return result
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/response/response.py
response.py
from enum import Enum, unique __all__ = ("RegisterCode", "ErrorCode", "TipsCode", "WebsocketCode") # 状态码注册,请同时添加注释 RegisterCode = [ # 正常码 200000, # 正常,请求处理成功 # 系统级错误代码 500000, # 服务器内部错误,无法完成请求 500001, # 服务器不支持请求的功能,无法完成请求 500002, # 无效的响应 500003, # 由于超载或系统维护,服务器暂时的无法处理客户端的请求 500004, # 服务器超时 500005, # HTTP协议的版本不支持 500006, # 兼容旧代码异常 # 接口级错误代码 400000, # 请求语法或参数有误 400001, # 未认证 400002, # 400003, # 请求太快 400004, # 找不到对象 400005, # 请求不允许 400006, # 请求不合理 400007, # 400008, # 超时 400009, # 更新冲突 400010, # 资源已不存在 ## 账号 400100, # 账号相关请求错误 400101, # 重复登录 400103, # 用户登录失败 400104, # 账号不存在 400109, # 更新信息失败 400110, # 手机号码格式不正确 400111, # 手机短信验证码错误 400113, # 60s内不能重复发送短信 400118, # 手机短信发送失败 400119, # 手机号码已被注册 400120, # email格式不正确 400121, # email验证码错误 400123, # 60s内不能重复发送邮件 400126, # 非邀请邮箱 400127, # 已经发送过邀请,请15分钟后再邀请 400128, # email发送失败 400129, # email已被注册 400130, # 400140, # 密码格式不正确 400141, # 密码不一致 400143, # 密码错误 400150, # Token过期或失效 ## 组织 400200, # 组织相关请求错误 400209, # 已切换到其他团队,请关闭当前页面 400211, # 用户未加入任何组织 400212, # 用户已加入该组织 400213, # 用户不属于此企业 400223, # 组织已存在 400224, # 组织不存在 400229, # 组织名称已被使用 400233, # 成员已存在 400234, # 成员不存在 400239, # 无法删除默认成员 ## cms 400300, # node相关请求错误 400301, # 命名格式错误: 文件夹名字只支持中文, 数字, 字母或中划线, 且在50个字符以内 400302, # 命名重复 400303, # 文件夹已存在 400304, # 文件夹不存在 400305, # 最多只能创建四层文件夹 400306, # 文件已存在 400307, # 文件不存在 400308, # 请求超时 400309, # 400310, # 标签名字格式错误: 字符长度在1-6内 400311, # 标签已存在 400312, # 标签不存在 400313, # 标签名字不能重复 400314, # 400320, # 项目已存在 400321, # 项目不存在 400322, # 400323, # 400324, # 400330, # 分组---------- 400331, # 400332, # 400340, # 属性---------- 400341, # 400342, # 400343, # 400380, # limit超出限制 400381, # 搜索字段不存在 400382, # 排序字段不存在 ## 权限 400400, # 权限相关请求错误 400401, # 权限不足 400402, # 权限创建失败 400403, # 400404, # 找不到该权限 400405, # 400408, # 权限查询超时 ## editor 400500, # 编辑器相关请求错误 400510, # 模型名称不能超过50个字符 400511, # 模型描述不能超过100个字符 400512, # 模型组合不存在 400513, # 400514, # 模型不存在 400515, # 400516, # 400520, # 材质库类型不存在 400524, # 材质不存在 400525, # 400530, # 天空盒类型不存在 400534, # 天空盒不存在 400540, # 
模板场景不存在 400544, # 场景不存在 400550, # 父级渲染对象不存在 400554, # 渲染对象不存在 400560, # 父级事件组不存在 400564, # 组件不存在 400570, # 父级资源不存在 400573, # 不支持的资源类型 400574, # 资源不存在 400576, # 资源内容不存在 400584, # 事件不存在 ## 任务 400600, # Job相关请求错误 400604, # 任务不存在 400608, # 任务失败 400610, # 调用upload_policy参数有误 400614, # 对应upload_policy方法不存在 400620, # 调用cloud返回错误 400630, # 发布失败 ## 应用 400700, # 应用相关请求错误 400704, # 应用不存在 400710, # platform不存在 400720, # 域名已经存在 ## 支付 400800, # 支付相关请求错误 ] @unique class ErrorCode(Enum): # 系统级错误代码 UserNotLogin = dict(code=-10007, desc="用户尚未登录") RequestParamInvalid = dict(code=-10008, desc="参数json内容格式不正确") # 接口级错误代码 UserNameAlreadyExists = dict(code=40001, desc="用户名已被注册") MaterialLibTypeNotExists = dict(code=40002, desc="材质库类型不存在") AccessPermissionDenied = dict(code=40003, desc="访问权限不足") SpaceNotExists = dict(code=40004, desc="空间不存在或已删除") FolderNameFormatInvalid = dict( code=40005, desc="文件夹名字格式错误: 文件夹名字只支持中文, 数字, 字母或下划线, 且在50个字符以内" ) FolderNameConflict = dict(code=40006, desc="文件夹名字不能重复") FolderAlreadyExists = dict(code=40007, desc="文件夹已存在") FolderLevelLimited = dict(code=40008, desc="最多只能创建四层文件夹") DstDirFolderNotExists = dict(code=40009, desc="目标目录文件夹不存在") DirFolderNotExists = dict(code=40010, desc="当前目录文件夹不存在") FileNotExists = dict(code=40011, desc="文件不存在") FileNameConflict = dict(code=40012, desc="文件名字不能重复") FileNameFormatInvalid = dict(code=40013, desc="文件名字格式错误: 长度在1-50之间") CategoryTokenInvalid = dict(code=40014, desc="分类设置口令错误") CategoryParentPathNotExists = dict(code=40015, desc="分类父路径不存在") CategoryFormatInvalid = dict(code=40016, desc="分类名格式无效") TagNameFormatInvalid = dict(code=40017, desc="标签名字格式错误: 字符长度在1-6内") TagAlreadyExists = dict(code=40018, desc="标签已存在") TagNotExists = dict(code=40019, desc="标签不存在") TagNameConflict = dict(code=40020, desc="标签名字不能重复") TemplateNotExists = dict(code=40021, desc="空间自定义字段模板不存在") ModelNotExists = dict(code=40022, desc="模型不存在") AppCatalogNotExists = dict(code=40023, desc="目录方案不存在") CategoryAlreadyExists = dict(code=40024, desc="分类已存在") 
AppSpaceNotExists = dict(code=40025, desc="空间项目不存在") AppSolutionsNotFound = dict(code=40026, desc="app方案不存在") ProductionNotFound = dict(code=40027, desc="production不存在") PageSizeOverflow = dict(code=40028, desc="limit超出限制") CabinetNotFound = dict(code=40029, desc="cabinet不存在") OperationPermissionDenied = dict(code=40030, desc="未拥有此权限或操作权限不足") InvalidAccessToken = dict(code=40031, desc="invalid access token") UserNotFound = dict(code=40032, desc="用户不存在") ModelDescLengthOverflow = dict(code=40033, desc="模型描述不能超过100个字符") ModelNameLengthOverflow = dict(code=40034, desc="模型名称不能超过50个字符") CategoryNotExists = dict(code=40035, desc="分类不存在") ModelGroupNotExists = dict(code=40036, desc="模型组合不存在") ProductionNotExists = dict(code=40037, desc="production不存在") JobNotFound = dict(code=40038, desc="任务不存在") JobFailed = dict(code=40039, desc="任务失败") MaterialNotFound = dict(code=40040, desc="材质不存在") InvalidMobile = dict(code=40041, desc="手机号码格式不正确") InvalidOperationType = dict(code=40042, desc="非法操作类型") SmsSendFailed = dict(code=40043, desc="短信发送失败") CaptchaError = dict(code=40044, desc="验证码错误") InvalidUsernameOrPassword = dict(code=40045, desc="请输入正确的账号密码") PlatformNotFound = dict(code=40046, desc="platform不存在") WechatResponseError = dict(code=40047, desc="微信返回错误") SpaceNameAlreadyExists = dict(code=40048, desc="空间名称已被使用") InvalidEmail = dict(code=40049, desc="email格式不正确") InvalidUploadPolicy = dict(code=40050, desc="对应upload_policy方法不存在") InvalidPassword = dict(code=40051, desc="密码格式不正确") MobileAlreadyExists = dict(code=40052, desc="手机号码已注册") InvalidCabinetName = dict(code=40053, desc="cabinet名称格式不正确") CabinetNameAlreadyExisted = dict(code=40054, desc="cabinet名称已被使用") InvalidCallback = dict(code=40055, desc="invalid callback token") CallCloudError = dict(code=40056, desc="调用cloud返回错误") InvalidProductionName = dict(code=40057, desc="非法的产品名称") AppHomeCatalogNotExisted = dict(code=40058, desc="未设置首页方案") ClientNotFound = dict(code=40059, desc="客户端不存在") ClientChannelNotExists = 
dict(code=40060, desc="客户端渠道不存在") HomeSolutionsCannotDelete = dict(code=40061, desc="主页方案不能删除") InvalidRatio = dict(code=40062, desc="不正确的比例值") HostAlreadyExists = dict(code=40063, desc="域名已被使用") ProductNotFound = dict(code=40064, desc="产品不存在") SkyboxNotFound = dict(code=40065, desc="天空盒不存在") SkyboxLibTypeNotExists = dict(code=40066, desc="天空盒类型不存在") ApplicationNotExist = dict(code=40067, desc="应用不存在") UserLoginFailed = dict(code=40068, desc="用户登录失败") AccountUpdateFailed = dict(code=40069, desc="更新账号信息失败") SceneNotFound = dict(code=40070, desc="场景不存在") ParentEntityNotFound = dict(code=40071, desc="父级渲染对象不存在") EntityNotFound = dict(code=40072, desc='渲染对象不存在') ComponentNotFound = dict(code=40073, desc='组件不存在') ResourceNotFound = dict(code=40074, desc='资源不存在') ParentResourceNotFound = dict(code=40075, desc='父级资源不存在') TemplateSceneNotFound = dict(code=40076, desc='模板场景不存在') MediaNotFound = dict(code=40077, desc='资源内容不存在') EventNotFound = dict(code=40078, desc='事件不存在') ParentEventNotFound = dict(code=40079, desc='父级事件不存在') ModelIsNotPublic = dict(code=40080, desc='模型处于非公开状态') ModelIsLocked = dict(code=40081, desc='模型已被加密,请输入密码') ModelPasswordWrong = dict(code=40082, desc='模型密码错误') CollectionNotFound = dict(code=40083, desc='集合不存在') CollectionIsNotPublic = dict(code=40084, desc='集合未公开') CollectionIsLocked = dict(code=40085, desc='集合已被加密,请输入密码') CollectionPasswordWrong = dict(code=40086, desc='集合密码错误') TokenIsWrong = dict(code=40087, desc='Token过期或无效') NotInvitedEmail = dict(code=40088, desc='非邀请邮箱') RoleNotFound = dict(code=40089, desc='角色不存在') CannotDeleteDefaultRole = dict(code=40090, desc='无法删除默认角色') CatalogNotFound = dict(code=40091, desc='3D图册不存在') CorpNotFound = dict(code=40092, desc='企业不存在') RoleNameExist = dict(code=40093, desc='角色名已存在') UserIsInTeam = dict(code=40094, desc='用户已被邀请或已加入团队') UserNotInSpace = dict(code=40095, desc='未加入任何团队') InvitedEmailHasBeenSent = dict(code=40096, desc='已经发送过邀请,请15分钟后再邀请') ResetEmailHasBeenSent = dict(code=40097, 
desc='60s内不能重复发送邮件') TagNotFound = dict(code=40098, desc='标签不存在') UpdateUserInfoFail = dict(code=40099, desc='更新用户信息失败') DomainIsExist = dict(code=40100, desc='域名已经存在') UserNotInCorp = dict(code=40101, desc='用户不属于此企业') SearchFieldNotExist = dict(code=40102, desc='搜索字段不存在') SortFieldNotExist = dict(code=40103, desc='排序字段不存在') PublishFailed = dict(code=40104, desc='发布失败,服务器处理出错') EmailFormatError = dict(code=40105, desc='邮箱格式不正确') InstanceNotExists = dict(code=40106, desc='不存在对象') GroupNotExists = dict(code=40109, desc='群组不存在') SpaceIsChanged = dict(code=40110, desc='已切换到其他团队,请关闭当前页面') NoCorpSpace = dict(code=40111, desc='用户无企业空间') TypeNotSupport = dict(code=40112, desc='不支持的资源类型') ButtonNotFound = dict(code=40113, desc='按钮不存在') ProjectNotFound = dict(code=40114, desc='项目不存在') SceneNotInSameProject = dict(code=40115, desc='场景不在同一个项目') EventIsExist = dict(code=40116, desc='事件已存在') AccountNotExist = dict(code=40117, desc='账号不存在') CaptchaAlreadyExpired = dict(code=40118, desc='验证码已过期') TooManyRequest = dict(code=40119, desc='请求过于频繁') UserNotInvited = dict(code=40120, desc='您未被邀请加入,请联系系统管理员') ModelNumberNotEnough = dict(code=40121, desc='模型数量不足') ProjectlNumberNotEnough = dict(code=40122, desc='项目数量不足') UserAlreadyInvited = dict(code=40123, desc='用户已被邀请') UrlAlreadyExpired = dict(code=40124, desc='重置链接已过期') NotBeInvited = dict(code=40125, desc='未收到邀请或链接地址错误') PasswordErrorOverTimes = dict(code=40127, desc='错误次数超过5次,请明天再尝试') SpaceIsRemoved = dict(code=40128, desc='团队权限被删除') NeedCaptcha = dict(code=40129, desc='首次登陆需要验证码') MacAlreadyExist = dict(code=40130, desc='终端地址已存在') BeKickedOutFromTeam = dict(code=40131, desc='您已被移出团队,请联系管理员') UnauthorizedOrTimeout = dict(code=40132, desc='未授权或授权过期') UIPackInvalid = dict(code=40133, desc='UI数据包无效') UIIconPackInvalid = dict(code=40134, desc='ui icon 包无效') OnlyOnePageNotDelete = dict(code=40134, desc='只剩一个页面,不能删除') IconPackMustZip = dict(code=40134, desc='icon 包必须是zip格式压缩包') GetStaticResourceDataError = dict(code=40135, 
desc='服务器获取发布数据错误') NotFoundScenePublishData = dict(code=40135, desc='找不到场景发布数据') CapacityNotEnough = dict(code=40135, desc='容量不足') PasswordWrong = dict(code=40136, desc='密码错误') # 运营后台 NotAuthenticatedUser = dict(code=-40001, desc='未认证用户,请重新登录') NotVisitedPermission = dict(code=-40002, desc='你没有访问权限') NotUpdatedPermission = dict(code=-40003, desc='你没有修改权限') PermissionDenied = dict(code=-40004, desc='你没有对应的操作权限权限') GroupDeleteFailedForManagerExisted = dict(code=-40005, desc='当前群组里有成员,不能删除当前群组') GroupNameExisted = dict(code=-40006, desc='当前群组名已经存在') PasswordNotSame = dict(code=-40007, desc='两次输入的密码不一致') HasExistsAccount = dict(code=-40008, desc='当前账号已存在') # 重新刷新页面 NotSpaceReFlashPage = dict(code=-50001, desc='不存在空间,请刷新') # 兼容下-10000错误码的提示 AttributeNameAlreadyExist = dict(code=-10000, desc='该属性名已经存在') DuplicateAttributeValue = dict(code=-10000, desc='属性值重复') FileCopyFailure = dict(code=-10000, desc='不能将文件复制到自身或其子目录下') FileMoveFailure = dict(code=-10000, desc='不能将文件移动到自身或其子目录下') FolderHasDeleteFileMoveOrCopyFailure = dict(code=-10000, desc='包含转换失败的文件,不能移动或者复制,请先删除转换失败的文件') FolderAlreadyExist = dict(code=-10000, desc='已经存在该文件夹') ParentFolderNotExist = dict(code=-10000, desc='父文件不存在') # 开放平台 InvalidSpaceUserApiToken = dict(code=-70001, desc='无效的空间用户API token') SpaceNotAuthApplication = dict(code=-70002, desc='该用户空间未授权该应用') NotInternalSpaceAppUser = dict(code=-70003, desc='不是内部应用空间用户') AlreadyAddApplication = dict(code=-70004, desc='您已经添加了该应用') ApplicationNotPublishOrDeleted = dict(code=-70005, desc='应用未发布或被删除') ApplicationNotBelowCurrentSpace = dict(code=-70005, desc='该内部应用不属于当前空间') OpenServerError = dict(code=-70100, desc='open server invoke fail') CheckRepeat = dict(code=-99006, desc="重复登录错误") # cml--> 重复登录 @unique class EnErrorCode(Enum): # 英文版 # 系统级错误代码 UserNotLogin = dict(code=-10007, desc="User not logged in") RequestParamInvalid = dict(code=-10008, desc="Request param invalid") # 接口级错误代码 UserNameAlreadyExists = dict(code=40001, desc="Username is already 
registered") MaterialLibTypeNotExists = dict(code=40002, desc="Material lib type not exists") AccessPermissionDenied = dict(code=40003, desc="Access permission denied") SpaceNotExists = dict(code=40004, desc="Space does not exist") DstDirFolderNotExists = dict(code=40009, desc="Destination directory folder does not exist") DirFolderNotExists = dict(code=40010, desc="Current directory folder does not exist") TagAlreadyExists = dict(code=40018, desc="Tag already exist") TagNotExists = dict(code=40019, desc="Tag dose noet exist") TemplateNotExists = dict(code=40021, desc="Space custom field template does not exist") ModelNotExists = dict(code=40022, desc="Model does not exist") ProductionNotFound = dict(code=40027, desc="Production dose not exist") PageSizeOverflow = dict(code=40028, desc="Exceeding the limit") CabinetNotFound = dict(code=40029, desc="Cabinet does not exist") OperationPermissionDenied = dict(code=40030, desc="Permission denied") InvalidAccessToken = dict(code=40031, desc="invalid access token") UserNotFound = dict(code=40032, desc="User does not exist") ModelDescLengthOverflow = dict(code=40033, desc="Model description cannot exceed 100 characters") ModelNameLengthOverflow = dict(code=40034, desc="Model name cannot exceed 50 characters") CategoryNotExists = dict(code=40035, desc="Category does not exist") ModelGroupNotExists = dict(code=40036, desc="Model combination does not exist") ProductionNotExists = dict(code=40037, desc="Production dose not exist") JobNotFound = dict(code=40038, desc="Job dose not exist") JobFailed = dict(code=40039, desc="Job failure") MaterialNotFound = dict(code=40040, desc="Material not found") InvalidMobile = dict(code=40041, desc="Invalid mobile") InvalidOperationType = dict(code=40042, desc="Invalid operation type") SmsSendFailed = dict(code=40043, desc="Sms send failed") CaptchaError = dict(code=40044, desc="Captcha error") InvalidUsernameOrPassword = dict(code=40045, desc="Invalid account or password") PlatformNotFound 
= dict(code=40046, desc="platform not found") WechatResponseError = dict(code=40047, desc="Wechat response error") SpaceNameAlreadyExists = dict(code=40048, desc="Space name Already exist") InvalidEmail = dict(code=40049, desc="Invalid email") InvalidUploadPolicy = dict(code=40050, desc="Invalid upload policy") InvalidPassword = dict(code=40051, desc="Invalid password") MobileAlreadyExists = dict(code=40052, desc="Mobile number registered") InvalidCabinetName = dict(code=40053, desc="Invalid cabinet name") CabinetNameAlreadyExisted = dict(code=40054, desc="Cabinet already existed") InvalidCallback = dict(code=40055, desc="invalid callback token") CallCloudError = dict(code=40056, desc="Call cloud error") InvalidProductionName = dict(code=40057, desc="Invalid production name") ClientNotFound = dict(code=40059, desc="Client not found") ClientChannelNotExists = dict(code=40060, desc="Client channel not found") InvalidRatio = dict(code=40062, desc="Invalid ratio") HostAlreadyExists = dict(code=40063, desc="Domain name is already in use") ProductNotFound = dict(code=40064, desc="Product not found") SkyboxNotFound = dict(code=40065, desc="Skybox not found") SkyboxLibTypeNotExists = dict(code=40066, desc="Skybox lib type not found") ApplicationNotExist = dict(code=40067, desc="Application not found") UserLoginFailed = dict(code=40068, desc="User login failed") AccountUpdateFailed = dict(code=40069, desc="Account update failed") SceneNotFound = dict(code=40070, desc="Scene not found") ParentEntityNotFound = dict(code=40071, desc="Parent entity not found") EntityNotFound = dict(code=40072, desc='Entity not found') ComponentNotFound = dict(code=40073, desc='Component not found') ResourceNotFound = dict(code=40074, desc='Resource not found') ParentResourceNotFound = dict(code=40075, desc='Parent resource not found') TemplateSceneNotFound = dict(code=40076, desc='Template scene not found') MediaNotFound = dict(code=40077, desc='Media not found') EventNotFound = 
dict(code=40078, desc='Event not found') ParentEventNotFound = dict(code=40079, desc='Parent event not found') ModelIsNotPublic = dict(code=40080, desc='Model is not public') ModelIsLocked = dict(code=40081, desc='Model is locked, please imput password') ModelPasswordWrong = dict(code=40082, desc='Model password wrong') CollectionNotFound = dict(code=40083, desc='Collection not found') CollectionIsNotPublic = dict(code=40084, desc='Collection is not public') CollectionIsLocked = dict(code=40085, desc='Collection is locked, please imput password') CollectionPasswordWrong = dict(code=40086, desc='Collection password wrong') TokenIsWrong = dict(code=40087, desc='Invalid token') NotInvitedEmail = dict(code=40088, desc='Non invitation email') RoleNotFound = dict(code=40089, desc='Role not found') CannotDeleteDefaultRole = dict(code=40090, desc='Cannot delete default role') CorpNotFound = dict(code=40092, desc='Enterprise does not exist') RoleNameExist = dict(code=40093, desc='Role name already exists') UserIsInTeam = dict(code=40094, desc='User has been invited or joined the team') UserNotInSpace = dict(code=40095, desc='User not joined team') InvitedEmailHasBeenSent = dict(code=40096, desc='Invitation has been sent, please invite again in 15 minutes') ResetEmailHasBeenSent = dict(code=40097, desc='Can not send mail repeatedly within 60s') TagNotFound = dict(code=40098, desc='Label does not exist') UpdateUserInfoFail = dict(code=40099, desc='Failed to update user information') DomainIsExist = dict(code=40100, desc='Domain name already exists') UserNotInCorp = dict(code=40101, desc='User does not belong to this enterprise') SearchFieldNotExist = dict(code=40102, desc='Search field does not exist') SortFieldNotExist = dict(code=40103, desc='Sort field does not exist') PublishFailed = dict(code=40104, desc='Publishing failed, server processing error') EmailFormatError = dict(code=40105, desc='Invalid email format') InstanceNotExists = dict(code=40106, desc='Object does not 
exist') GroupNotExists = dict(code=40109, desc='Group not exist') SpaceIsChanged = dict(code=40110, desc='Space is changed,please close the current page') TypeNotSupport = dict(code=40112, desc='The type not support') ButtonNotFound = dict(code=40113, desc='Button not found') ProjectNotFound = dict(code=40114, desc='Project not found') SceneNotInSameProject = dict(code=40115, desc='Scene not in same project') EventIsExist = dict(code=40116, desc='Event is exist') AccountNotExist = dict(code=40117, desc='Account dose not exist') CaptchaAlreadyExpired = dict(code=40118, desc='Captcha already expired') TooManyRequest = dict(code=40119, desc='Too many requests') UserNotInvited = dict(code=40120, desc='You are not invited to join, please contact your system administrator') ModelNumberNotEnough = dict(code=40121, desc='Model number not enouth') ProjectlNumberNotEnough = dict(code=40122, desc='Project number not enouth') UserAlreadyInvited = dict(code=40123, desc='User already invited') UrlAlreadyExpired = dict(code=40124, desc='Url already expired') NotBeInvited = dict(code=40125, desc='No invitation received or wrong link address') MacLimited = dict(code=40126, desc='The current device cannot enter the target space') PasswordErrorOverTimes = dict(code=40127, desc='Password more than 5 errors, please try again tomorrow') SpaceIsRemoved = dict(code=40128, desc='Space permissions deleted') NeedCaptcha = dict(code=40129, desc='First login requires verification code') MacAlreadyExist = dict(code=40130, desc='Terminal address already exists') BeKickedOutFromTeam = dict(code=40131, desc='You have been removed from the space. 
Please contact your system administrator') # 运营后台 NotAuthenticatedUser = dict(code=-40001, desc='未认证用户,请重新登录') NotVisitedPermission = dict(code=-40002, desc='你没有访问权限') NotUpdatedPermission = dict(code=-40003, desc='你没有修改权限') PermissionDenied = dict(code=-40004, desc='你没有对应的操作权限权限') GroupDeleteFailedForManagerExisted = dict(code=-40005, desc='当前群组里有成员,不能删除当前群组') GroupNameExisted = dict(code=-40006, desc='当前群组名已经存在') PasswordNotSame = dict(code=-40007, desc='两次输入的密码不一致') HasExistsAccount = dict(code=-40008, desc='当前账号已存在') # 重新刷新页面 NotSpaceReFlashPage = dict(code=-50001, desc='不存在空间,请刷新') NotInSpace = dict(code=-50001, desc='团队权限被删除') ClientMacLimited = dict(code=-50002, desc='终端无法访问') # 兼容下-10000错误码的提示 AttributeNameAlreadyExist = dict(code=-10000, desc='Attribute name already eist') DuplicateAttributeValue = dict(code=-10000, desc='Duplicate attribute value') FileCopyFailure = dict(code=-10000, desc='Files cannot be copied to themselves or their subdirectories') FileMoveFailure = dict(code=-10000, desc='Files cannot be move to themselves or their subdirectories') DirHasDeleteFileMoveOrCopyFailure = dict(code=-10000, desc='The file containing the conversion failure cannot be moved or copied. 
Please delete the conversion failure file first') FolderAlreadyExist = dict(code=-10000, desc='Folder already exist') ParentFolderNotExist = dict(code=-10000, desc='Parent folder not exist') # 开放平台 InvalidSpaceUserApiToken = dict(code=-70001, desc='无效的空间用户API token') SpaceNotAuthApplication = dict(code=-70002, desc='空间未授权该应用') NotInternalSpaceAppUser = dict(code=-70003, desc='不是内部应用空间用户') OpenServerError = dict(code=-70100, desc='open server invoke fail') CheckRepeat = dict(code=-99006, desc="Duplication error") # cml--> 重复登录 @unique class TipsCode(Enum): CheckUserName = dict(code=-99001, message="用户名错误") # 用户名检查相关 CheckMobile = dict(code=-99002, message="手机号码错误") # 手机号码检查相关 CheckPassword = dict(code=-99003, message="密码错误") # 密码检查相关 CheckParameter = dict(code=-99004, message="参数错误") # 参数检查相关 CheckCaptcha = dict(code=-99005, message="验证码错误") CheckRepeat = dict(code=-99006, message="重复登录错误") # cml--> 重复登录 PasswordNotSame = dict(code=40103, message='密码不一致') # 密码不一致 AccountAlreadyExists = dict(code=40104, message='已存在账户') # 已存在账户 GroupNotExists = dict(code=40105, message='群组不存在') # 群组不存在 InstanceNotExists = dict(code=40106, message="不存在对象") # 不存在对象 HasJoinGroup = dict(code=40106, message="已经加入该组") # 已经加入该组 RequiredParam = dict(code=40107, message="必填参数") # 必填参数 integerOutOfRange = dict(code=40108, message="整型数太大") # 整型数太大 ModelNotPublished = dict(code=40109, message='模型未发布') InviteNoPriceSet = dict(code=-30001, message="该空间当前没有购买任何套餐") InvitePriceSetOuttime = dict(code=-30002, message="该空间套餐已过期") InviteNoPriceSetItem = dict(code=-30003, message="该空间套餐不含邀请项") InviteOutPriceSetItemLimit = dict(code=-30004, message="该空间容量已满,请升级套餐") MeetingAlreadyOver = dict(code=-40001, message='会议已经结束') OutOfPeopleNum = dict(code=-40002, message='会议参与人数上限为10人,现在已经达到10人') ParticipantAlreadyDeleted = dict(code=-40003, message='您被会议管理人员移除会议,请联系会议发起人') NotInMeeting = dict(code=-40004, message='您未参加过此会议') MaxTwoReviewScene = dict(code=-40005, message='最多同时支持两个场景评审') MeetingNotOverYet = 
dict(code=-40006, message='会议尚未结束') @unique class EnTipsCode(Enum): CheckUserName = dict(code=-99001, message="Account error") # 用户名检查相关 CheckMobile = dict(code=-99002, message="Wrong mobile number") # 手机号码检查相关 CheckPassword = dict(code=-99003, message="Password error") # 密码检查相关 CheckParameter = dict(code=-99004, message="Parem error") # 参数检查相关 CheckCaptcha = dict(code=-99005, message="Captcha error") CheckRepeat = dict(code=-99006, message="Duplication error") # cml--> 重复登录 PasswordNotSame = dict(code=40103, message='Passwords are inconsistent') # 密码不一致 AccountAlreadyExists = dict(code=40104, message='Account already exist') # 已存在账户 GroupNotExists = dict(code=40105, message='Group does not exist') # 群组不存在 InstanceNotExists = dict(code=40106, message="Object does not exist") # 不存在对象 HasJoinGroup = dict(code=40106, message="Already joined the group") # 已经加入该组 RequiredParam = dict(code=40107, message="Required param") # 必填参数 integerOutOfRange = dict(code=40108, message="Integer too large") # 整型数太大 ModelNotPublished = dict(code=40109, message='Model not published') InviteNoPriceSet = dict(code=-30001, message="No packages are currently purchased for this space") InvitePriceSetOuttime = dict(code=-30002, message="The space package has expired") InviteNoPriceSetItem = dict(code=-30003, message="The space package does not include invitations") InviteOutPriceSetItemLimit = dict(code=-30004, message="This space is full, please upgrade the package") MeetingAlreadyOver = dict(code=-40001, message='The meeting is over') OutOfPeopleNum = dict(code=-40002, message='The maximum number of participants in the meeting is 10, now it has reached 10') ParticipantAlreadyDeleted = dict(code=-40003, message='You have been removed from the meeting by the meeting management. 
Please contact the meeting sponsor') NotInMeeting = dict(code=-40004, message='You have not attended this meeting') MaxTwoReviewScene = dict(code=-40005, message='Up to two scenario reviews are supported at the same time') MeetingNotOverYet = dict(code=-40006, message='The meeting is not over') class WebsocketCode: def __init__(self, **kwargs): username = kwargs.get('username') data = kwargs.get('data', {}) subject_name = kwargs.get('subject_name') object_name = kwargs.get('object_name') team_name = kwargs.get('team_name') self.JoinTwoMeeting = dict(code=-1001, message='您在其他地方加入了会议', data=dict(content=dict(zh='您在其他地方加入了会议', en='You have joined another meeting'))) self.DeletedParticipant = dict(code=-1002, message='{username}被移出会议'.format(username=username), data=dict(username=username, content=dict(zh='被移出会议', en='is removed from the meeting'), participant_uid=data.get('uid'))) self.ParticipantExit = dict(code=-1003, message='{username}已退出会议'.format(username=username), data=dict(username=username, content=dict(zh='已退出会议', en='has left the meeting'))) self.BeKickedOutFromTeam = dict(code=-1004, message='你已被管理员移出 {team_name} 团队'.format(team_name=team_name), data=dict(team_name=team_name, content=dict(zh='你已被管理员移出团队', en='You have been removed from the team by the administrator'))) # self.DataUpdated = dict(code=1000, message='data updated', data=data) self.MasterChanged = dict(code=1001, message='{subject_name}已将主控权移交给{object_name}'.format(subject_name=subject_name, object_name=object_name), data=dict(subject_name=subject_name, object_name=object_name, content=dict(zh='已将主控权移交给', en='has transferred the mastership to'), master_uid=data.get('master_uid'))) self.JoinMeeting = dict(code=1002, message='{username}已加入会议'.format(username=username), data=dict(username=username, content=dict(zh='已加入会议', en='has joined the meeting'))) self.CreateMarker = dict(code=1003, message='create marker', data=data) self.DeleteMarker = dict(code=1004, message='delete marker', data=data) 
self.UpdateMakrer = dict(code=1005, message='update makrer', data=data) self.CreateReviewScene = dict(code=1006, message='create review scene', data=data) self.DeleteReviewScene = dict(code=1007, message='delete review scene', data=data) self.EndMeeting = dict(code=1008, message='您已经结束会议', data=dict(content=dict(zh='您已经结束会议', en='You have ended the meeting'))) self.MeetingAlreadyOver = dict(code=1009, message='会议已经结束', data=dict(content=dict(zh='会议已经结束', en='The meeting is over'))) if __name__ == '__main__': print(ErrorCode.BeKickedOutFromTeam.value)
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/response/response_code.py
response_code.py
from typing import List, Optional, Union

from yzcore.default_settings import default_setting as settings
from yzcore.request import request

__all__ = [
    "generate_uuid",
    "explain_uuid",
    "translate2timestamp",
    "make_uuid",
]

# Fail fast at import time: every helper below talks to the remote ID service.
if settings.ID_URL is None:
    raise EnvironmentError("Error: 'ID_URL' is None.")


def generate_uuid(limit: int = 1) -> Union[int, List[int], None]:
    """Generate a number of IDs from the remote ID service.

    :param limit: how many IDs to generate.
    :return: a single int when ``limit == 1``, otherwise a list of ints;
             implicitly ``None`` when the service does not answer HTTP 200.
        1: 6918460366603157505
        2: [6918460407405346817, 6918460407405347841,...]
    """
    url = f"{settings.ID_URL}{settings.GENERATE_UUID_PATH}"
    result, status_code = request(
        'get', url, params={'limit': limit}
    )
    if status_code == 200:
        if limit == 1:
            return int(result['id'])
        else:
            return [int(_id) for _id in result['id_list']]


def explain_uuid(long_id: int) -> Optional[dict]:
    """Decode a long integer ID into its component fields.

    :param long_id: 6918460366603157505
    :return: (``None`` when the service does not answer HTTP 200)
        {
            "machine_id": 1,
            "sequence": 0,
            "time_duration": 867377,
            "generate_method": 2,
            "mode_type": 1,
            "version": 0
        }
    """
    url = f"{settings.ID_URL}{settings.EXPLAIN_UUID_PATH}"
    result, status_code = request(
        'get', url, params={'long_id': long_id}
    )
    if status_code == 200:
        return result


def translate2timestamp(time_duration: int) -> Optional[dict]:
    """Convert a time duration (from an ID) into a timestamp.

    :param time_duration: duration extracted from an ID
    :return: (``None`` when the service does not answer HTTP 200)
        {
            "timestamp": 1611219039,
            "datetime": "2021-01-21T16:50:39"
        }
    """
    url = f"{settings.ID_URL}{settings.TRANSLATE_PATH}"
    result, status_code = request(
        'get', url, params={'time_duration': time_duration})
    if status_code == 200:
        return result


def make_uuid(
        sequence: int,
        timestamp: int,
        machine: Optional[int] = None,
        method: Optional[int] = None,
        mtype: Optional[int] = None,
        version: Optional[int] = None
) -> Optional[int]:
    """Compose a long integer ID from its component fields.

    :param sequence: sequence number
    :param timestamp: timestamp component
    :param machine: machine id (service default when omitted)
    :param method: generation method (service default when omitted)
    :param mtype: mode type (service default when omitted)
    :param version: version (service default when omitted)
    :return: the composed int ID; implicitly ``None`` on non-200 responses.
    """
    data = {
        "sequence": sequence,
        "timestamp": timestamp,
        "machine": machine,
        "method": method,
        "mtype": mtype,
        "version": version
    }
    # Drop unset fields so the service applies its own defaults.
    data = {k: v for k, v in data.items() if v is not None}
    url = f"{settings.ID_URL}{settings.MAKE_UUID_PATH}"
    result, status_code = request('post', url, json=data)
    if status_code == 200:
        return int(result['id'])
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/extensions/uid.py
uid.py
import os
import shutil
from typing import Union, IO, AnyStr
from abc import ABCMeta, abstractmethod
from urllib.request import urlopen
from urllib.error import URLError
from ssl import SSLCertVerificationError

from yzcore.extensions.storage.utils import create_temp_file, get_filename, get_url_path
from yzcore.extensions.storage.const import IMAGE_FORMAT_SET, CONTENT_TYPE, DEFAULT_CONTENT_TYPE
from yzcore.extensions.storage.schemas import BaseConfig
from yzcore.exceptions import StorageRequestError
from yzcore.logger import get_logger
from yzcore.utils.decorator import cached_property

logger = get_logger(__name__)

__all__ = ['StorageManagerBase', 'logger', 'StorageRequestError']


class StorageManagerBase(metaclass=ABCMeta):
    """Abstract base class for object-storage backends.

    Concrete backends implement the ``@abstractmethod`` hooks; the shared
    helpers here (``download``/``upload``/``check``/URL builders) are written
    only against those hooks plus the config attributes set in ``__init__``.
    """

    @abstractmethod
    def __init__(self, conf: BaseConfig):
        # Copy the validated config onto the instance.
        self.mode = conf.mode
        self.access_key_id = conf.access_key_id
        self.access_key_secret = conf.access_key_secret
        self.scheme = conf.scheme.value
        self.bucket_name = conf.bucket_name
        self.endpoint = conf.endpoint
        self.image_domain = conf.image_domain
        self.asset_domain = conf.asset_domain
        self.cache_path = conf.cache_path
        self.policy_expire_time = conf.policy_expire_time  # validity period of the upload policy
        self.private_expire_time = conf.private_expire_time  # validity period of signed URLs for private buckets

        # Make sure the local download cache directory exists up front.
        if self.cache_path:
            self.make_dir(self.cache_path)

    @abstractmethod
    def create_bucket(self, bucket_name):
        """Create a bucket."""

    @abstractmethod
    def get_bucket_cors(self):
        """
        Fetch the bucket's CORS configuration.
        :return: {
            'allowed_origins': [],
            'allowed_methods': [],
            'allowed_headers': [],
        }
        """

    @abstractmethod
    def list_buckets(self):
        """List all buckets."""

    @abstractmethod
    def is_exist_bucket(self, bucket_name=None):
        """Return whether the bucket exists."""

    @abstractmethod
    def delete_bucket(self, bucket_name=None):
        """Delete a bucket."""

    @abstractmethod
    def get_sign_url(self, key, expire=0):
        """Build a signed URL for fetching an object."""

    @abstractmethod
    def post_sign_url(self, key):
        """Build the authorization payload for a POST upload."""

    @abstractmethod
    def put_sign_url(self, key):
        """Build a signed URL for a PUT upload."""

    @abstractmethod
    def iter_objects(self, prefix='', marker=None, delimiter=None, max_keys=100):
        """
        Iterate over the objects stored in the bucket.
        Currently returned fields:
            [{
                'key': '',
                'url: '',
                'size': '',
            }]
        """

    @abstractmethod
    def get_object_meta(self, key: str):
        """Fetch basic object metadata (ETag, Size, LastModified,
        Content-Type) without returning the object's content."""

    def update_file_headers(self, key, headers: dict):
        """Update an object's metadata, including standard HTTP headers
        such as Content-Type.

        When no Content-Type is supplied, one is derived from the key's
        file extension before delegating to the backend.
        """
        if not headers.get('Content-Type'):
            headers['Content-Type'] = self.parse_content_type(key)
        self._set_object_headers(key, headers)
        return True

    @abstractmethod
    def _set_object_headers(self, key, headers):
        """Call the backend SDK to update the object's headers."""

    @abstractmethod
    def file_exists(self, key):
        """Check whether the object exists."""

    def download(self, key, local_name=None, path=None, is_stream=False, **kwargs):
        """
        Download an object.
        :param key:
        :param local_name: local path of the downloaded file
        :param path: download directory (relative to the cache path)
        :param is_stream:
            is_stream = True:
                >>> result = self.download('readme.txt', is_stream=True)
                >>> print(result.read())
                b'hello world'
            is_stream = False:
                >>> result = self.download('readme.txt', '/tmp/cache/readme.txt')
                >>> print(result)
                '/tmp/cache/readme.txt'
        :return: a file-like object, or the local path of the downloaded file
        """
        if is_stream:
            return self.download_stream(key, **kwargs)
        else:
            # Resolve a local destination under the cache path when none is given.
            if not local_name:
                if path:
                    local_name = os.path.abspath(os.path.join(self.cache_path, path, get_filename(key)))
                else:
                    local_name = os.path.abspath(os.path.join(self.cache_path, key))
            self.make_dir(os.path.dirname(local_name))
            self.download_file(key, local_name)
            return local_name

    @abstractmethod
    def download_stream(self, key, **kwargs):
        """Download an object as a stream."""

    @abstractmethod
    def download_file(self, key, local_name, **kwargs):
        """Download an object to a local file."""

    def upload(self, filepath: Union[str, os.PathLike], key: str, **kwargs):
        """Upload a local file (thin wrapper around ``upload_file``)."""
        return self.upload_file(filepath, key, **kwargs)

    @abstractmethod
    def upload_file(self, filepath: Union[str, os.PathLike], key: str, **kwargs):
        """Upload a local file."""

    @abstractmethod
    def upload_obj(self, file_obj: Union[IO, AnyStr], key: str, **kwargs):
        """Upload a file-like object / raw content."""

    @abstractmethod
    def delete_object(self, key: str):
        """Delete an object."""

    @abstractmethod
    def get_policy(
            self,
            filepath: str,
            callback_url: str,
            callback_data: dict,
            callback_content_type: str,
    ):
        """
        Authorize a third party to upload directly to the bucket.

        :param filepath: storage path used as the key prefix
        :param callback_url: URL the storage service calls back after upload
        :param callback_data: parameters to pass through to the callback
        :param callback_content_type: Content-Type of the callback:
               "application/json"
               "application/x-www-form-urlencoded"
        :return:
        """

    @cached_property
    def host(self):
        # oss/obs style host: bucket name is a subdomain of the endpoint.
        return u'//{}.{}'.format(self.bucket_name, self.endpoint)

    @cached_property
    def _host_minio(self):
        # minio/s3/azure style host: bucket name is a path segment.
        return u'//{}/{}'.format(self.endpoint, self.bucket_name)

    def get_file_url(self, key, with_scheme=False):
        """oss/obs: build the file URL as f'{bucket_name}.{endpoint}'."""
        if not any((self.image_domain, self.asset_domain)):
            resource_url = u"{}/{}".format(self.host, key)
        elif key.split('.')[-1].lower() in IMAGE_FORMAT_SET:
            # Image files go through the dedicated image domain.
            resource_url = u"//{domain}/{key}".format(domain=self.image_domain, key=key)
        else:
            resource_url = u"//{domain}/{key}".format(domain=self.asset_domain, key=key)
        if with_scheme:
            resource_url = self.scheme + ':' + resource_url
        return resource_url

    def _get_file_url_minio(self, key, with_scheme=False):
        """minio/s3/azure: build the file URL as f'{endpoint}/{bucket_name}'."""
        if not any((self.image_domain, self.asset_domain)):
            resource_url = u"{}/{}".format(self._host_minio, key)
        elif key.split('.')[-1].lower() in IMAGE_FORMAT_SET:
            resource_url = u"//{domain}/{bucket}/{key}".format(
                domain=self.image_domain, bucket=self.bucket_name, key=key)
        else:
            resource_url = u"//{domain}/{bucket}/{key}".format(
                domain=self.asset_domain, bucket=self.bucket_name, key=key)
        if with_scheme:
            resource_url = self.scheme + ':' + resource_url
        return resource_url

    def delete_cache_file(self, filename):
        """Delete a locally cached file."""
        filepath = os.path.abspath(os.path.join(self.cache_path, filename))
        assert os.path.isfile(filepath), '非文件或文件不存在'
        os.remove(filepath)

    def search_cache_file(self, filename):
        """Look up a file in the local cache; return its path or None."""
        # Build the absolute path.
        filepath = os.path.abspath(os.path.join(self.cache_path, filename))
        if os.path.isfile(filepath):
            return filepath
        else:
            return None

    @classmethod
    def make_dir(cls, dir_path):
        """Create a directory (already existing is not an error)."""
        try:
            os.makedirs(dir_path)
        except OSError:
            pass

    @classmethod
    def copy_file(cls, src, dst):
        """Copy a file, creating the destination directory as needed."""
        dst_dir = os.path.dirname(dst)
        cls.make_dir(dst_dir)
        shutil.copy(src, dst)

    def check(self):
        """Verify the storage configuration via a full upload/download
        round trip; raises StorageRequestError on any failed step."""
        try:
            # verify the bucket exists
            assert self.is_exist_bucket(), f'{self.bucket_name}: No Such Bucket'

            # CORS check
            # assert self._cors_check(), f'{self.bucket_name}: CORS设置错误'

            # create an in-memory file containing a random string
            temp_file = create_temp_file(text_length=32)
            text = temp_file.getvalue().decode()
            key = f'storage_check_{text}.txt'
            logger.debug(f'file_exists: {self.file_exists(key)}')

            # upload
            file_url = self.upload_obj(temp_file, key=key)
            logger.debug(f'upload: {file_url}')
            assert file_url, f'{self.bucket_name}: Upload Failed'

            # signed URL
            assert self._check_sign_url(key), f'{self.bucket_name}: Sign Url Error'

            # download
            download_file = self.download(key=f'storage_check_{text}.txt', is_stream=True)
            download_text = download_file.read().decode()
            assert download_text == text, f'{self.bucket_name}: DownloadFailed'

            # fetch object metadata
            metadata = self.get_object_meta(key)
            logger.debug(f'get_object_meta: {metadata}')
            assert metadata, f'{self.bucket_name}: Get object metadata Failed'

            # update object metadata
            assert self.update_file_headers(key, {'Content-Type': 'application/octet-stream'}), f'{self.bucket_name}: Update object metadata Failed'
            logger.debug(f'update_file_headers: {self.get_object_meta(key)}')

            # iterate objects
            objects = self.iter_objects(key)
            logger.debug(f'iter_objects: {objects}')
            assert objects, f'{self.bucket_name} iter objects Failed'

            # delete the object
            self.delete_object(key)
            logger.debug(f'file_exists: {self.file_exists(key)}')
            assert not self.file_exists(key), f'{self.bucket_name} delete object Failed'

            # generate a post policy
            policy = self.get_policy(filepath='upload_policy/', callback_url='https://hub.realibox.com/api/hub/v1/test', callback_data={'a':'b'})
            logger.debug(f'get_policy: {policy}')
            assert isinstance(policy, dict), f'{self.bucket_name}: Get policy Failed'
            return True
        except AssertionError as e:
            raise StorageRequestError(e)

    def _cors_check(self):
        """Check that the bucket's CORS configuration is set correctly."""
        allowed_methods = {'GET', 'PUT', 'POST', 'DELETE', 'HEAD'}
        cors_dict = self.get_bucket_cors()
        logger.debug(f'_cors_check: {cors_dict}')
        if set(cors_dict['allowed_methods']) != allowed_methods:
            raise StorageRequestError(f'{self.bucket_name}: CORS设置错误')
        if cors_dict['allowed_headers'] != ['*']:
            raise StorageRequestError(f'{self.bucket_name}: CORS设置错误')
        if cors_dict['allowed_origins'] != ['*']:
            raise StorageRequestError(f'{self.bucket_name}: CORS设置错误')
        return True

    def _check_sign_url(self, key):
        """Check that the signed URL opens successfully and that HTTPS is
        configured."""
        try:
            sign_url = self.get_sign_url(key=key, expire=600)
            resp = urlopen(self.scheme + ':' + sign_url)
            assert resp.status < 300, f'{self.bucket_name}: Sign Url Error, {sign_url}'
        except URLError as e:
            # An SSL verification failure means HTTPS is not set up on the domain.
            if isinstance(e.reason, SSLCertVerificationError):
                raise StorageRequestError(f'{self.bucket_name}: 未开启https')
            raise StorageRequestError(f'{self.bucket_name}: Sign Url Error')
        return True

    @staticmethod
    def parse_content_type(filename):
        # Map the file extension to a Content-Type, with a generic fallback.
        ext = filename.split('.')[-1].lower()
        return CONTENT_TYPE.get(ext, DEFAULT_CONTENT_TYPE)

    def get_key_from_url(self, url, urldecode=False):
        """
        Extract the storage key from a URL.
        oss/obs: strip the leading '/'
        """
        url_path = get_url_path(url, urldecode)
        return url_path[1:]

    def _get_key_from_url_minio(self, url, urldecode=False):
        """
        Extract the storage key from a URL.
        minio/s3/azure: strip the leading f'/{bucket_name}/'
        """
        url_path = get_url_path(url, urldecode)
        return url_path[len(self.bucket_name)+2:]
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/extensions/storage/base.py
base.py
import traceback
from datetime import datetime, timedelta
from io import BytesIO
from typing import Union, IO, AnyStr
from os import PathLike

from yzcore.extensions.storage.base import StorageManagerBase, StorageRequestError, logger
from yzcore.extensions.storage.schemas import AzureConfig
from yzcore.extensions.storage.azure.utils import wrap_request_raise_404
from yzcore.utils.time_utils import datetime2str

try:
    from azure.storage.blob import BlobServiceClient, ContentSettings, ContainerClient, generate_blob_sas,\
        BlobSasPermissions
    from azure.core.exceptions import ResourceExistsError
except ImportError:
    # The SDK is optional at import time; __init() raises a clear error when
    # it is actually needed. (Was a bare `except:`, which also swallowed
    # SystemExit/KeyboardInterrupt.)
    BlobServiceClient = None


class AzureManager(StorageManagerBase):
    """Azure Blob Storage backend for StorageManagerBase.

    Maps the generic bucket/object vocabulary onto Azure containers/blobs.
    """

    def __init__(self, conf: AzureConfig):
        super(AzureManager, self).__init__(conf)
        self.connection_string = conf.connection_string
        self.account_key = conf.account_key
        self.account_name = conf.account_name
        self.__init()

    def __init(self, bucket_name=None):
        """Initialize the service/container clients.

        :param bucket_name: when given, switch the manager to this container.
        :raises ImportError: when azure-storage-blob is not installed.
        """
        if BlobServiceClient is None:
            raise ImportError("'azure-storage-blob' must be installed to use AzureManager")
        if bucket_name:
            self.bucket_name = bucket_name
        self.blob_service_client = BlobServiceClient.from_connection_string(self.connection_string)
        self.container_client = self.blob_service_client.get_container_client(self.bucket_name)

    def create_bucket(self, bucket_name):
        """Create a container; an already-existing container is not an error."""
        try:
            self.blob_service_client.create_container(bucket_name)
        except ResourceExistsError:
            pass
        self.bucket_name = bucket_name

    def get_bucket_cors(self):
        """Return the account-level CORS rules in the generic dict shape."""
        cors_dict = {
            'allowed_origins': [],
            'allowed_methods': [],
            'allowed_headers': [],
        }
        # NOTE: if several rules exist, the last one wins (matches original behavior).
        for cors_rule in self.blob_service_client.get_service_properties()['cors']:
            cors_dict['allowed_origins'] = cors_rule.allowed_origins
            cors_dict['allowed_methods'] = cors_rule.allowed_methods
            cors_dict['allowed_headers'] = cors_rule.allowed_headers
        return cors_dict

    def list_buckets(self):
        """List the account's containers."""
        return self.blob_service_client.list_containers()

    def is_exist_bucket(self, bucket_name=None):
        """Return whether the container exists."""
        if bucket_name is None:
            bucket_name = self.bucket_name
        container_client = self.blob_service_client.get_container_client(bucket_name)
        return container_client.exists()

    def delete_bucket(self, bucket_name=None):
        """Delete the container."""
        if bucket_name is None:
            bucket_name = self.bucket_name
        self.blob_service_client.delete_container(bucket_name)

    def get_sign_url(self, key, expire=0):
        """Build a read-only SAS URL for the blob (scheme-relative)."""
        expire_time = datetime.utcnow() + timedelta(seconds=expire or self.private_expire_time)
        blob_client = self.container_client.get_blob_client(blob=key)
        sas_sign = generate_blob_sas(
            account_name=self.account_name,
            container_name=self.bucket_name,
            blob_name=key,
            account_key=self.account_key,
            expiry=expire_time,
            permission=BlobSasPermissions(read=True)
        )
        url = f'{blob_client.url}?{sas_sign}'
        # Return scheme-relative so callers can prepend http/https themselves.
        return '//' + url.split('//', 1)[-1]

    def post_sign_url(self, key):
        # POST-policy uploads are not supported on Azure; PUT SAS is used instead.
        pass

    def put_sign_url(self, key):
        """
        Build the signed URL for a PUT upload.
        Required request header: x-ms-blob-type: BlockBlob
        Put the file content directly in the request body (binary).
        """
        expire_time = datetime.utcnow() + timedelta(seconds=self.policy_expire_time)
        blob_client = self.container_client.get_blob_client(blob=key)
        sas_sign = generate_blob_sas(
            account_name=self.account_name,
            container_name=self.bucket_name,
            blob_name=key,
            account_key=self.account_key,
            expiry=expire_time,
            permission=BlobSasPermissions(write=True)
        )
        return f'{blob_client.url}?{sas_sign}'

    def get_file_url(self, key, with_scheme=False):
        # Azure uses the '{endpoint}/{bucket}' URL layout, like minio/s3.
        return self._get_file_url_minio(key, with_scheme)

    @property
    def host(self):
        return self._host_minio

    def get_key_from_url(self, url, urldecode=False):
        """Extract the storage key from a URL."""
        return self._get_key_from_url_minio(url, urldecode)

    def iter_objects(self, prefix='', marker=None, delimiter=None, max_keys=100):
        """List blobs under a prefix in the generic [{key,url,size}] shape."""
        objects = self.container_client.list_blobs(name_starts_with=prefix, results_per_page=max_keys)
        _result = []
        for obj in objects:
            _result.append({
                'key': obj.name,
                'url': self.get_file_url(obj.name),
                'size': obj.size,
            })
        return _result

    @wrap_request_raise_404
    def get_object_meta(self, key: str):
        """Fetch object metadata.

        Unlike oss/obs/minio, Azure's etag is not the file's MD5;
        content_md5 must have been supplied at upload time."""
        blob_client = self.container_client.get_blob_client(blob=key)
        metadata = blob_client.get_blob_properties()
        content_md5 = metadata['content_settings']['content_md5'] or ''
        if isinstance(content_md5, bytearray):
            content_md5 = content_md5.hex()
        return {
            'etag': content_md5.lower(),  # metadata['etag'].strip('"').lower()
            'size': metadata['size'],
            'last_modified': datetime2str(metadata['last_modified']),
            'content_type': metadata['content_settings']['content_type']
        }

    @wrap_request_raise_404
    def _set_object_headers(self, key: str, headers: dict):
        blob_client = self.container_client.get_blob_client(blob=key)
        # set_http_headers overwrites all content settings, so preserve the
        # existing content_md5 unless the caller supplied one.
        if not any([headers.get('content_md5'), headers.get('Content-MD5')]):
            metadata = blob_client.get_blob_properties()
            headers['content_md5'] = metadata['content_settings']['content_md5']
        blob_client.set_http_headers(ContentSettings(**headers))
        return True

    def file_exists(self, key):
        """Check whether the blob exists."""
        blob_client = self.container_client.get_blob_client(blob=key)
        return blob_client.exists()

    @wrap_request_raise_404
    def download_stream(self, key, **kwargs):
        """Download the blob into an in-memory stream (rewound to 0)."""
        blob_client = self.container_client.get_blob_client(blob=key)
        stream = BytesIO()
        blob_client.download_blob().readinto(stream)
        stream.seek(0)
        return stream

    @wrap_request_raise_404
    def download_file(self, key, local_name, **kwargs):
        """Download the blob to a local file."""
        blob_client = self.container_client.get_blob_client(blob=key)
        with open(local_name, 'wb') as f:
            f.write(blob_client.download_blob().readall())

    def upload_file(self, filepath: Union[str, PathLike], key: str, **kwargs):
        """Upload a local file."""
        with open(filepath, 'rb') as f:
            return self.upload_obj(f, key)

    def upload_obj(self, file_obj: Union[IO, AnyStr], key: str, **kwargs):
        """Upload a file-like object / raw content."""
        try:
            content_settings = ContentSettings(content_type=self.parse_content_type(key))
            blob_client = self.container_client.get_blob_client(blob=key)
            blob_client.upload_blob(file_obj, overwrite=True, content_settings=content_settings)
            return self.get_file_url(key)
        except Exception:
            logger.error(f'azure blob upload error: {traceback.format_exc()}')
            raise StorageRequestError('azure blob upload error')

    def delete_object(self, key: str):
        """Delete the blob (including its snapshots)."""
        blob_client = self.container_client.get_blob_client(blob=key)
        blob_client.delete_blob(delete_snapshots='include')
        return True

    def get_policy(
            self,
            filepath: str,
            callback_url: str,
            callback_data: dict,
            **kwargs
    ):
        """
        Authorize a third party to upload directly.

        :param filepath: the complete key, assembled by the caller beforehand
        :param callback_url: URL the uploader should call back after upload
        :param callback_data: parameters to pass through to the callback
        :return:
        """
        return {
            'mode': self.mode,
            'host': self.put_sign_url(filepath),
            'headers': {'x-ms-blob-type': 'BlockBlob'},
            'callback': {'url': callback_url, 'data': callback_data},
        }
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/extensions/storage/azure/__init__.py
__init__.py
import traceback
from typing import Union, IO, AnyStr
from os import PathLike
from yzcore.extensions.storage.base import StorageManagerBase, StorageRequestError, logger
from yzcore.extensions.storage.schemas import S3Config
from yzcore.extensions.storage.utils import AnyStr2BytesIO
from yzcore.extensions.storage.amazon.utils import wrap_request_return_bool, wrap_request_raise_404
from yzcore.utils import datetime2str

# boto3 is an optional dependency: keep the import soft so the rest of the
# storage package stays importable when the Amazon backend is not installed.
try:
    import boto3
    from boto3.session import Session
    from botocore.exceptions import ClientError
except ImportError:
    boto3 = None


class S3Manager(StorageManagerBase):
    """Object-storage manager backed by Amazon S3 (or any S3-compatible
    endpoint) through boto3."""

    def __init__(self, conf: S3Config):
        super(S3Manager, self).__init__(conf)
        # scheme / endpoint / credentials are populated by StorageManagerBase.
        self.endpoint_url = f'{self.scheme}://{self.endpoint}'
        self.__init()

    def __init(self, bucket_name=None):
        """Create the boto3 S3 client; optionally switch the working bucket."""
        if boto3 is None:
            raise ImportError("'boto3' must be installed to use AmazonS3Manager")
        if bucket_name:
            self.bucket_name = bucket_name
        self.client = boto3.client(
            's3',
            aws_access_key_id=self.access_key_id,
            aws_secret_access_key=self.access_key_secret,
            endpoint_url=self.endpoint_url,
        )

    def create_bucket(self, bucket_name):
        # Not implemented for the S3 backend.
        pass

    def get_bucket_cors(self):
        # Not implemented for the S3 backend.
        pass

    def list_buckets(self):
        """Return the raw bucket list from the service."""
        return self.client.list_buckets()['Buckets']

    @wrap_request_return_bool
    def is_exist_bucket(self, bucket_name=None):
        """True if the bucket exists (the decorator maps request errors to False)."""
        if bucket_name is None:
            bucket_name = self.bucket_name
        return self.client.head_bucket(Bucket=bucket_name)

    def delete_bucket(self, bucket_name=None):
        # Not implemented for the S3 backend.
        pass

    def get_sign_url(self, key, expire=0):
        """Return a scheme-relative presigned GET URL for *key*.

        :param key: object key
        :param expire: validity in seconds; 0 falls back to private_expire_time
        """
        url = self.client.generate_presigned_url(
            ClientMethod='get_object',
            Params={'Bucket': self.bucket_name, 'Key': key},
            ExpiresIn=expire or self.private_expire_time,
            HttpMethod='GET',
        )
        # Drop the scheme so callers can prepend http/https as appropriate.
        return '//' + url.split('//', 1)[-1]

    def post_sign_url(self, key):
        """Build the form fields a client needs for a browser POST upload.

        :param key: used as the prefix of the final object key.
        :return: {'key': key, 'AWSAccessKeyId': '', 'policy': '', 'signature': ''}
        """
        return self.client.generate_presigned_post(
            Bucket=self.bucket_name,
            # NOTE(review): '$(unknown)' appears to be a placeholder suffix the
            # uploading client substitutes -- confirm against the frontend code.
            Key=key+'$(unknown)',
            ExpiresIn=self.policy_expire_time,
        )

    def put_sign_url(self, key):
        """Return a presigned PUT URL for a direct client upload of *key*."""
        return self.client.generate_presigned_url(
            ClientMethod='put_object',
            Params={'Bucket': self.bucket_name, 'Key': key},
            ExpiresIn=self.policy_expire_time,
            HttpMethod='PUT',
        )

    def iter_objects(self, prefix='', delimiter='', max_keys=100, **kwargs):
        """List up to *max_keys* objects under *prefix* as simple dicts
        with 'key', 'url' and 'size' entries."""
        result = []
        for obj in self.client.list_objects_v2(Bucket=self.bucket_name, Prefix=prefix, Delimiter=delimiter, MaxKeys=max_keys).get('Contents', []):
            result.append({
                'key': obj.get('Key'),
                'url': self.get_file_url(obj.get('Key')),
                'size': obj.get('Size'),
            })
        return result

    @wrap_request_raise_404
    def get_object_meta(self, key: str):
        """Return etag/size/last_modified/content_type for *key*
        (missing keys raise via the decorator)."""
        response = self.client.head_object(Bucket=self.bucket_name, Key=key)
        return {
            'etag': response['ETag'].strip('"').lower(),
            'size': response['ContentLength'],
            'last_modified': datetime2str(response['LastModified']),
            'content_type': response['ContentType'],
        }

    @wrap_request_raise_404
    def _set_object_headers(self, key: str, headers: dict):
        """Replace object metadata in place via a self-copy.

        'Content-Type' is required in *headers*; the remaining entries become
        user metadata. Raises KeyError if 'Content-Type' is absent.
        """
        content_type = headers.pop('Content-Type')
        self.client.copy_object(
            Bucket=self.bucket_name, Key=key,
            CopySource={'Bucket': self.bucket_name, 'Key': key},
            ContentType=content_type,
            Metadata=headers,
            MetadataDirective='REPLACE',
        )
        return True

    @wrap_request_return_bool
    def file_exists(self, key):
        """True if *key* exists (the decorator maps request errors to False)."""
        return self.client.head_object(Bucket=self.bucket_name, Key=key)

    @wrap_request_raise_404
    def download_stream(self, key, **kwargs):
        """Return the object's streaming body (missing keys raise)."""
        return self.client.get_object(Bucket=self.bucket_name, Key=key)['Body']

    @wrap_request_raise_404
    def download_file(self, key, local_name, **kwargs):
        """Download *key* to the local path *local_name* (missing keys raise)."""
        self.client.download_file(Bucket=self.bucket_name, Key=key, Filename=local_name)

    def upload_file(self, filepath: Union[str, PathLike], key: str, **kwargs):
        """Upload a local file and return its public URL."""
        with open(filepath, 'rb') as f:
            return self.upload_obj(f, key)

    def upload_obj(self, file_obj: Union[IO, AnyStr], key: str, **kwargs):
        """Upload a file object (str/bytes are wrapped into a BytesIO first)
        and return its public URL."""
        extra_args = {'ContentType': self.parse_content_type(key)}
        try:
            if isinstance(file_obj, (str, bytes)):
                file_obj = AnyStr2BytesIO(file_obj)
            self.client.upload_fileobj(Bucket=self.bucket_name, Key=key, Fileobj=file_obj, ExtraArgs=extra_args)
            return self.get_file_url(key)
        except Exception:
            logger.error(f's3 upload error: {traceback.format_exc()}')
            raise StorageRequestError(f's3 upload error')

    def delete_object(self, key: str):
        """Delete *key*; always returns True."""
        self.client.delete_object(Bucket=self.bucket_name, Key=key)
        return True

    def get_policy(
            self,
            filepath: str,
            callback_url: str,
            callback_data: dict,
            **kwargs
    ):
        """Authorize a third-party upload.

        S3 performs no server-side callback here; the callback url/data are
        returned so the frontend can invoke the callback after uploading.

        :param filepath: full object key, assembled by the caller
        :param callback_url: callback endpoint
        :param callback_data: parameters to pass back
        """
        form_data = self.post_sign_url(filepath)
        # 'key' is supplied again by the uploader; drop it from the signed fields.
        form_data['fields'].pop('key')
        data = {
            'mode': self.mode,
            'host': form_data['url'],
            'dir': filepath,
            'callback': {'url': callback_url, 'data': callback_data},
            **form_data['fields'],
        }
        return data

    @property
    def host(self):
        # Reuses the minio-style host builder inherited from StorageManagerBase.
        return self._host_minio

    def get_file_url(self, key, with_scheme=False):
        return self._get_file_url_minio(key, with_scheme)

    def get_key_from_url(self, url, urldecode=False):
        """Extract the object key from a URL."""
        return self._get_key_from_url_minio(url, urldecode)
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/extensions/storage/amazon/__init__.py
__init__.py
import json
import traceback
from datetime import timedelta, datetime
from os import PathLike
from typing import Union, IO, AnyStr
from yzcore.extensions.storage.base import StorageManagerBase, StorageRequestError, logger
from yzcore.extensions.storage.schemas import MinioConfig
from yzcore.extensions.storage.utils import AnyStr2BytesIO
from yzcore.extensions.storage.minio.utils import wrap_request_return_bool, wrap_request_raise_404
from yzcore.utils.time_utils import datetime2str

# minio is an optional dependency; keep the package importable without it.
# NOTE(review): a bare except also swallows non-import errors -- ImportError
# would be the narrower choice.
try:
    from minio import Minio
    from minio.datatypes import PostPolicy
    from minio.commonconfig import CopySource
    from minio.deleteobjects import DeleteObject
    from minio.error import S3Error
except:
    Minio = None


class MinioManager(StorageManagerBase):
    """Object-storage manager backed by a MinIO server.

    Keeps two clients: a public one (for URLs handed to browsers) and an
    optional intranet one that is preferred for server-side calls.
    """

    def __init__(self, conf: MinioConfig):
        super(MinioManager, self).__init__(conf)
        self.internal_endpoint = conf.internal_endpoint
        self.disable_internal_endpoint = conf.disable_internal_endpoint
        self.internal_minioClient = None
        self.__init()

    def __init(self, bucket_name=None):
        """Create the public (and optionally internal) MinIO clients."""
        if Minio is None:
            raise ImportError("'minio' must be installed to use MinioManager")
        if bucket_name:
            self.bucket_name = bucket_name
        self.minioClient = Minio(
            self.endpoint,
            access_key=self.access_key_id,
            secret_key=self.access_key_secret,
            secure=True if self.scheme == 'https' else False,
        )
        if self.internal_endpoint and not self.disable_internal_endpoint:
            # The intranet client always speaks plain HTTP.
            self.internal_minioClient = Minio(
                self.internal_endpoint,
                access_key=self.access_key_id,
                secret_key=self.access_key_secret,
                secure=False,
            )

    def _internal_minio_client_first(self):
        """Prefer the intranet client for server-side calls when configured."""
        if self.internal_minioClient:
            return self.internal_minioClient
        else:
            return self.minioClient

    def get_bucket_cors(self):
        """Return the bucket policy as a dict.

        The returned shape differs substantially from the OSS/OBS CORS results.
        """
        client = self._internal_minio_client_first()
        result = client.get_bucket_policy(self.bucket_name)
        return json.loads(result)

    def set_bucket_cors(self, policy: dict):
        client = self._internal_minio_client_first()
        return client.set_bucket_policy(self.bucket_name, policy)

    def _cors_check(self):
        """True if the bucket policy grants every action needed for uploads."""
        passed = False
        action_slots = ['s3:AbortMultipartUpload', 's3:DeleteObject', 's3:GetObject', 's3:ListMultipartUploadParts', 's3:PutObject']
        cors_dict = self.get_bucket_cors()
        logger.debug(f'_cors_check: {cors_dict}')
        for cors in cors_dict['Statement']:
            effect = cors['Effect']
            resource = cors['Resource'][0]
            actions = cors['Action']
            if effect == 'Allow':
                if resource == f'arn:aws:s3:::{self.bucket_name}/*' or resource == 'arn:aws:s3:::*':
                    # Passes only when no required action is missing.
                    if all([False for i in action_slots if i not in actions]):
                        passed = True
        return passed

    def create_bucket(self, bucket_name=None):
        """Create *bucket_name* and make it the working bucket."""
        client = self._internal_minio_client_first()
        client.make_bucket(bucket_name)
        self.bucket_name = bucket_name

    def list_buckets(self):
        client = self._internal_minio_client_first()
        return client.list_buckets()

    def is_exist_bucket(self, bucket_name=None):
        client = self._internal_minio_client_first()
        if bucket_name is None:
            bucket_name = self.bucket_name
        return client.bucket_exists(bucket_name)

    def delete_bucket(self, bucket_name=None):
        client = self._internal_minio_client_first()
        if bucket_name is None:
            bucket_name = self.bucket_name
        return client.remove_bucket(bucket_name)

    def get_sign_url(self, key, expire=0):
        """Scheme-relative presigned GET URL; uses the *public* client so the
        URL works outside the intranet."""
        expire_time = timedelta(seconds=expire or self.private_expire_time)
        url = self.minioClient.presigned_get_object(self.bucket_name, key, expires=expire_time)
        return '//' + url.split('//', 1)[-1]

    def post_sign_url(self, key):
        """Presigned POST policy; *key* is enforced as a starts-with prefix."""
        client = self._internal_minio_client_first()
        expire_time = datetime.now() + timedelta(seconds=self.policy_expire_time)
        policy = PostPolicy(bucket_name=self.bucket_name, expiration=expire_time)
        policy.add_starts_with_condition('$key', key)
        # policy.add_content_length_range_condition(1, 1024*1024*1024)  # cap the file size
        return client.presigned_post_policy(policy)

    def put_sign_url(self, key):
        # Public client: the PUT URL is handed to external callers.
        return self.minioClient.presigned_put_object(self.bucket_name, key)

    def iter_objects(self, prefix='', **kwargs):
        """List objects under *prefix* as dicts with 'key', 'url' and 'size'."""
        client = self._internal_minio_client_first()
        objects = client.list_objects(self.bucket_name, prefix=prefix)
        _result = []
        for obj in objects:
            _result.append({
                'key': obj.object_name,
                'url': self.get_file_url(obj.object_name),
                'size': obj.size,
            })
        return _result

    @wrap_request_raise_404
    def get_object_meta(self, key: str):
        """Return basic object metadata (ETag, size, last-modified,
        Content-Type) without fetching the content."""
        client = self._internal_minio_client_first()
        meta = client.stat_object(self.bucket_name, key)
        return {
            'etag': meta.etag,
            'size': meta.size,
            'last_modified': datetime2str(meta.last_modified),
            'content_type': meta.content_type,
        }

    @wrap_request_raise_404
    def _set_object_headers(self, key: str, headers: dict):
        """Update object metadata (mainly Content-Type) via a self-copy."""
        client = self._internal_minio_client_first()
        client.copy_object(self.bucket_name, key, CopySource(self.bucket_name, key), metadata=headers, metadata_directive='REPLACE')
        return True

    @wrap_request_return_bool
    def file_exists(self, key):
        client = self._internal_minio_client_first()
        return client.stat_object(self.bucket_name, key)

    @wrap_request_raise_404
    def download_stream(self, key, **kwargs):
        client = self._internal_minio_client_first()
        return client.get_object(self.bucket_name, key)

    @wrap_request_raise_404
    def download_file(self, key, local_name, **kwargs):
        client = self._internal_minio_client_first()
        client.fget_object(self.bucket_name, key, local_name)

    def upload_file(self, filepath: Union[str, PathLike], key: str, **kwargs):
        """Upload a local file and return its public URL."""
        client = self._internal_minio_client_first()
        try:
            content_type = self.parse_content_type(key)
            client.fput_object(self.bucket_name, key, filepath, content_type=content_type)
            return self.get_file_url(key)
        except Exception:
            logger.error(f'minio upload error: {traceback.format_exc()}')
            raise StorageRequestError('minio upload error')

    def upload_obj(self, file_obj: Union[IO, AnyStr], key: str, **kwargs):
        """Upload a file object (str/bytes are wrapped into a BytesIO first)
        and return its public URL."""
        client = self._internal_minio_client_first()
        try:
            if isinstance(file_obj, (str, bytes)):
                file_obj = AnyStr2BytesIO(file_obj)
            content_type = self.parse_content_type(key)
            # length=-1 (unknown size) requires a part_size: stream in 5 MiB parts.
            client.put_object(self.bucket_name, key, file_obj, length=-1, content_type=content_type, part_size=1024 * 1024 * 5)
            return self.get_file_url(key)
        except Exception:
            logger.error(f'minio upload error: {traceback.format_exc()}')
            raise StorageRequestError('minio upload error')

    def delete_object(self, key: str):
        """Delete *key*; raises if MinIO reports any per-object error."""
        client = self._internal_minio_client_first()
        errors = client.remove_objects(self.bucket_name, [DeleteObject(key)])
        for error in errors:
            logger.error(f'minio delete file error: {error}')
            raise StorageRequestError('minio delete file error')
        return True

    def get_policy(
            self,
            filepath: str,
            callback_url: str,
            callback_data: dict,
            **kwargs
    ):
        """Authorize a third-party upload.

        MinIO has no server-side callback, so the callback url/data are
        returned for the frontend to invoke after the upload.

        :param filepath:
        :param callback_url: object-storage callback endpoint
        :param callback_data: parameters to pass back
        :return:
        """
        form_data = self.post_sign_url(filepath)
        data = {
            'mode': self.mode,
            'dir': filepath,
            'host': f'{self.scheme}:{self.host}',
            'success_action_status': 200,
            'callback': {'url': callback_url, 'data': callback_data},
            # 'Content-Type': Content-Type chosen at upload time,
            **form_data,
        }
        return data

    @property
    def host(self):
        return self._host_minio

    def get_key_from_url(self, url, urldecode=False):
        """Extract the object key from a URL."""
        return self._get_key_from_url_minio(url, urldecode)

    def get_file_url(self, key, with_scheme=False):
        return self._get_file_url_minio(key, with_scheme)
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/extensions/storage/minio/__init__.py
__init__.py
import os
import json
import base64
import hmac
import datetime
import hashlib
from urllib import parse
from typing import Union, IO, AnyStr
from os import PathLike
from yzcore.extensions.storage.base import StorageManagerBase, StorageRequestError
from yzcore.extensions.storage.oss.const import *
from yzcore.extensions.storage.oss.utils import wrap_request_return_bool, wrap_request_raise_404
from yzcore.extensions.storage.schemas import OssConfig

# oss2 is an optional dependency; keep the package importable without it.
try:
    import oss2
    from oss2 import CaseInsensitiveDict
except:
    oss2 = None


class OssManager(StorageManagerBase):
    """Object-storage manager backed by Aliyun OSS through oss2."""

    def __init__(self, conf: OssConfig):
        super(OssManager, self).__init__(conf)
        self.internal_endpoint = conf.internal_endpoint
        self.bucket = None
        self.service = None  # lazily created oss2.Service, see list_buckets()
        self.__init()

    def __init(self, bucket_name=None):
        """Create auth + bucket handles; optionally switch the working bucket."""
        if oss2 is None:
            raise ImportError("'oss2' must be installed to use OssManager")
        if bucket_name:
            self.bucket_name = bucket_name
        self.auth = oss2.Auth(self.access_key_id, self.access_key_secret)
        # Prefer the intranet endpoint when one is configured.
        if self.internal_endpoint:
            self.bucket = oss2.Bucket(self.auth, self.internal_endpoint, self.bucket_name)
        else:
            self.bucket = oss2.Bucket(self.auth, self.endpoint, self.bucket_name)

    def reload_oss(self, **kwargs):
        """Reload the OSS configuration and rebuild the client handles."""
        self.access_key_id = kwargs.get("access_key_id")
        self.access_key_secret = kwargs.get("access_key_secret")
        self.bucket_name = kwargs.get("bucket_name")
        self.endpoint = kwargs.get("endpoint")
        self.__init()

    def create_bucket(self, bucket_name=None, acl_type='private', storage_type='standard', redundancy_type='zrs'):
        """Create a bucket and make it the working bucket."""
        permission = ACL_TYPE.get(acl_type)
        config = oss2.models.BucketCreateConfig(
            storage_class=STORAGE_CLS.get(storage_type),
            data_redundancy_type=REDUNDANCY_TYPE.get(redundancy_type)
        )
        result = self.bucket.create_bucket(permission, input=config)
        self.__init(bucket_name=bucket_name)
        return result

    def get_bucket_cors(self):
        """Return the bucket CORS configuration (empty lists when none is set)."""
        cors_dict = {
            'allowed_origins': [],
            'allowed_methods': [],
            'allowed_headers': [],
        }
        try:
            cors = self.bucket.get_bucket_cors()
            for rule in cors.rules:
                cors_dict['allowed_origins'] = rule.allowed_origins
                cors_dict['allowed_headers'] = rule.allowed_headers
                cors_dict['allowed_methods'] = rule.allowed_methods
        except oss2.exceptions.NoSuchCors:
            pass
        return cors_dict

    def list_buckets(self, prefix='', marker='', max_keys=100, params=None):
        """List the caller's buckets filtered by *prefix*.

        :param str prefix: only list buckets whose name starts with this; '' lists all
        :param str marker: paging marker; empty on the first call, then next_marker
        :param int max_keys: maximum number of buckets returned per call
        :param dict params: extra list parameters ('tag-key'/'tag-value' filters)
        :return: the listing result
        :rtype: oss2.models.ListBucketsResult
        """
        # BUG FIX: __init__ assigns self.service = None, so the original
        # `if not hasattr(self, 'service')` guard was always False and the
        # Service object was never created (AttributeError on None).
        if self.service is None:
            self.service = oss2.Service(self.auth, self.endpoint)
        return self.service.list_buckets(
            prefix=prefix, marker=marker, max_keys=max_keys, params=params)

    @wrap_request_return_bool
    def is_exist_bucket(self, **kwargs):
        """True if the working bucket exists (decorator maps errors to False)."""
        return self.bucket.get_bucket_info()

    def delete_bucket(self, **kwargs):
        """Delete the working bucket; True on success, False on 404."""
        try:
            resp = self.bucket.delete_bucket()
            if resp.status < 300:
                return True
            elif resp.status == 404:
                return False
        except:
            # NOTE(review): bare except + print swallows all errors and
            # returns None implicitly -- consider logging and narrowing.
            import traceback
            print(traceback.format_exc())

    def encrypt_bucket(self):
        """Enable server-side encryption (AES256) on the working bucket."""
        # AES256 needs no KMS key id; leave it empty.
        rule = oss2.models.ServerSideEncryptionRule()
        rule.sse_algorithm = oss2.SERVER_SIDE_ENCRYPTION_AES256
        rule.kms_master_keyid = ""
        result = self.bucket.put_bucket_encryption(rule)
        print('http response code:', result.status)
        return result

    def delete_encrypt_bucket(self):
        """Remove the bucket's server-side encryption configuration."""
        result = self.bucket.delete_bucket_encryption()
        print('http status:', result.status)
        return result

    def get_sign_url(self, key, expire=0):
        """Scheme-relative signed GET URL for *key*."""
        url = self.bucket.sign_url("GET", key, expire or self.private_expire_time)
        return '//' + url.split('//', 1)[-1]

    def post_sign_url(self, key):
        # Not used for OSS: get_policy() builds the POST policy itself.
        pass

    def put_sign_url(self, key):
        """Signed PUT URL for a direct client upload of *key*."""
        return self.bucket.sign_url("PUT", key, self.policy_expire_time)

    def iter_objects(self, prefix='', marker='', delimiter='', max_keys=100):
        """Iterate objects in the bucket.

        :param prefix: key prefix
        :param marker: paging marker
        :param delimiter:
        :param max_keys:
        :return: list of dicts with 'key', 'url' and 'size'
        """
        _result = []
        for obj in oss2.ObjectIterator(self.bucket, prefix=prefix, marker=marker, delimiter=delimiter, max_keys=max_keys):
            _result.append({
                'key': obj.key,
                'url': self.get_file_url(key=obj.key),
                'size': obj.size,
            })
        return _result

    @wrap_request_raise_404
    def download_stream(self, key, process=None):
        return self.bucket.get_object(key, process=process)

    @wrap_request_raise_404
    def download_file(self, key, local_name, process=None):
        self.bucket.get_object_to_file(key, local_name, process=process)

    def upload_file(self, filepath: Union[str, PathLike], key: str, *, num_threads=2, multipart_threshold=None):
        """Resumable upload of a local file; returns the public URL.

        :param filepath: local file path
        :param key: destination object key
        :param num_threads: parallel upload threads
        :param multipart_threshold: size above which multipart upload is used
        """
        headers = CaseInsensitiveDict({'Content-Type': self.parse_content_type(key)})
        result = oss2.resumable_upload(
            self.bucket, key, filepath,
            headers=headers,
            num_threads=num_threads,
            multipart_threshold=multipart_threshold,
        )
        if result.status // 100 != 2:
            raise StorageRequestError(f'oss upload error: {result.resp}')
        return self.get_file_url(key)

    def upload_obj(self, file_obj: Union[IO, AnyStr], key: str, **kwargs):
        """Upload a file object; returns the public URL."""
        headers = CaseInsensitiveDict({'Content-Type': self.parse_content_type(key)})
        result = self.bucket.put_object(key, file_obj, headers=headers)
        if result.status // 100 != 2:
            raise StorageRequestError(f'oss upload error: {result.resp}')
        return self.get_file_url(key)

    def delete_object(self, key: str):
        """Delete *key*; always returns True."""
        self.bucket.delete_object(key)
        return True

    def get_policy(
            self,
            filepath: str,
            callback_url: str,
            callback_data: dict,
            callback_content_type: str = "application/x-www-form-urlencoded",
    ):
        """Authorize a third-party upload with a server-side OSS callback.

        :param filepath:
        :param callback_url:
        :param callback_data: parameters to pass back
        :param callback_content_type: callback Content-Type,
            "application/json" or "application/x-www-form-urlencoded"
        :return: dict of form fields for the browser POST
        """
        params = parse.urlencode(
            dict(data=json.dumps(callback_data)))
        policy_encode = self._get_policy_encode(filepath)
        sign = self.get_signature(policy_encode)
        callback_dict = dict()
        callback_dict["callbackUrl"] = callback_url
        # OSS substitutes the ${...} variables server-side before calling back.
        callback_dict["callbackBody"] = (
            "filepath=${object}&size=${size}&mime_type=${mimeType}"
            "&etag=${etag}"
            "&img_height=${imageInfo.height}&img_width=${imageInfo.width}"
            "&img_format=${imageInfo.format}&" + params
        )
        callback_dict["callbackBodyType"] = callback_content_type
        callback_param = json.dumps(callback_dict).strip().encode()
        base64_callback_body = base64.b64encode(callback_param)
        return dict(
            mode=self.mode,
            dir=filepath,
            host=f'{self.scheme}:{self.host}',
            OSSAccessKeyId=self.access_key_id,
            policy=policy_encode.decode(),
            signature=sign,
            callback=base64_callback_body.decode(),
        )

    def _get_policy_encode(self, filepath):
        """Base64-encode the upload policy restricting keys to *filepath*."""
        expire_time = datetime.datetime.now() + datetime.timedelta(
            seconds=self.policy_expire_time
        )
        policy_dict = dict(
            expiration=expire_time.isoformat() + "Z",
            conditions=[
                ["starts-with", "$key", filepath],  # key must start with filepath
                # ["eq", "$success_action_redirect", "public-read"],  # exact match
                # ["content-length-range", 1, 1024*1024*1024]  # object size limit
            ],
        )
        policy = json.dumps(policy_dict).strip().encode()
        return base64.b64encode(policy)

    def get_signature(self, policy_encode):
        """HMAC-SHA1 signature of the encoded policy.

        :param policy_encode: base64-encoded policy bytes
        :return: base64 signature string
        """
        h = hmac.new(
            self.access_key_secret.encode("utf-8"), policy_encode, hashlib.sha1
        )
        sign_result = base64.encodebytes(h.digest()).strip()
        return sign_result.decode()

    @wrap_request_raise_404
    def _set_object_headers(self, key: str, headers: dict):
        self.bucket.update_object_meta(key, headers)
        return True

    def file_exists(self, key):
        """True if *key* exists."""
        return self.bucket.object_exists(key)

    @wrap_request_raise_404
    def get_object_meta(self, key: str):
        """Return basic object metadata (ETag, size, last-modified,
        Content-Type) without fetching the content."""
        # get_object_meta() returns too little; head_object() has full headers.
        meta = self.bucket.head_object(key)
        return {
            'etag': meta.etag.lower(),
            'size': meta.content_length,
            'last_modified': meta.headers['Last-Modified'],
            'content_type': meta.headers['Content-Type']
        }
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/extensions/storage/oss/__init__.py
__init__.py
import base64
import json
from typing import Union, IO, AnyStr
from os import PathLike
from yzcore.extensions.storage.base import StorageManagerBase, StorageRequestError
from yzcore.extensions.storage.obs.utils import wrap_request_return_bool
from yzcore.extensions.storage.schemas import ObsConfig
from yzcore.exceptions import NotFoundObject

# obs is an optional dependency; keep the package importable without it.
try:
    import obs
    from obs import SetObjectMetadataHeader
    from .obs_inherit import ObsClient
except:
    obs = None


class ObsManager(StorageManagerBase):
    """Object-storage manager backed by Huawei Cloud OBS."""

    def __init__(self, conf: ObsConfig):
        super(ObsManager, self).__init__(conf)
        # True: OBS calls the callback itself; False: the frontend does it.
        self.callback_directly = conf.callback_directly
        self.__init()

    def __init(self, bucket_name=None):
        """Create the OBS client; optionally switch the working bucket."""
        if obs is None:
            raise ImportError("'esdk-obs-python' must be installed to use ObsManager")
        if bucket_name:
            self.bucket_name = bucket_name
        # Subclassed ObsClient (obs_inherit) adds createPostSignature support.
        self.obsClient = ObsClient(
            access_key_id=self.access_key_id,
            secret_access_key=self.access_key_secret,
            server=self.endpoint,
        )

    def create_bucket(self, bucket_name=None, location='cn-south-1'):
        """Create a bucket and make it the working bucket."""
        resp = self.obsClient.createBucket(bucket_name, location=location)
        if resp.status < 300:
            self.bucket_name = bucket_name
            return resp
        else:
            raise StorageRequestError(
                f"static_code: {resp.status}, errorCode: {resp.errorCode}. Message: {resp.errorMessage}.")

    def list_buckets(self):
        resp = self.obsClient.listBuckets(isQueryLocation=True)
        if resp.status < 300:
            return resp.body.buckets
        else:
            raise StorageRequestError(
                f"static_code: {resp.status}, errorCode: {resp.errorCode}. Message: {resp.errorMessage}.")

    @wrap_request_return_bool
    def is_exist_bucket(self, bucket_name=None):
        """True if the bucket exists (decorator maps request errors to False)."""
        if bucket_name is None:
            bucket_name = self.bucket_name
        return self.obsClient.headBucket(bucket_name)

    def delete_bucket(self, bucket_name=None):
        if bucket_name is None:
            bucket_name = self.bucket_name
        return self.obsClient.deleteBucket(bucket_name)

    def get_sign_url(self, key, expire=0):
        """Scheme-relative signed GET URL for *key*."""
        res = self.obsClient.createSignedUrl(
            "GET", self.bucket_name, objectKey=key, expires=expire or self.private_expire_time)
        return '//' + res.signedUrl.split('//', 1)[-1]

    def post_sign_url(self, key, form_param=None):
        """POST signature; *form_param* may carry callback url/body fields."""
        return self.obsClient.createPostSignature(
            self.bucket_name, objectKey=key,
            expires=self.policy_expire_time, formParams=form_param)

    def put_sign_url(self, key):
        """Signed PUT URL for a direct client upload of *key*."""
        res = self.obsClient.createSignedUrl(
            "PUT", self.bucket_name, objectKey=key, expires=self.policy_expire_time)
        return res.signedUrl

    def iter_objects(self, prefix='', marker=None, delimiter=None, max_keys=100):
        """Iterate objects in the bucket.

        :param prefix: key prefix
        :param marker:
        :param delimiter:
        :param max_keys:
        :return: list of dicts with 'key', 'url' and 'size'
        """
        _result = []
        resp = self.obsClient.listObjects(self.bucket_name, prefix=prefix, marker=marker, delimiter=delimiter, max_keys=max_keys)
        if resp.status >= 300:
            raise StorageRequestError(
                f"static_code: {resp.status}, errorCode: {resp.errorCode}. Message: {resp.errorMessage}.")
        for obj in resp.body.contents:
            _result.append({
                'key': obj['key'],
                'url': self.get_file_url(key=obj['key']),
                'size': obj['size']
            })
        return _result

    def download_stream(self, key, **kwargs):
        resp = self.obsClient.getObject(self.bucket_name, key, loadStreamInMemory=False)
        if resp.status == 404:
            raise NotFoundObject()
        return resp.body.response

    def download_file(self, key, local_name, progress_callback=None):
        resp = self.obsClient.getObject(
            self.bucket_name, key, downloadPath=local_name,
            progressCallback=progress_callback
        )
        if resp.status == 404:
            raise NotFoundObject()

    def upload_file(self, filepath: Union[str, PathLike], key: str, **kwargs):
        """Upload a local file and return its public URL."""
        headers = obs.PutObjectHeader(contentType=self.parse_content_type(key))
        resp = self.obsClient.putFile(
            self.bucket_name, key, filepath, headers=headers)
        if resp.status >= 300:
            msg = resp.errorMessage
            raise StorageRequestError(f'obs upload error: {msg}')
        return self.get_file_url(key)

    def upload_obj(self, file_obj: Union[IO, AnyStr], key: str, **kwargs):
        """Upload a file object (or raw content) and return its public URL."""
        headers = obs.PutObjectHeader(contentType=self.parse_content_type(key))
        resp = self.obsClient.putContent(
            self.bucket_name, key, content=file_obj, headers=headers)
        if resp.status >= 300:
            msg = resp.errorMessage
            raise StorageRequestError(f'obs upload error: {msg}')
        return self.get_file_url(key)

    def delete_object(self, key: str):
        """Delete *key*; always returns True."""
        self.obsClient.deleteObject(self.bucket_name, key)
        return True

    def get_policy(
            self,
            filepath: str,
            callback_url: str,
            callback_data: dict,
            callback_content_type: str = "application/json",
    ):
        """Authorize a third-party upload.

        Only single-AZ OBS supports server-side callbacks (multi-AZ does not),
        so the callback is issued either by OBS or by the frontend:
        callback_directly: True -> OBS calls back / False -> frontend calls back.

        :param filepath:
        :param callback_url:
        :param callback_data: parameters to pass back
        :param callback_content_type: callback Content-Type,
            "application/json" or "application/x-www-form-urlencoded".
            OBS currently only works with application/json; with
            x-www-form-urlencoded the callback data ends up in the URL.
        :return:
        """
        if self.callback_directly:
            # $(key)/$(etag)/... are substituted by OBS before calling back.
            callback_body = '{"filepath":"$(key)","etag":"$(etag)","size":$(fsize),"mime_type":"$(ext)",' \
                            '"data":' \
                            + json.dumps(callback_data) + '}'
            callback_body_plain = json.dumps(callback_body).strip().encode()
            base64_callback_body = base64.b64encode(callback_body_plain)
            form_param = {
                'body': base64_callback_body.decode(),
                'url': callback_url,
                'body-type': callback_content_type,
                # 'success_action_status': '200',
            }
        else:
            form_param = {}
        res = self.post_sign_url(key=filepath, form_param=form_param)
        data = dict(
            mode=self.mode,
            dir=filepath,
            host=f'{self.scheme}:{self.host}',
            AccessKeyId=self.access_key_id,
            policy=res.policy,
            signature=res.signature,
        )
        if not self.callback_directly:
            # No server-side callback: hand the callback info to the frontend.
            data['callback'] = {'url': callback_url, 'data': callback_data}
        return data

    def _set_object_headers(self, key: str, headers: dict):
        """Update object metadata; mirrors oss.update_file_headers."""
        obs_headers = SetObjectMetadataHeader()
        # OSS and OBS use different parameter names for the content type.
        obs_headers.contentType = headers['Content-Type']
        resp = self.obsClient.setObjectMetadata(self.bucket_name, key, headers=obs_headers)
        if resp.status == 404:
            raise NotFoundObject()
        return True

    @wrap_request_return_bool
    def file_exists(self, key):
        """True if *key* exists (decorator maps request errors to False)."""
        return self.obsClient.headObject(self.bucket_name, key)

    def get_object_meta(self, key: str):
        """Return basic object metadata (ETag, size, last-modified,
        Content-Type) without fetching the content."""
        resp = self.obsClient.getObjectMetadata(self.bucket_name, key)
        if resp.status == 404:
            raise NotFoundObject()
        return {
            'etag': resp.body.etag.strip('"').lower(),
            'size': resp.body.contentLength,
            'last_modified': resp.body.lastModified,
            'content_type': resp.body.contentType,
        }

    def get_bucket_cors(self):
        """Return the bucket CORS configuration in the shared dict shape."""
        cors_dict = {
            'allowed_origins': [],
            'allowed_methods': [],
            'allowed_headers': [],
        }
        resp = self.obsClient.getBucketCors(self.bucket_name)
        if resp.status < 300:
            for rule in resp.body:
                cors_dict['allowed_origins'] = rule.allowedOrigin
                cors_dict['allowed_headers'] = rule.allowedHeader
                cors_dict['allowed_methods'] = rule.allowedMethod
            return cors_dict
        else:
            raise StorageRequestError(
                f"static_code: {resp.status}, errorCode: {resp.errorCode}. Message: {resp.errorMessage}.")
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/extensions/storage/obs/__init__.py
__init__.py
from datetime import datetime, timedelta

# obs is an optional dependency. NOTE(review): if this import fails, the
# class definition below raises NameError on _ObsClient -- confirm this
# module is only imported when the SDK is installed.
try:
    import obs
    from obs import ObsClient as _ObsClient, const, util, client
except:
    obs = None


class ObsClient(_ObsClient):
    """ObsClient subclass that extends POST-signature generation with
    callback fields (url / body / body-type) in the upload policy."""

    def createPostSignature(self, bucketName=None, objectKey=None, expires=300, formParams=None):
        """Public entry point; selects V4 signing when configured."""
        return self._createPostSignature(bucketName, objectKey, expires, formParams, self.signature.lower() == 'v4')

    def _createPostSignature(self, bucketName=None, objectKey=None, expires=300, formParams=None, is_v4=False):
        """Build the JSON policy by hand (as string fragments), base64 it and
        sign it via the SDK's signature helpers.

        :param expires: validity in seconds (defaults to 300 when None)
        :param formParams: form fields; 'url'/'body'/'body-type' feed the
            callback section, the rest become policy conditions
        """
        date = datetime.utcnow()
        shortDate = date.strftime(const.SHORT_DATE_FORMAT)
        longDate = date.strftime(const.LONG_DATE_FORMAT)
        securityProvider = self._get_token()
        expires = 300 if expires is None else util.to_int(expires)
        expires = date + timedelta(seconds=expires)
        expires = expires.strftime(const.EXPIRATION_DATE_FORMAT)
        formParams = self._parse_post_params(formParams, securityProvider, is_v4, bucketName, objectKey, longDate, shortDate)
        policy = [f'{{"expiration":"{expires}",']
        # Inject the callback data (the extension this subclass exists for).
        if formParams.get('url'):
            policy.append('"callback":[')
            policy.append(f'{{"url":"{formParams.get("url", "")}"}},')
            policy.append(f'{{"body":"{formParams.get("body", "")}"}},')
            policy.append(f'{{"body-type":"{formParams.get("body-type", "")}"}},')
            policy.append('], ')
        policy.append('"conditions":[')
        matchAnyBucket = True
        matchAnyKey = True
        conditionAllowKeys = ['acl', 'bucket', 'key', 'success_action_redirect', 'redirect', 'success_action_status']
        for key, value in formParams.items():
            if key:
                key = util.to_string(key).lower()
                if key == 'bucket':
                    matchAnyBucket = False
                elif key == 'key':
                    matchAnyKey = False
                    # The key is a starts-with condition, not an exact match.
                    policy.append(f'["starts-with", "$key", "{value}"],')
                    continue
                # Only allow recognised headers/metadata and whitelisted fields
                # into the policy conditions.
                if key not in const.ALLOWED_REQUEST_HTTP_HEADER_METADATA_NAMES \
                        and not key.startswith(self.ha._get_header_prefix()) \
                        and not key.startswith(const.OBS_HEADER_PREFIX) and key not in conditionAllowKeys:
                    continue
                policy.append('{"')
                policy.append(key)
                policy.append('":"')
                policy.append(util.to_string(value))
                policy.append('"},')
        # Fall back to wildcard conditions when bucket/key were not pinned.
        if matchAnyBucket:
            policy.append('["starts-with", "$bucket", ""],')
        if matchAnyKey:
            policy.append('["starts-with", "$key", ""],')
        policy.append(']}')
        originPolicy = ''.join(policy)
        policy = util.base64_encode(originPolicy)
        result = self._parse_post_signature_type(is_v4, securityProvider, originPolicy, policy, formParams, shortDate, longDate)
        return client._CreatePostSignatureResponse(**result)
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/extensions/storage/obs/obs_inherit.py
obs_inherit.py
import os
import sys

# Make the current working directory importable for the handlers/filters.
curr_path = os.path.abspath(os.path.dirname(os.curdir))
sys.path.append(curr_path)
# _path = os.path.join(os.path.dirname(os.path.dirname(curr_path)), 'output')
LOG_PATH = os.path.join(curr_path, 'logs')
# from src.settings import log_conf
# LOG_PATH = log_conf.get('log_path')
from .filters import *

# dictConfig-style logging schema. Handlers and loggers are filled in
# dynamically per app_name by InitLoggerConfig; the commented entries below
# are kept as reference templates.
LOGGING_CONFIG = {
    "version": 1,
    # Do not disable handlers created before this config is applied.
    "disable_existing_loggers": False,
    "formatters": {
        "simple": {  # simple output format
            'format': '%(asctime)s | %(levelname)s | PID:%(process)d | TID:%(threadName)s | [%(module)s: %(funcName)s] | - %(message)s',
            'datefmt': '%Y-%m-%d %H:%M:%S'
        },
        'standard': {  # more detailed format, customizable
            'format': '%(asctime)s | %(levelname)s | PID:%(process)d | TID:%(threadName)s | [%(module)s: %(funcName)s] | - %(message)s'
        },
        'custom': {  # custom JSON line output
            'format': '{"level": "%(levelname)s", "now": "%(created)f", "PID": "%(process)d", "pathname": "%(pathname)s", "lineno": "%(lineno)d", "message": "%(message)s"}'
        },
    },
    # Level filters (defined in .filters); one per log level.
    "filters": {
        'debug_filter': {
            '()': DebugFilter
        },
        'info_filter': {
            '()': InfoFilter
        },
        'warning_filter': {
            '()': WarningFilter
        },
        'error_filter': {
            '()': ErrorFilter
        },
        'critical_filter': {
            '()': CriticalFilter
        },
        'notset_filter': {
            '()': NotSetFilter
        }
    },
    "handlers": {
        # # Console handler
        # "console": {
        #     # output-stream handler class
        #     "class": "logging.StreamHandler",
        #     # handler level; records below this level do not trigger it
        #     "level": "DEBUG",
        #     # output format
        #     "formatter": "simple",
        #     # write to the system stdout stream
        #     "stream": "ext://sys.stdout"
        # },
        # # File handler at info level, intended for request-style logs
        # "file": {
        #     # time-based multiprocess rotating handler (size-based ones also exist)
        #     "class": "handlers.TimedRotatingFileHandlerMP",
        #     # log level
        #     "level": "DEBUG",
        #     # detailed format, useful for later debugging
        #     "formatter": "standard",
        #     # target file name
        #     "filename": os.path.join(LOG_PATH, 'default', 'info.log'),
        #     # rotation unit: S second, D day, H hour, M minute, W0-W6 weekday ('W0' is Monday)
        #     # e.g. test.log splits off test.log.yyyy-mm-dd at midnight
        #     "when": 'D',
        #     "interval": 1,
        #     'backupCount': 5,  # number of backups kept
        #     "encoding": "utf8",
        #     "filters": ["info_filter"]
        # },
        # "info_file": {
        #     "class": "handlers.TimedRotatingFileHandlerMP",
        #     "level": "INFO",
        #     "formatter": "standard",
        #     "filename": os.path.join(LOG_PATH, 'default', 'info.log'),
        #     "when": 'D',
        #     'backupCount': 5,  # number of backups kept
        #     "encoding": "utf8",
        #     "filters": ["info_filter"]
        # },
        # "err_file": {
        #     "class": "handlers.TimedRotatingFileHandlerMP",
        #     "level": "WARN",
        #     "formatter": "standard",
        #     "filename": os.path.join(LOG_PATH, 'default', 'error.log'),
        #     "when": 'D',
        #     'backupCount': 5,  # number of backups kept
        #     "encoding": "utf8",
        #     "filters": ["info_filter"]
        # },
    },
    # "loggers": {},
    "loggers": {
        # # logger name
        # "default_logger": {
        #     # handlers attached to this logger
        #     'handlers': ['console', 'file'],
        #     # logger level; records below it trigger none of its handlers
        #     'level': "DEBUG",
        #     # whether to propagate to root (root's handlers are added if so)
        #     'propagate': False
        # },
        # "debug_logger": {
        #     'handlers': ['console', 'debug_file'],
        #     'level': "DEBUG",
        #     'propagate': False
        # },
        # "info_logger": {
        #     'handlers': ['console', 'info_file'],
        #     'level': "INFO",
        #     'propagate': False
        # },
        # "warn_logger": {
        #     'handlers': ['console', 'err_file'],
        #     'level': "WARN",
        #     'propagate': False
        # },
        # "error_logger": {
        #     'handlers': ['console', 'err_file'],
        #     'level': "ERROR",
        #     'propagate': False
        # },
        # "critical_logger": {
        #     'handlers': ['console', 'err_file'],
        #     'level': "DEBUG",
        #     'propagate': False
        # },
    },
    # Root logger, used when no logger name is given.
    # "root": {
    #     'handlers': ['file'],
    #     'level': "DEBUG",
    #     'propagate': True
    # },
    # Incremental configuration; defaults to non-incremental.
    # "incremental": False
}
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/logger/config.py
config.py
import os
import sys
sys.path.append(os.path.dirname(os.pardir))
import re

# Coarse OS detection: 'win' when the platform string contains win/Win.
SYS_ENV = 'win' if re.search('[Ww]in', sys.platform) else 'unix'
import logging
from .config import LOG_PATH, LOGGING_CONFIG
from .handlers import TimedRotatingFileHandlerMP as TRFMP
# from logging.config import dictConfig
# from logging.handlers import TimedRotatingFileHandler


class InitLoggerConfig:
    """Builds and applies a dictConfig logging setup, one singleton per app_name."""

    __instance = {}  # singleton store: app_name -> instance
    __is_init = False  # guard against repeated initialization

    def __new__(cls, app_name='default', *args, **kwargs):
        """Singleton per app_name."""
        if app_name not in cls.__instance:
            cls.__instance[app_name] = super().__new__(cls)
        return cls.__instance[app_name]

    def __init__(self, app_name='default', log_config=LOGGING_CONFIG,
                 log_path=LOG_PATH, is_debug=True):
        """Initialize the logger; configure logging via the LOGGING dict.

        :param app_name: logical application name; used for logger/handler
            names and the per-app log directory
        :param log_config: dictConfig-style schema to mutate and apply
        :param log_path: base directory for log files
        :param is_debug: when True everything goes to the console and no
            log directory is created
        """
        # __new__ recycles instances, so skip re-initialization.
        if self.__is_init is True:
            return
        self.log_path = log_path
        log_file_dir = os.path.join(self.log_path, app_name)
        self.__is_init = True
        self.app_name = app_name
        self.is_debug = is_debug
        self.log_config = log_config
        # Names of the per-level handlers that will be generated.
        self.log_levels = ['debug', 'info', 'warning', 'error', 'critical']
        # Default path: logs/${app_name} under the project root.
        self.log_cur_path = os.path.join(self.log_path, self.app_name)
        if not is_debug:
            # In debug mode everything goes to the console, so the log
            # directory is only created for non-debug runs.
            print("初始化%s的logger,日志写入:%s 文件夹下" % (app_name, log_file_dir))
            self.mkdir_log_path()
        self.configure_logging()

    def mkdir_log_path(self):
        # Create the per-app log directory if it does not exist yet.
        if not os.path.exists(self.log_cur_path):
            os.makedirs(self.log_cur_path)

    def configure_logging(self):
        """Generate per-level handlers plus the app logger, then apply the config."""
        # logging.addLevelName(TRACE_LOG_LEVEL, "TRACE")
        if sys.version_info < (3, 7):
            # Workaround so loggers can be pickled on old Pythons:
            # https://bugs.python.org/issue30520
            import pickle
            import logging

            def __reduce__(self):
                if isinstance(self, logging.RootLogger):
                    return logging.getLogger, ()
                if logging.getLogger(self.name) is not self:
                    raise pickle.PicklingError("logger cannot be pickled")
                return logging.getLogger, (self.name,)

            logging.Logger.__reduce__ = __reduce__

        # Dynamically extend LOGGING_CONFIG for this app_name: one handler
        # per level, console or file depending on is_debug.
        for level in self.log_levels:
            handler_name = '%s_%s' % (self.app_name, level)
            if level == 'debug':
                self.log_config['handlers'][
                    handler_name] = self.get_console_handler_conf()
            else:
                lev_up = level.upper()
                if self.is_debug:
                    # is_debug on: send this level to the console too.
                    console_handler_conf = self.get_console_handler_conf(lev_up)
                    self.log_config['handlers'][
                        handler_name] = console_handler_conf
                else:
                    filename = os.path.join(self.log_cur_path,
                                            (level + '.log'))
                    self.log_config['handlers'][
                        handler_name] = self.get_file_handler_conf(
                        filename=filename, level=lev_up)

        # Register the app logger wired to all generated handlers.
        logger_name = '%s_logger' % self.app_name
        self.log_config['loggers'][logger_name] = self.get_logger_conf()

        # Push the assembled configuration into the logging module.
        if self.log_config is not None:
            from logging import config
            if isinstance(self.log_config, dict):
                config.dictConfig(self.log_config)
            else:
                config.fileConfig(self.log_config)
            # dictConfig(LOGGING_CONFIG)

    def get_console_handler_conf(self, level='DEBUG'):
        """Return a StreamHandler config for the given level."""
        console_handler_conf = {
            # output-stream handler class
            "class": "logging.StreamHandler",
            # handler level; records below it do not trigger the handler
            "level": level,
            # output format
            "formatter": "custom",
            # write to stdout
            "stream": "ext://sys.stdout",
            'filters': ['%s_filter' % (level.lower())]
        }
        return console_handler_conf

    @staticmethod
    def get_file_handler_conf(filename: str, level='INFO'):
        """Return a timed-rotating file handler config for the given level."""
        file_handler_conf = {
            "class": f"{TRFMP.__module__}.{TRFMP.__name__}",
            "formatter": "custom",
            # target file name
            # rotation unit: D day, H hour, M minute, W weekday; usually hour or day
            # e.g. test.log splits off test.log.yyyy-mm-dd at midnight
            "when": 'D',
            "interval": 1,
            'backupCount': 5,  # number of backups kept
            "encoding": "utf8",
        }
        if SYS_ENV == 'win':
            # fcntl is unavailable on Windows, fall back to the stdlib handler.
            file_handler_conf[
                'class'] = 'logging.handlers.TimedRotatingFileHandler'
        filters = ['%s_filter' % (level.lower())]
        update_dict = {'filename': filename, 'level': level,
                       'filters': filters}
        file_handler_conf.update(update_dict)
        return file_handler_conf

    def get_email_handler_conf(self):
        """"""

    def get_queue_handler_conf(self):
        """"""

    def get_http_handler_conf(self):
        """"""

    def get_file_rotating_conf(self):
        """Rotate backups by file size."""

    def get_logger_conf(self):
        """
        Logger configuration: attach every generated per-level handler.
        :return: dictConfig logger entry
        """
        logger_conf = {'handlers': [], 'level': "DEBUG", 'propagate': False}
        # DEBUG-only loggers get just the console handler; others get all.
        logger_conf['handlers'] = [
            '%s_%s' % (self.app_name, level) for level in self.log_levels]
        return logger_conf


# Get an everyday logger for the given app name.
def get_logger(app_name: str, is_debug=True):
    InitLoggerConfig(app_name, is_debug=is_debug)
    logger_name = '%s_logger' % app_name
    logger = logging.getLogger(logger_name)
    return logger


if __name__ == '__main__':
    # Singleton-mode smoke test.
    logger = get_logger('cml_test', is_debug=True)
    logger.error('error log')
    logger.debug('debug log')
    logger.debug('debug log')
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/logger/__init__.py
__init__.py
from logging import StreamHandler, FileHandler
from logging.handlers import RotatingFileHandler, TimedRotatingFileHandler, SMTPHandler
import fcntl, time, os, codecs, string, re, types, pickle, struct, shutil
from stat import ST_DEV, ST_INO, ST_MTIME


class StreamHandlerMP(StreamHandler):
    """
    A handler class which writes logging records, appropriately formatted,
    to a stream. Use for multiprocess.
    """

    def emit(self, record):
        """
        Emit a record.
        First seek to the end of file so multiple processes can log to the
        same file without overwriting each other's writes.
        """
        try:
            if hasattr(self.stream, "seek"):
                self.stream.seek(0, os.SEEK_END)
        except IOError as e:
            # Best effort: an unseekable stream still gets the record.
            pass
        StreamHandler.emit(self, record)


class FileHandlerMP(FileHandler, StreamHandlerMP):
    """
    A handler class which writes formatted logging records to disk files
    for multiprocess.
    """

    def emit(self, record):
        """
        Emit a record.
        If the stream was not opened because 'delay' was specified in the
        constructor, open it before calling the superclass's emit.
        """
        if self.stream is None:
            self.stream = self._open()
        StreamHandlerMP.emit(self, record)


class RotatingFileHandlerMP(RotatingFileHandler, FileHandlerMP):
    """
    Handler for logging to a set of files, which switches from one file to
    the next when the current file reaches a certain size.

    Based on logging.RotatingFileHandler, modified for multiprocess use.
    """

    # Directory holding the per-file lock files used to serialize writers.
    _lock_dir = '.lock'
    if os.path.exists(_lock_dir):
        pass
    else:
        os.mkdir(_lock_dir)

    def doRollover(self):
        """
        Do a rollover, as described in __init__().
        For multiprocess we use shutil.copy instead of rename, so other
        processes holding the old file handle keep a valid target.
        """
        self.stream.close()
        if self.backupCount > 0:
            # Shift backups up: log.1 -> log.2, ..., dropping the oldest.
            for i in range(self.backupCount - 1, 0, -1):
                sfn = "%s.%d" % (self.baseFilename, i)
                dfn = "%s.%d" % (self.baseFilename, i + 1)
                if os.path.exists(sfn):
                    if os.path.exists(dfn):
                        os.remove(dfn)
                    shutil.copy(sfn, dfn)
            dfn = self.baseFilename + ".1"
            if os.path.exists(dfn):
                os.remove(dfn)
            if os.path.exists(self.baseFilename):
                shutil.copy(self.baseFilename, dfn)
        # Reopen the base file truncated for new records.
        self.mode = 'w'
        self.stream = self._open()

    def emit(self, record):
        """
        Emit a record.

        Output the record to the file, catering for rollover as described
        in doRollover(). Cross-process mutual exclusion is done with an
        fcntl lock on a per-level lock file. Any better method?
        """
        try:
            if self.shouldRollover(record):
                self.doRollover()
            FileLock = self._lock_dir + '/' + os.path.basename(self.baseFilename) + '.' + record.levelname
            f = open(FileLock, "w+")
            fcntl.flock(f.fileno(), fcntl.LOCK_EX)
            FileHandlerMP.emit(self, record)
            fcntl.flock(f.fileno(), fcntl.LOCK_UN)
            f.close()
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)


class TimedRotatingFileHandlerMP(TimedRotatingFileHandler, FileHandlerMP):
    """
    Handler for logging to a file, rotating the log file at certain timed
    intervals.

    If backupCount is > 0, when rollover is done, no more than backupCount
    files are kept - the oldest ones are deleted.
    """

    # Directory holding the per-file lock files used to serialize writers.
    _lock_dir = '.lock'
    if os.path.exists(_lock_dir):
        pass
    else:
        os.mkdir(_lock_dir)

    def __init__(self, filename, when='h', interval=1, backupCount=0,
                 encoding=None, delay=0, utc=0):
        FileHandlerMP.__init__(self, filename, 'a', encoding, delay)
        self.encoding = encoding
        self.when = when.upper()
        self.backupCount = backupCount
        self.utc = utc
        # Set the filename suffix used when a rollover occurs.
        # Current 'when' events supported:
        # S - Seconds
        # M - Minutes
        # H - Hours
        # D - Days
        # midnight - roll over at midnight
        # W{0-6} - roll over on a certain day; 0 - Monday
        #
        # Case of the 'when' specifier is not important; lower or upper
        # case will work.
        if self.when == 'S':
            self.suffix = "%Y-%m-%d_%H-%M-%S"
            self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}$"
        elif self.when == 'M':
            self.suffix = "%Y-%m-%d_%H-%M"
            self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}$"
        elif self.when == 'H':
            self.suffix = "%Y-%m-%d_%H"
            self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}$"
        elif self.when == 'D' or self.when == 'MIDNIGHT':
            self.suffix = "%Y-%m-%d"
            self.extMatch = r"^\d{4}-\d{2}-\d{2}$"
        elif self.when.startswith('W'):
            if len(self.when) != 2:
                raise ValueError("You must specify a day for weekly rollover from 0 to 6 (0 is Monday): %s" % self.when)
            if self.when[1] < '0' or self.when[1] > '6':
                raise ValueError("Invalid day specified for weekly rollover: %s" % self.when)
            self.dayOfWeek = int(self.when[1])
            self.suffix = "%Y-%m-%d"
            self.extMatch = r"^\d{4}-\d{2}-\d{2}$"
        else:
            raise ValueError("Invalid rollover interval specified: %s" % self.when)

        self.extMatch = re.compile(self.extMatch)

        # Unlike the stdlib handler, only interval == 1 is supported here.
        if interval != 1:
            raise ValueError("Invalid rollover interval, must be 1")

    def shouldRollover(self, record):
        """
        Determine if rollover should occur.

        record is not used, as we are just comparing times, but it is
        needed so the method signatures are the same.

        Rollover is triggered when the relevant time-tuple field of "now"
        differs from the log file's mtime (no rollover state is kept in
        memory, so it works across processes).
        """
        if not os.path.exists(self.baseFilename):
            # print "file don't exist"
            return 0
        cTime = time.localtime(time.time())
        mTime = time.localtime(os.stat(self.baseFilename)[ST_MTIME])
        if self.when == "S" and cTime[5] != mTime[5]:
            # print "cTime:", cTime[5], "mTime:", mTime[5]
            return 1
        elif self.when == 'M' and cTime[4] != mTime[4]:
            # print "cTime:", cTime[4], "mTime:", mTime[4]
            return 1
        elif self.when == 'H' and cTime[3] != mTime[3]:
            # print "cTime:", cTime[3], "mTime:", mTime[3]
            return 1
        elif (self.when == 'MIDNIGHT' or self.when == 'D') and cTime[2] != mTime[2]:
            # print "cTime:", cTime[2], "mTime:", mTime[2]
            return 1
        elif self.when == 'W' and cTime[1] != mTime[1]:
            # NOTE(review): self.when is 'W0'..'W6' after __init__, so this
            # branch never matches; also index 1 is tm_mon, not a weekday.
            # Looks like weekly rollover is effectively unsupported — confirm.
            # print "cTime:", cTime[1], "mTime:", mTime[1]
            return 1
        else:
            return 0

    def doRollover(self):
        """
        Do a rollover; in this case, a date/time stamp is appended to the
        filename when the rollover happens.  However, you want the file to
        be named for the start of the interval, not the current time.  If
        there is a backup count, then we have to get a list of matching
        filenames, sort them and remove the one with the oldest suffix.

        For multiprocess, we use shutil.copy instead of rename.
        """
        if self.stream:
            self.stream.close()
        # get the time that this sequence started at and make it a TimeTuple
        # t = self.rolloverAt - self.interval
        t = int(time.time())
        if self.utc:
            timeTuple = time.gmtime(t)
        else:
            timeTuple = time.localtime(t)
        dfn = self.baseFilename + "." + time.strftime(self.suffix, timeTuple)
        if os.path.exists(dfn):
            os.remove(dfn)
        if os.path.exists(self.baseFilename):
            shutil.copy(self.baseFilename, dfn)
            # print "%s -> %s" % (self.baseFilename, dfn)
            # os.rename(self.baseFilename, dfn)
        if self.backupCount > 0:
            # find the oldest log file and delete it
            # s = glob.glob(self.baseFilename + ".20*")
            # if len(s) > self.backupCount:
            #     s.sort()
            #     os.remove(s[0])
            for s in self.getFilesToDelete():
                os.remove(s)
        # Reopen the base file truncated for new records.
        self.mode = 'w'
        self.stream = self._open()

    def emit(self, record):
        """
        Emit a record.

        Output the record to the file, catering for rollover as described
        in doRollover(). Cross-process mutual exclusion is done with an
        fcntl lock on a per-level lock file. Any better method?
        """
        try:
            if self.shouldRollover(record):
                self.doRollover()
            FileLock = self._lock_dir + '/' + os.path.basename(self.baseFilename) + '.' + record.levelname
            f = open(FileLock, "w+")
            fcntl.flock(f.fileno(), fcntl.LOCK_EX)
            FileHandlerMP.emit(self, record)
            fcntl.flock(f.fileno(), fcntl.LOCK_UN)
            f.close()
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/logger/handlers.py
handlers.py
from typing import Any, Dict, Generic, List, Optional, Type, TypeVar, Union
from pydantic import BaseModel, AnyUrl
from pymongo.client_session import ClientSession

try:
    from pymongo import InsertOne, DeleteOne, ReplaceOne, UpdateMany
    from pymongo.collection import Collection
    from pymongo import MongoClient
except:
    pass

ModelType = TypeVar("ModelType", bound=str)
CreateSchemaType = TypeVar("CreateSchemaType", bound=BaseModel)
UpdateSchemaType = TypeVar("UpdateSchemaType", bound=BaseModel)
DictorList = TypeVar("DictorList", dict, list)
# DictorList = Union[Dict, List]


class MongoCRUDBase(Generic[CreateSchemaType, UpdateSchemaType]):
    """Thin CRUD wrapper around one pymongo collection.

    Logical deletion is modeled with an ``isDelete`` boolean field on the
    documents; methods taking ``is_logical_del`` filter on / set it.
    """

    def __init__(
            self,
            collection_name: ModelType,
            db_name: str = "test_db",
            db_url: AnyUrl = "mongodb://localhost:27017/",
            client: ClientSession = None
    ):
        """
        :param collection_name: target collection name
        :param db_name: database name
        :param db_url: connection URL, used only when no client is supplied
        :param client: optional pre-built MongoClient to reuse
        """
        if client:
            self.client = client
        else:
            self.client = MongoClient(db_url)
        self.db = self.client[db_name]
        self.collection = self.db[collection_name]
        # self.coll_name = collection_name

    def count(self, opt: dict = None, session: ClientSession = None):
        """
        Count documents: exact count when a filter is given, otherwise the
        fast metadata-based estimate.
        :param opt: filter document
        :param session: transaction session
        :return: document count
        """
        if opt:
            return self.collection.count_documents(opt, session=session)
        return self.collection.estimated_document_count(session=session)

    def get(self, opt: dict = None, is_logical_del: bool = False,
            select_col: DictorList = None, session: ClientSession = None):
        """
        Fetch a single document.
        :param opt: filter document
        :param is_logical_del: if True, only match non-logically-deleted docs
        :param select_col: field-name list, or include/exclude dict, to project
        :param session: transaction session
        :return: matching document or None
        """
        # Copy instead of mutating the caller's dict.  This also fixes the
        # original AttributeError crash when opt is None and
        # is_logical_del=True (None.update(...)).
        opt = dict(opt) if opt else {}
        if is_logical_del:
            opt["isDelete"] = False
        return self.collection.find_one(opt, projection=select_col,
                                        session=session)

    def list(self,
             opt: dict = None,
             select_col: DictorList = None,
             limit: int = 0,
             offset: int = 0,
             sort: List[tuple] = None,
             is_logical_del: bool = False,
             session: ClientSession = None
             ):
        """
        Query a list of documents.

        `projection` (optional): a list of field names to return in the
        result set, or a dict specifying fields to include or exclude.
        If it is a list, "_id" is always returned.  Use a dict to exclude
        fields (e.g. projection={'_id': False}).

        :param opt: filter document
        :param select_col: e.g. {'_id': 0, 'author': 1, 'age': 1}
        :param limit: 0 means no limit
        :param offset: number of documents to skip
        :param sort: [('field1', pymongo.ASCENDING),
                      ('field2', pymongo.DESCENDING)]
        :param is_logical_del: if True, only match non-logically-deleted docs
        :param session: transaction session
        :return: list of documents
        """
        # Copy the filter so the caller's dict is never mutated.
        opt = dict(opt) if opt else {}
        if is_logical_del:
            opt["isDelete"] = False
        data = dict(
            filter=opt,
            projection=select_col,
            skip=offset,
            limit=limit,
            sort=sort
        )
        results = list(self.collection.find(**data, session=session))
        return results

    def create(self, data: DictorList, is_return_obj: bool = False,
               session: ClientSession = None):
        """
        Insert one document (dict) or many (list of dicts).
        :param data: document or list of documents
        :param is_return_obj: if True, return the inserted document(s)
            instead of the pymongo result object
        :param session: transaction session
        :return: pymongo insert result, or the inserted document(s)
        :raises Exception: when data is neither dict nor list
        """
        if isinstance(data, dict):
            result = self.collection.insert_one(data, session=session)
            if is_return_obj:
                result = self.collection.find_one(
                    {'_id': result.inserted_id}, session=session)
        elif isinstance(data, list):
            result = self.collection.insert_many(data, session=session)
            if is_return_obj:
                result = self.list({'_id': {'$in': result.inserted_ids}},
                                   session=session)
        else:
            raise Exception('Create failed!')
        return result

    def update(self, opt, data: Dict, is_many: bool = False,
               is_set: bool = True, session: ClientSession = None):
        """
        Update operation.
        :param opt: filter, e.g.
            opt={'field1': 'xxx'}
            opt={'field1': 'xxx', 'field2': 123}
            opt={'field1': {'$gt': 'a'}, 'field2': {'$regex': '^d'}}
        :param data: fields to update: {'field': 'xxx'}
        :param is_many: update many documents (default False)
        :param is_set: wrap data in $set (default True)
        :param session: transaction session
        :return: pymongo UpdateResult when acknowledged, else None
        """
        if is_set:
            update = {"$set": data}
        else:
            update = data
        if not is_many:
            result = self.collection.update_one(opt, update, session=session)
            # result = self.collection.find_one_and_update(opt, update)
        else:
            result = self.collection.update_many(opt, update, session=session)
        if result.acknowledged:
            return result

    def delete(self, opt, is_logical_del: bool = False,
               is_many: bool = False, session: ClientSession = None):
        """
        Delete operation: logical (sets isDelete=True) or physical.
        :param opt: filter document
        :param is_logical_del: logical delete when True, physical otherwise
        :param is_many: delete many documents
        :param session: transaction session
        :return: number of modified (logical) or deleted (physical) documents
        """
        if is_logical_del:
            update = {"$set": {"isDelete": True}}
            if not is_many:
                result = self.collection.update_one(
                    filter=opt, update=update, session=session)
            else:
                result = self.collection.update_many(
                    filter=opt, update=update, session=session)
            return result.modified_count
        else:
            if not is_many:
                result = self.collection.delete_one(
                    filter=opt, session=session)
            else:
                result = self.collection.delete_many(
                    filter=opt, session=session)
            return result.deleted_count

    def batch_update(self, bulk_update_datas: List[dict],
                     session: ClientSession = None):
        """
        Bulk update via UpdateMany operations.
        :param bulk_update_datas: format: [{"opt": {}, "data": {}}]
        :param session: transaction session
        :return: number of modified documents (0 for empty input)
        """
        if not bulk_update_datas:
            return 0
        requests = [
            UpdateMany(item['opt'], item['data'])
            for item in bulk_update_datas
        ]
        result = self.collection.bulk_write(requests=requests,
                                            session=session)
        return result.modified_count

    def aggregate(self, pipeline: List[dict],
                  session: ClientSession = None, **kwargs):
        """
        Run an aggregation pipeline and materialize the cursor.
        :param pipeline: aggregation stages
        :param session: transaction session
        :return: list of result documents
        """
        cursor = self.collection.aggregate(pipeline, session=session, **kwargs)
        return list(cursor)


if __name__ == '__main__':
    db = MongoCRUDBase('hello_cml')
    print(db.count())
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/db/pymongo_crud_base.py
pymongo_crud_base.py
from typing import Any, Dict, Generic, List, Optional, Type, TypeVar, Union

try:
    from pydantic import BaseModel
    import sqlalchemy
    from sqlalchemy import text
    from sqlalchemy.orm import Session
    from sqlalchemy.ext.declarative import as_declarative, declared_attr
except ImportError:
    pass

from yzcore.core.encoders import jsonable_encoder


@as_declarative()
class Base:
    """Declarative base: derives __tablename__ from the class name."""

    # Generate __tablename__ automatically
    @declared_attr
    def __tablename__(cls) -> str:
        return cls.__name__.lower()

    id: Any
    __name__: str


ModelType = TypeVar("ModelType", bound=Base)
CreateSchemaType = TypeVar("CreateSchemaType", bound=BaseModel)
UpdateSchemaType = TypeVar("UpdateSchemaType", bound=BaseModel)


class OrmCRUDBase(Generic[ModelType, CreateSchemaType, UpdateSchemaType]):
    def __init__(self, model: Type[ModelType]):
        """
        CRUD object with default methods to Create, Read, Update, Delete (CRUD).

        **Parameters**

        * `model`: A SQLAlchemy model class
        * `schema`: A Pydantic model (schema) class
        """
        assert sqlalchemy is not None, "'sqlalchemy' must be installed to use OrmCRUDBase"
        self.model = model

    def count(self, db: Session, **kwargs):
        """
        Count rows, optionally filtered by equality conditions.
        :param db: SQLAlchemy session
        :param kwargs: filter_by conditions
        :return: row count
        """
        if kwargs:
            return db.query(self.model).filter_by(**kwargs).count()
        return db.query(self.model).count()

    def get(self, db: Session, model_id: Any) -> Optional[ModelType]:
        """
        Fetch one row by primary key.
        :param db: SQLAlchemy session
        :param model_id: primary key value
        :return: model instance or None
        """
        return db.query(self.model).get(model_id)

    def get_one(self, db: Session, **kwargs):
        """
        Fetch a single row matching the given conditions (None if absent).
        :param db: SQLAlchemy session
        :param kwargs: filter_by conditions
        :return: model instance or None
        """
        return db.query(self.model).filter_by(**kwargs).one_or_none()

    def list(
            self, db: Session, *, sort: List[str] = None,
            offset: int = 0, limit: int = 100, **kwargs
    ) -> List[ModelType]:
        """
        Fetch a list of rows matching the given conditions.
        :param db: SQLAlchemy session
        :param sort: fields to order by, e.g. ['-create_time', 'update_time']
            (leading '-' means descending)
        :param offset: rows to skip
        :param limit: maximum rows to return
        :param kwargs: filter_by conditions
        :return: list of model instances
        """
        if sort:
            # Rendered as a raw ORDER BY clause, e.g. "-create_time,update_time".
            sort = text(','.join(sort))
        if kwargs:
            return db.query(self.model).filter_by(
                **kwargs).order_by(sort).offset(offset).limit(limit).all()
        else:
            return db.query(self.model).order_by(sort).offset(
                offset).limit(limit).all()

    def create(
            self, db: Session, *,
            data: Union[Dict[str, Any], CreateSchemaType],
            is_transaction: bool = False
    ) -> ModelType:
        """
        Insert a row and return the created model.
        :param db: SQLAlchemy session
        :param data: creation payload (dict or pydantic schema)
        :param is_transaction: when True, skip commit so the caller controls
            the transaction boundary
        :return: created model instance
        """
        if isinstance(data, BaseModel):
            data = jsonable_encoder(data)
        db_obj = self.model(**data)  # type: ignore
        db.add(db_obj)
        if not is_transaction:
            db.commit()
            db.refresh(db_obj)
        return db_obj

    def update(
            self, db: Session, *,
            model_id: int = None,
            obj: ModelType = None,
            query: Dict[str, Any] = None,
            data: Union[UpdateSchemaType, Dict[str, Any]],
            is_return_obj: bool = False,
            is_transaction: bool = False
    ) -> ModelType:
        """
        Update a single object.

        Two ways to select the target:
        1. pass the model object to update along with the data;
        2. pass an id (or query dict) along with the data.

        Note: if a model object is passed, 'is_return_obj=False' is ignored
        and the updated model is returned.

        :param db: SQLAlchemy session
        :param model_id: model id
        :param obj: model object
        :param query: model query parameters (filter_by conditions)
        :param data: fields to update
        :param is_return_obj: whether to return the model; default False,
            which returns the number of updated rows instead
        :param is_transaction: when True, skip commit so the caller controls
            the transaction boundary
        :return: update_count or obj or None
        """
        if not any((model_id, obj, query)):
            raise ValueError('At least one of [model_id、query、obj] exists')
        if isinstance(data, dict):
            update_data = data
        else:
            update_data = data.dict(exclude_unset=True)

        if not is_return_obj and not obj:
            # Fast path: bulk UPDATE without loading the object.
            if model_id:
                update_count = db.query(self.model).filter(
                    self.model.id == model_id).update(update_data)
            else:
                update_count = db.query(self.model).filter_by(
                    **query).update(update_data)
            if not is_transaction:
                db.commit()
            return update_count
        else:
            # Object path: load (if needed), set attributes, return the model.
            if not obj:
                if model_id:
                    obj = self.get(db, model_id)
                else:
                    obj = self.get_one(db, **query)
            if obj:
                obj_data = jsonable_encoder(obj)
                for field in obj_data:
                    if field in update_data:
                        setattr(obj, field, update_data[field])
                db.add(obj)
                if not is_transaction:
                    db.commit()
                    db.refresh(obj)
            return obj

    def delete(
            self, db: Session, *,
            model_id: int,
            is_return_obj: bool = False,
            is_transaction: bool = False
    ) -> ModelType:
        """
        Delete one row by id.
        :param db: SQLAlchemy session
        :param model_id: model id
        :param is_return_obj: whether to return the deleted model; default
            False, which returns the number of deleted rows instead
        :param is_transaction: when True, skip commit so the caller controls
            the transaction boundary
        :return: deleted model or deleted-row count
        """
        if is_return_obj:
            obj = db.query(self.model).get(model_id)
            db.delete(obj)
            if not is_transaction:
                db.commit()
            return obj
        else:
            del_count = db.query(self.model).filter(
                self.model.id == model_id).delete(synchronize_session=False)
            if not is_transaction:
                db.commit()
            return del_count

    def bulk_delete(self, db: Session, ids: List[int] = None, **kwargs):
        """
        Bulk delete by id list, or by filter_by conditions when no ids given.
        NOTE(review): unlike delete(), this never commits — presumably the
        caller is expected to commit; confirm against call sites.
        :param db: SQLAlchemy session
        :param ids: list of ids to delete
        :param kwargs: filter_by conditions
        :return: deleted-row count
        """
        if ids:
            del_count = db.query(self.model).filter(
                self.model.id.in_(ids)).delete(synchronize_session=False)
        else:
            del_count = db.query(self.model).filter_by(
                **kwargs).delete(synchronize_session=False)
        return del_count
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/db/sqlalchemy_crud_base.py
sqlalchemy_crud_base.py
from collections import defaultdict
from enum import Enum
from pathlib import PurePath
from types import GeneratorType
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union

from pydantic import BaseModel
from pydantic.json import ENCODERS_BY_TYPE

SetIntStr = Set[Union[int, str]]
DictIntStrAny = Dict[Union[int, str], Any]


def generate_encoders_by_class_tuples(
    type_encoder_map: Dict[Any, Callable]
) -> Dict[Callable, Tuple]:
    """Invert a {type: encoder} map into {encoder: (types...)} for isinstance checks."""
    encoders_by_class_tuples: Dict[Callable, Tuple] = defaultdict(tuple)
    for type_, encoder in type_encoder_map.items():
        encoders_by_class_tuples[encoder] += (type_,)
    return encoders_by_class_tuples


encoders_by_class_tuples = generate_encoders_by_class_tuples(ENCODERS_BY_TYPE)


def jsonable_encoder(
    obj: Any,
    include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
    exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
    by_alias: bool = True,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    custom_encoder: dict = {},
    sqlalchemy_safe: bool = True,
) -> Any:
    """Recursively convert *obj* into JSON-serializable built-in types.

    Handles pydantic models, enums, paths, primitives, dicts, sequences,
    then falls back to pydantic's ENCODERS_BY_TYPE, and finally to
    ``dict(obj)`` / ``vars(obj)``.

    :param include: keys/fields to include (applies to models and dicts)
    :param exclude: keys/fields to exclude
    :param by_alias: use pydantic field aliases when dumping models
    :param exclude_unset: drop model fields that were never set
    :param exclude_defaults: drop model fields equal to their defaults
    :param exclude_none: drop None values
    :param custom_encoder: extra {type: encoder} overrides
        NOTE(review): mutable default arg; safe only while callers never
        mutate it — confirm
    :param sqlalchemy_safe: skip "_sa"-prefixed keys from SQLAlchemy objects
    :raises ValueError: when no strategy can serialize *obj*
    """
    if include is not None and not isinstance(include, set):
        include = set(include)
    if exclude is not None and not isinstance(exclude, set):
        exclude = set(exclude)
    if isinstance(obj, BaseModel):
        # Merge the model's configured json_encoders with the caller's.
        encoder = getattr(obj.__config__, "json_encoders", {})
        if custom_encoder:
            encoder.update(custom_encoder)
        obj_dict = obj.dict(
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_none=exclude_none,
            exclude_defaults=exclude_defaults,
        )
        if "__root__" in obj_dict:
            obj_dict = obj_dict["__root__"]
        return jsonable_encoder(
            obj_dict,
            exclude_none=exclude_none,
            exclude_defaults=exclude_defaults,
            custom_encoder=encoder,
            sqlalchemy_safe=sqlalchemy_safe,
        )
    if isinstance(obj, Enum):
        return obj.value
    if isinstance(obj, PurePath):
        return str(obj)
    if isinstance(obj, (str, int, float, type(None))):
        return obj
    if isinstance(obj, dict):
        encoded_dict = {}
        for key, value in obj.items():
            # Keep the entry only if it passes the SQLAlchemy-internal-key
            # check, the exclude_none check, and the include/exclude sets.
            if (
                (
                    not sqlalchemy_safe
                    or (not isinstance(key, str))
                    or (not key.startswith("_sa"))
                )
                and (value is not None or not exclude_none)
                and ((include and key in include) or not exclude or key not in exclude)
            ):
                encoded_key = jsonable_encoder(
                    key,
                    by_alias=by_alias,
                    exclude_unset=exclude_unset,
                    exclude_none=exclude_none,
                    custom_encoder=custom_encoder,
                    sqlalchemy_safe=sqlalchemy_safe,
                )
                encoded_value = jsonable_encoder(
                    value,
                    by_alias=by_alias,
                    exclude_unset=exclude_unset,
                    exclude_none=exclude_none,
                    custom_encoder=custom_encoder,
                    sqlalchemy_safe=sqlalchemy_safe,
                )
                encoded_dict[encoded_key] = encoded_value
        return encoded_dict
    if isinstance(obj, (list, set, frozenset, GeneratorType, tuple)):
        encoded_list = []
        for item in obj:
            encoded_list.append(
                jsonable_encoder(
                    item,
                    include=include,
                    exclude=exclude,
                    by_alias=by_alias,
                    exclude_unset=exclude_unset,
                    exclude_defaults=exclude_defaults,
                    exclude_none=exclude_none,
                    custom_encoder=custom_encoder,
                    sqlalchemy_safe=sqlalchemy_safe,
                )
            )
        return encoded_list

    # Exact-type match in custom encoders wins; fall back to isinstance.
    if custom_encoder:
        if type(obj) in custom_encoder:
            return custom_encoder[type(obj)](obj)
        else:
            for encoder_type, encoder in custom_encoder.items():
                if isinstance(obj, encoder_type):
                    return encoder(obj)

    if type(obj) in ENCODERS_BY_TYPE:
        return ENCODERS_BY_TYPE[type(obj)](obj)
    for encoder, classes_tuple in encoders_by_class_tuples.items():
        if isinstance(obj, classes_tuple):
            return encoder(obj)

    # Last resort: try dict(obj), then vars(obj); give up with both errors.
    errors: List[Exception] = []
    try:
        data = dict(obj)
    except Exception as e:
        errors.append(e)
        try:
            data = vars(obj)
        except Exception as e:
            errors.append(e)
            raise ValueError(errors)
    return jsonable_encoder(
        data,
        by_alias=by_alias,
        exclude_unset=exclude_unset,
        exclude_defaults=exclude_defaults,
        exclude_none=exclude_none,
        custom_encoder=custom_encoder,
        sqlalchemy_safe=sqlalchemy_safe,
    )
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/core/encoders.py
encoders.py
from typing import List, Tuple


class ValuesSortDict(dict):
    """A dict whose values are lists of fixed-length tuples kept sorted.

    Every mutation through :meth:`add` / :meth:`increase` re-inserts the
    affected tuple at its sorted position, ascending by the tuple's second
    element; equal keys keep insertion order.
    """

    def __init__(self, tuple_len_limit=4, tuple_default_value=('', 0, 0.0, 0.0)):
        """
        :param tuple_len_limit: required length of every stored tuple
            (default 4).
        :param tuple_default_value: template used to pad a bare string value
            out to a full tuple.
        """
        self._len_limit = tuple_len_limit
        self._default_tuple = tuple_default_value
        super().__init__()

    def __repr__(self):
        return "<%s: %s>" % (self.__class__.__name__, super().__repr__())

    def __getitem__(self, key):
        # Missing keys read as a fresh empty list instead of raising.
        try:
            return super().__getitem__(key)
        except KeyError:
            return []

    def __setitem__(self, key, value: List[Tuple]):
        if isinstance(value, list):
            stored = value
        elif isinstance(value, tuple) and len(value) == self._len_limit:
            stored = [value]
        else:
            raise ValueError('This is not the value I want.')
        super().__setitem__(key, stored)

    def add(self, key, value: tuple):
        """Insert *value* into ``self[key]`` at its sorted position.

        :param key: mapping key.
        :param value: a tuple of length ``tuple_len_limit``, or a bare
            string that is padded with the tail of the default tuple.
        """
        if isinstance(value, str):
            value = (value, *tuple(self._default_tuple)[1:])
        if not isinstance(value, tuple):
            raise ValueError('This is not the value I want.')
        if len(value) != self._len_limit:
            raise ValueError(f'Error: len(tuple_value)!={self._len_limit}')
        entries = self[key]
        self[key] = self._sort_values(value, entries)

    def _sort_values(self, value: Tuple, _values: List[Tuple]) -> List[Tuple]:
        """Insert *value* (in place) before the first entry whose sort key
        (index 1) is strictly greater; append if none is."""
        for idx, existing in enumerate(_values):
            if value[1] < existing[1]:
                _values.insert(idx, value)
                break
        else:
            _values.append(value)
        return _values

    def increase(self, key, v, inc=1):
        """Bump one entry's sort key under *key* by *inc* and re-sort it.

        :param key: mapping key.
        :param v: either the entry's first element (a string) or the full
            tuple as currently stored.
        :param inc: increment added to the entry's second element.
        """
        entries = self[key]
        if isinstance(v, str):
            for idx, entry in enumerate(entries):
                if entry[0] != v:
                    continue
                hit = entries.pop(idx)
                bumped = (hit[0], hit[1] + inc, *hit[2:])
                self[key] = self._sort_values(bumped, entries)
                return
            # Nothing matched the identifier.
            raise ValueError(f'The value[0]:[{v}] is not exist')
        if isinstance(v, tuple) and len(v) == self._len_limit:
            try:
                entries.remove(v)
            except ValueError:
                pass  # tuple not stored as-is: treat as a fresh entry
            bumped = (v[0], v[1] + inc, *v[2:])
            self[key] = self._sort_values(bumped, entries)
        else:
            raise ValueError(f'The value:[{v}] is error')


if __name__ == '__main__':
    import random
    import time

    sample = [(f"ip{i}", i) for i in range(100000)]
    pick = random.randint(1, 100000)
    value = (f"ip{pick}", pick)
    print(value)

    d = ValuesSortDict()
    d.add('model', value)
    t0 = time.time()
    d.increase("model", value)
    t1 = time.time()
    d.increase("model", value[0])
    t2 = time.time()
    print("===>add:", t1 - t0)
    print("===>add0:", t2 - t1)
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/core/datastructures.py
datastructures.py
from yzcore.extensions.storage import StorageManage, StorageRequestError
from yzcore.default_settings import default_setting as settings
from abc import ABCMeta, abstractmethod
from yzcore.utils.decorator import cached_property, classproperty

__all__ = [
    'StorageRequestError',
    'StorageController',
    'StorageManage',
]


class StorageController(metaclass=ABCMeta):
    """Object-storage controller.

    Organization-specific storage::

        >>> storage_ctrl = await StorageController.init(organiz_id='organiz_id')
        >>> storage_ctrl.organiz_storage_conf   # custom conf; empty dict if unset
        >>> storage_ctrl.storage_conf           # custom conf, or the global one
        >>> storage_ctrl.public_storage_manage  # non-encrypted bucket manager
        >>> storage_ctrl.private_storage_manage # encrypted bucket manager

    Global storage::

        >>> global_storage_ctrl = StorageController.sync_init()
        >>> global_storage_ctrl.public_storage_manage
        >>> global_storage_ctrl.private_storage_manage
    """

    @classproperty
    def global_storage_conf(cls):
        """Global object-storage configuration taken from settings."""
        return settings.STORAGE_CONF

    def __init__(self, organiz_id: str = ''):
        """Do not instantiate directly; use
        ``StorageController.init()`` / ``StorageController.sync_init()``."""
        self.organiz_id = organiz_id
        self.organiz_storage_conf = dict()  # organization-specific conf
        self.storage_conf = dict()          # conf actually in use
        self.storage_mode = ''

    @classmethod
    async def init(cls, organiz_id: str = ''):
        """Async constructor; an empty ``organiz_id`` yields global storage.

        ``__init__`` cannot await, so the organization conf lookup is done
        here. An organization conf is overlaid on a copy of the global conf
        so that keys missing from the custom conf are not lost.
        """
        storage_ctrl = cls(organiz_id)
        if organiz_id:
            organiz_storage_conf = await storage_ctrl._get_organiz_storage_conf()
            if organiz_storage_conf:
                _storage_conf = cls.global_storage_conf.copy()
                _storage_conf.update(organiz_storage_conf)
                storage_ctrl.organiz_storage_conf = _storage_conf
        storage_ctrl.storage_conf = storage_ctrl.organiz_storage_conf or cls.global_storage_conf
        storage_ctrl.storage_mode = storage_ctrl.storage_conf['mode']
        return storage_ctrl

    @classmethod
    def sync_init(cls):
        """Synchronous constructor; only valid for the global storage conf."""
        storage_ctrl = cls('')
        storage_ctrl.storage_conf = cls.global_storage_conf
        storage_ctrl.storage_mode = storage_ctrl.storage_conf['mode']
        return storage_ctrl

    @abstractmethod
    async def _get_organiz_storage_conf(self):
        """Fetch the organization's custom object-storage configuration.

        :return: dict(**organiz_storage_conf) or None
        """

    @cached_property
    def public_storage_manage(self):
        """Manager for the non-encrypted bucket."""
        return self._init_public_storage_manage(self.storage_conf)

    @cached_property
    def private_storage_manage(self):
        """Manager for the encrypted bucket."""
        return self._init_private_storage_manage(self.storage_conf)

    @classmethod
    async def check_organiz_conf(cls, organiz_conf: dict):
        """Validate a custom storage conf by probing both buckets."""
        public_storage = cls._init_public_storage_manage(organiz_conf)
        private_storage = cls._init_private_storage_manage(organiz_conf)
        public_storage.check()
        private_storage.check()

    @classmethod
    def _init_public_storage_manage(cls, storage_conf: dict):
        """Build the manager for the non-encrypted bucket.

        Expected ``storage_conf`` keys:
            mode: str
            access_key_id: str
            access_key_secret: str
            endpoint: str
            public_bucket_name: str             # required here
            private_bucket_name: Optional[str]  # used by the encrypted bucket
            image_domain: Optional[str]
            asset_domain: Optional[str]
            private_domain: Optional[str]       # used by the encrypted bucket

        :return: a StorageManage instance (ObsManager or OssManager).
        """
        # Update a COPY: the same dict is handed to both the public and the
        # private initializer (check_organiz_conf, and both cached properties
        # share self.storage_conf). Mutating it in place leaked bucket_name /
        # domain overrides from one manager into the other and back to the
        # caller's conf dict.
        conf = dict(storage_conf)
        conf.update({
            'bucket_name': conf['public_bucket_name'],  # non-encrypted bucket
            'cache_path': cls.global_storage_conf.get('cache_path'),                    # from global conf
            'policy_expire_time': cls.global_storage_conf.get('policy_expire_time'),    # from global conf
            'private_expire_time': cls.global_storage_conf.get('private_expire_time'),  # from global conf
        })
        return StorageManage(conf)

    @classmethod
    def _init_private_storage_manage(cls, storage_conf: dict):
        """Build the manager for the encrypted bucket.

        Expected ``storage_conf`` keys: same as
        :meth:`_init_public_storage_manage`, with ``private_bucket_name``
        required here; both public domains are pointed at ``private_domain``.

        :return: a StorageManage instance (ObsManager or OssManager).
        """
        # Copy for the same reason as in _init_public_storage_manage.
        conf = dict(storage_conf)
        conf.update({
            'bucket_name': conf['private_bucket_name'],  # encrypted bucket
            'image_domain': conf.get('private_domain'),
            'asset_domain': conf.get('private_domain'),
            'cache_path': cls.global_storage_conf.get('cache_path'),                    # from global conf
            'policy_expire_time': cls.global_storage_conf.get('policy_expire_time'),    # from global conf
            'private_expire_time': cls.global_storage_conf.get('private_expire_time'),  # from global conf
        })
        return StorageManage(conf)
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/core/storage.py
storage.py
import os
import shutil
import stat
from os import path

import yzcore
from . import CommandError
from yzrpc.commands import CommandBase


class TemplateCommand(CommandBase):
    """Copy a bundled project/app template tree into a target directory,
    renaming placeholder path components and rewriting template suffixes.
    """

    requires_system_checks = False
    # The supported URL schemes
    url_schemes = ['http', 'https', 'ftp']
    # Rewrite the following suffixes when determining the target filename.
    rewrite_template_suffixes = (
        # Allow shipping invalid .py files without byte-compilation.
        ('.py-tpl', '.py'),
    )

    def add_arguments(self, parser):
        """Register the command-line arguments for this command."""
        parser.add_argument('name', help='Name of the application or project.')
        parser.add_argument('directory', nargs='?', help='Optional destination directory')
        # parser.add_argument('--template', help='The path or URL to load the template from.')

    def handle(self, app_or_project, name, target=None, **options):
        """Materialize the template.

        :param app_or_project: "app" or "project"
        :param name: project or application name
        :param target: optional destination directory; created under cwd/name
            when omitted
        :param options: extra command options (currently unused)
        :return:
        """
        self.app_or_project = app_or_project
        # self.paths_to_remove = []
        # self.verbosity = options['verbosity']

        self.validate_name(name, app_or_project)

        # if some directory is given, make sure it's nicely expanded
        if target is None:
            top_dir = path.join(os.getcwd(), name)
            try:
                os.makedirs(top_dir)
            except FileExistsError:
                raise CommandError("'%s' already exists" % top_dir)
            except OSError as e:
                raise CommandError(e)
        else:
            top_dir = os.path.abspath(path.expanduser(target))
            if not os.path.exists(top_dir):
                raise CommandError("Destination directory '%s' does not "
                                   "exist, please create it first." % top_dir)
            # For an app with an explicit target, the app gets its own
            # subdirectory inside the target.
            if self.app_or_project == "app":
                top_dir = path.join(top_dir, name)
                try:
                    os.makedirs(top_dir)
                except FileExistsError:
                    raise CommandError("'%s' already exists" % top_dir)
                except OSError as e:
                    raise CommandError(e)

        # Template trees live under yzcore/templates/<kind>_template and use
        # "<kind>_name" as a placeholder in directory and file names.
        base_name = '%s_name' % app_or_project
        base_subdir = '%s_template' % app_or_project
        template_dir = path.join(yzcore.__path__[0], 'templates', base_subdir)
        prefix_length = len(template_dir) + 1  # +1 strips the path separator

        for root, dirs, files in os.walk(template_dir):
            path_rest = root[prefix_length:]
            relative_dir = path_rest.replace(base_name, name)
            # relative_dir = path_rest.replace(base_subdir, name)
            if relative_dir:
                target_dir = path.join(top_dir, relative_dir)
                if not path.exists(target_dir):
                    os.mkdir(target_dir)

            # Iterate over a copy so pruning hidden/__pycache__ dirs is safe.
            for dirname in dirs[:]:
                if dirname.startswith('.') or dirname == '__pycache__':
                    dirs.remove(dirname)

            for filename in files:
                if filename.endswith(('.pyo', '.pyc', '.py.class')):
                    # Ignore some files as they cause various breakages.
                    continue
                old_path = path.join(root, filename)
                new_path = path.join(top_dir, relative_dir,
                                     filename.replace(base_name, name))
                for old_suffix, new_suffix in self.rewrite_template_suffixes:
                    if new_path.endswith(old_suffix):
                        new_path = new_path[:-len(old_suffix)] + new_suffix
                        break  # Only rewrite once

                if path.exists(new_path):
                    raise CommandError("%s already exists, overlaying a "
                                       "project or app into an existing "
                                       "directory won't replace conflicting "
                                       "files" % new_path)

                shutil.copyfile(old_path, new_path)
                # if self.verbosity >= 2:
                self.stdout.write("Creating %s\n" % new_path)
                try:
                    shutil.copymode(old_path, new_path)
                    self.make_writeable(new_path)
                except OSError:
                    # Best effort only: exotic filesystems may refuse chmod.
                    self.stderr.write(
                        "Notice: Couldn't set permission bits on %s. You're "
                        "probably using an uncommon filesystem setup. No "
                        "problem." % new_path, self.style.NOTICE)

    def validate_name(self, name, app_or_project):
        """Reject a missing name. NOTE(review): unlike Django's version this
        does not check that *name* is a valid identifier — confirm intended."""
        a_or_an = 'an' if app_or_project == 'app' else 'a'
        if name is None:
            raise CommandError('you must provide {an} {app} name'.format(
                an=a_or_an,
                app=app_or_project,
            ))

    def make_writeable(self, filename):
        """
        Make sure that the file is writeable.
        Useful if our source is read-only.
        """
        if not os.access(filename, os.W_OK):
            st = os.stat(filename)
            new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR
            os.chmod(filename, new_permissions)
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/core/management/templates.py
templates.py
import codecs
import datetime
import locale
from decimal import Decimal
from urllib.parse import quote


class Promise:
    """
    Base class for the proxy class created in the closure of the lazy function.
    It's used to recognize promises in code.
    """
    pass


class UnicodeDecodeErr(UnicodeDecodeError):
    """UnicodeDecodeError that also reports the offending object and its type."""

    def __init__(self, obj, *args):
        self.obj = obj
        super().__init__(*args)

    def __str__(self):
        return '%s. You passed in %r (%s)' % (super().__str__(), self.obj, type(self.obj))


def smart_text(s, encoding='utf-8', strings_only=False, errors='strict'):
    """
    Return a string representing 's'. Treat bytestrings using the 'encoding'
    codec. If strings_only is True, don't convert (some) non-string-like
    objects.
    """
    if isinstance(s, Promise):
        # The input is the result of a gettext_lazy() call.
        return s
    return force_text(s, encoding, strings_only, errors)


_PROTECTED_TYPES = (
    type(None), int, float, Decimal, datetime.datetime, datetime.date, datetime.time,
)


def is_protected_type(obj):
    """Determine if the object instance is of a protected type.

    Objects of protected types are preserved as-is when passed to
    force_text(strings_only=True).
    """
    return isinstance(obj, _PROTECTED_TYPES)


def force_text(s, encoding='utf-8', strings_only=False, errors='strict'):
    """
    Similar to smart_text, except that lazy instances are resolved to strings,
    rather than kept as lazy objects. If strings_only is True, don't convert
    (some) non-string-like objects.
    """
    # Handle the common case first for performance reasons.
    if issubclass(type(s), str):
        return s
    if strings_only and is_protected_type(s):
        return s
    try:
        if isinstance(s, bytes):
            s = str(s, encoding, errors)
        else:
            s = str(s)
    except UnicodeDecodeError as e:
        raise UnicodeDecodeErr(s, *e.args)
    return s


def smart_bytes(s, encoding='utf-8', strings_only=False, errors='strict'):
    """
    Return a bytestring version of 's', encoded as specified in 'encoding'.
    If strings_only is True, don't convert (some) non-string-like objects.
    """
    if isinstance(s, Promise):
        # The input is the result of a gettext_lazy() call.
        return s
    return force_bytes(s, encoding, strings_only, errors)


def force_bytes(s, encoding='utf-8', strings_only=False, errors='strict'):
    """
    Similar to smart_bytes, except that lazy instances are resolved to strings,
    rather than kept as lazy objects. If strings_only is True, don't convert
    (some) non-string-like objects.
    """
    # Handle the common case first for performance reasons.
    if isinstance(s, bytes):
        if encoding == 'utf-8':
            return s
        else:
            return s.decode('utf-8', errors).encode(encoding, errors)
    if strings_only and is_protected_type(s):
        return s
    if isinstance(s, memoryview):
        return bytes(s)
    return str(s).encode(encoding, errors)


smart_str = smart_text
force_str = force_text
smart_str.__doc__ = """
Alias of smart_text: return a str for 's', leaving lazy Promise objects
untouched. Suitable for writing to sys.stdout (for instance).
"""
force_str.__doc__ = """
Alias of force_text: resolve 's' (including lazy objects) to a str.
"""


def iri_to_uri(iri):
    """
    Convert an Internationalized Resource Identifier (IRI) portion to a URI
    portion that is suitable for inclusion in a URL.

    This is the algorithm from section 3.1 of RFC 3987, slightly simplified
    since the input is assumed to be a string rather than an arbitrary byte
    stream.

    Take an IRI (string or UTF-8 bytes, e.g. '/I ♥ Django/' or
    b'/I \xe2\x99\xa5 Django/') and return a string containing the encoded
    result with ASCII chars only (e.g. '/I%20%E2%99%A5%20Django/').
    """
    # The list of safe characters here is constructed from the "reserved" and
    # "unreserved" characters specified in sections 2.2 and 2.3 of RFC 3986:
    #     reserved    = gen-delims / sub-delims
    #     gen-delims  = ":" / "/" / "?" / "#" / "[" / "]" / "@"
    #     sub-delims  = "!" / "$" / "&" / "'" / "(" / ")"
    #                   / "*" / "+" / "," / ";" / "="
    #     unreserved  = ALPHA / DIGIT / "-" / "." / "_" / "~"
    # Of the unreserved characters, urllib.parse.quote() already considers all
    # but the ~ safe.
    # The % character is also added to the list of safe characters here, as the
    # end of section 3.1 of RFC 3987 specifically mentions that % must not be
    # converted.
    if iri is None:
        return iri
    elif isinstance(iri, Promise):
        iri = str(iri)
    return quote(iri, safe="/#%[]=:;$&()+,!?*@'~")


# List of byte values that uri_to_iri() decodes from percent encoding.
# First, the unreserved characters from RFC 3986:
_ascii_ranges = [[45, 46, 95, 126], range(65, 91), range(97, 123)]
_hextobyte = {
    (fmt % char).encode(): bytes((char,))
    for ascii_range in _ascii_ranges
    for char in ascii_range
    for fmt in ['%02x', '%02X']
}
# And then everything above 128, because bytes ≥ 128 are part of multibyte
# unicode characters.
_hexdig = '0123456789ABCDEFabcdef'
_hextobyte.update({
    (a + b).encode(): bytes.fromhex(a + b)
    for a in _hexdig[8:] for b in _hexdig
})


def uri_to_iri(uri):
    """
    Convert a Uniform Resource Identifier(URI) into an Internationalized
    Resource Identifier(IRI).

    This is the algorithm from section 3.2 of RFC 3987, excluding step 4.

    Take an URI in ASCII bytes (e.g. '/I%20%E2%99%A5%20Django/') and return
    a string containing the encoded result (e.g. '/I%20♥%20Django/').
    """
    if uri is None:
        return uri
    uri = force_bytes(uri)
    # Fast selective unquote: First, split on '%' and then starting with the
    # second block, decode the first 2 bytes if they represent a hex code to
    # decode. The rest of the block is the part after '%AB', not containing
    # any '%'. Add that to the output without further processing.
    bits = uri.split(b'%')
    if len(bits) == 1:
        iri = uri
    else:
        parts = [bits[0]]
        append = parts.append
        hextobyte = _hextobyte
        for item in bits[1:]:
            # Renamed from ``hex`` to avoid shadowing the builtin.
            hex_pair = item[:2]
            if hex_pair in hextobyte:
                append(hextobyte[hex_pair])
                append(item[2:])
            else:
                append(b'%')
                append(item)
        iri = b''.join(parts)
    return repercent_broken_unicode(iri).decode()


def escape_uri_path(path):
    """
    Escape the unsafe characters from the path portion of a Uniform Resource
    Identifier (URI).
    """
    # These are the "reserved" and "unreserved" characters specified in
    # sections 2.2 and 2.3 of RFC 2396:
    #     reserved    = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" | "$" | ","
    #     unreserved  = alphanum | mark
    #     mark        = "-" | "_" | "." | "!" | "~" | "*" | "'" | "(" | ")"
    # The list of safe characters here is constructed subtracting ";", "=",
    # and "?" according to section 3.3 of RFC 2396.
    # The reason for not subtracting and escaping "/" is that we are escaping
    # the entire path, not a path segment.
    return quote(path, safe="/:@&+$,-_.!~*'()")


def repercent_broken_unicode(path):
    """
    As per section 3.2 of RFC 3987, step three of converting a URI into an
    IRI, repercent-encode any octet produced that is not part of a strictly
    legal UTF-8 octet sequence.
    """
    while True:
        try:
            path.decode()
        except UnicodeDecodeError as e:
            # CVE-2019-14235: A recursion shouldn't be used since the exception
            # handling uses massive amounts of memory
            repercent = quote(path[e.start:e.end], safe=b"/#%[]=:;$&()+,!?*@'~")
            path = path[:e.start] + force_bytes(repercent) + path[e.end:]
        else:
            return path


def filepath_to_uri(path):
    """Convert a file system path to a URI portion that is suitable for
    inclusion in a URL.

    Encode certain chars that would normally be recognized as special chars
    for URIs. Do not encode the ' character, as it is a valid character
    within URIs. See the encodeURIComponent() JavaScript function for details.
    """
    if path is None:
        return path
    # I know about `os.sep` and `os.altsep` but I want to leave
    # some flexibility for hardcoding separators.
    return quote(path.replace("\\", "/"), safe="/~!*()'")


def get_system_encoding():
    """
    The encoding of the default system locale. Fallback to 'ascii' if the
    encoding is unsupported by Python or could not be determined. See tickets
    #10335 and #5846.
    """
    # NOTE(review): locale.getdefaultlocale() is deprecated since Python 3.11;
    # behaviour kept as-is pending a decision on minimum supported version.
    try:
        encoding = locale.getdefaultlocale()[1] or 'ascii'
        codecs.lookup(encoding)
    except Exception:
        encoding = 'ascii'
    return encoding


DEFAULT_LOCALE_ENCODING = get_system_encoding()
yz-core2
/yz-core2-1.0.61b2.tar.gz/yz-core2-1.0.61b2/yzcore/utils/encoding.py
encoding.py